xref: /linux/drivers/crypto/inside-secure/safexcel_cipher.c (revision 6fdcba32711044c35c0e1b094cbd8f3f0b4472c9)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7 
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28 
29 #include "safexcel.h"
30 
31 enum safexcel_cipher_direction {
32 	SAFEXCEL_ENCRYPT,
33 	SAFEXCEL_DECRYPT,
34 };
35 
36 enum safexcel_cipher_alg {
37 	SAFEXCEL_DES,
38 	SAFEXCEL_3DES,
39 	SAFEXCEL_AES,
40 	SAFEXCEL_CHACHA20,
41 	SAFEXCEL_SM4,
42 };
43 
44 struct safexcel_cipher_ctx {
45 	struct safexcel_context base;
46 	struct safexcel_crypto_priv *priv;
47 
48 	u32 mode;
49 	enum safexcel_cipher_alg alg;
50 	char aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
51 	char xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
52 
53 	__le32 key[16];
54 	u32 nonce;
55 	unsigned int key_len, xts;
56 
57 	/* All the below is AEAD specific */
58 	u32 hash_alg;
59 	u32 state_sz;
60 	__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
61 	__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
62 
63 	struct crypto_cipher *hkaes;
64 	struct crypto_aead *fback;
65 };
66 
67 struct safexcel_cipher_req {
68 	enum safexcel_cipher_direction direction;
69 	/* Number of result descriptors associated to the request */
70 	unsigned int rdescs;
71 	bool needs_inv;
72 	int  nr_src, nr_dst;
73 };
74 
75 static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
76 				  struct safexcel_command_desc *cdesc)
77 {
78 	u32 block_sz = 0;
79 
80 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
81 	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
82 		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
83 
84 		/* 32 bit nonce */
85 		cdesc->control_data.token[0] = ctx->nonce;
86 		/* 64 bit IV part */
87 		memcpy(&cdesc->control_data.token[1], iv, 8);
88 
89 		if (ctx->alg == SAFEXCEL_CHACHA20 ||
90 		    ctx->xcm == EIP197_XCM_MODE_CCM) {
91 			/* 32 bit counter, starting at 0 */
92 			cdesc->control_data.token[3] = 0;
93 		} else {
94 			/* 32 bit counter, start at 1 (big endian!) */
95 			cdesc->control_data.token[3] =
96 				(__force u32)cpu_to_be32(1);
97 		}
98 
99 		return;
100 	} else if (ctx->xcm == EIP197_XCM_MODE_GCM ||
101 		   (ctx->aead && ctx->alg == SAFEXCEL_CHACHA20)) {
102 		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
103 
104 		/* 96 bit IV part */
105 		memcpy(&cdesc->control_data.token[0], iv, 12);
106 
107 		if (ctx->alg == SAFEXCEL_CHACHA20) {
108 			/* 32 bit counter, starting at 0 */
109 			cdesc->control_data.token[3] = 0;
110 		} else {
111 			/* 32 bit counter, start at 1 (big endian!) */
112 			*(__be32 *)&cdesc->control_data.token[3] =
113 				cpu_to_be32(1);
114 		}
115 
116 		return;
117 	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
118 		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
119 
120 		/* 96 bit nonce part */
121 		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
122 		/* 32 bit counter */
123 		cdesc->control_data.token[3] = *(u32 *)iv;
124 
125 		return;
126 	} else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
127 		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
128 
129 		/* Variable length IV part */
130 		memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
131 		/* Start variable length counter at 0 */
132 		memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
133 		       0, iv[0] + 1);
134 
135 		return;
136 	}
137 
138 	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
139 		switch (ctx->alg) {
140 		case SAFEXCEL_DES:
141 			block_sz = DES_BLOCK_SIZE;
142 			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
143 			break;
144 		case SAFEXCEL_3DES:
145 			block_sz = DES3_EDE_BLOCK_SIZE;
146 			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
147 			break;
148 		case SAFEXCEL_SM4:
149 			block_sz = SM4_BLOCK_SIZE;
150 			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
151 			break;
152 		case SAFEXCEL_AES:
153 			block_sz = AES_BLOCK_SIZE;
154 			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
155 			break;
156 		default:
157 			break;
158 		}
159 		memcpy(cdesc->control_data.token, iv, block_sz);
160 	}
161 }
162 
163 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
164 				    struct safexcel_command_desc *cdesc,
165 				    u32 length)
166 {
167 	struct safexcel_token *token;
168 
169 	safexcel_cipher_token(ctx, iv, cdesc);
170 
171 	/* skip over worst case IV of 4 dwords, no need to be exact */
172 	token = (struct safexcel_token *)(cdesc->control_data.token + 4);
173 
174 	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
175 	token[0].packet_length = length;
176 	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
177 			EIP197_TOKEN_STAT_LAST_HASH;
178 	token[0].instructions = EIP197_TOKEN_INS_LAST |
179 				EIP197_TOKEN_INS_TYPE_CRYPTO |
180 				EIP197_TOKEN_INS_TYPE_OUTPUT;
181 }
182 
183 static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
184 				struct safexcel_command_desc *cdesc,
185 				enum safexcel_cipher_direction direction,
186 				u32 cryptlen, u32 assoclen, u32 digestsize)
187 {
188 	struct safexcel_token *token;
189 
190 	safexcel_cipher_token(ctx, iv, cdesc);
191 
192 	if (direction == SAFEXCEL_ENCRYPT) {
193 		/* align end of instruction sequence to end of token */
194 		token = (struct safexcel_token *)(cdesc->control_data.token +
195 			 EIP197_MAX_TOKENS - 14);
196 
197 		token[13].opcode = EIP197_TOKEN_OPCODE_INSERT;
198 		token[13].packet_length = digestsize;
199 		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
200 				 EIP197_TOKEN_STAT_LAST_PACKET;
201 		token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
202 					 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
203 	} else {
204 		cryptlen -= digestsize;
205 
206 		/* align end of instruction sequence to end of token */
207 		token = (struct safexcel_token *)(cdesc->control_data.token +
208 			 EIP197_MAX_TOKENS - 15);
209 
210 		token[13].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
211 		token[13].packet_length = digestsize;
212 		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
213 				 EIP197_TOKEN_STAT_LAST_PACKET;
214 		token[13].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
215 
216 		token[14].opcode = EIP197_TOKEN_OPCODE_VERIFY;
217 		token[14].packet_length = digestsize |
218 					  EIP197_TOKEN_HASH_RESULT_VERIFY;
219 		token[14].stat = EIP197_TOKEN_STAT_LAST_HASH |
220 				 EIP197_TOKEN_STAT_LAST_PACKET;
221 		token[14].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
222 	}
223 
224 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
225 		/* For ESP mode (and not GMAC), skip over the IV */
226 		token[8].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
227 		token[8].packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
228 
229 		assoclen -= EIP197_AEAD_IPSEC_IV_SIZE;
230 	}
231 
232 	token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
233 	token[6].packet_length = assoclen;
234 	token[6].instructions = EIP197_TOKEN_INS_LAST |
235 				EIP197_TOKEN_INS_TYPE_HASH;
236 
237 	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
238 		token[11].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
239 		token[11].packet_length = cryptlen;
240 		token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
241 		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
242 			token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
243 			/* Do not send to crypt engine in case of GMAC */
244 			token[11].instructions = EIP197_TOKEN_INS_LAST |
245 						 EIP197_TOKEN_INS_TYPE_HASH |
246 						 EIP197_TOKEN_INS_TYPE_OUTPUT;
247 		} else {
248 			token[11].instructions = EIP197_TOKEN_INS_LAST |
249 						 EIP197_TOKEN_INS_TYPE_CRYPTO |
250 						 EIP197_TOKEN_INS_TYPE_HASH |
251 						 EIP197_TOKEN_INS_TYPE_OUTPUT;
252 		}
253 	} else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
254 		token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
255 	}
256 
257 	if (!ctx->xcm)
258 		return;
259 
260 	token[9].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
261 	token[9].packet_length = 0;
262 	token[9].instructions = AES_BLOCK_SIZE;
263 
264 	token[10].opcode = EIP197_TOKEN_OPCODE_INSERT;
265 	token[10].packet_length = AES_BLOCK_SIZE;
266 	token[10].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
267 				 EIP197_TOKEN_INS_TYPE_CRYPTO;
268 
269 	if (ctx->xcm != EIP197_XCM_MODE_GCM) {
270 		u8 *final_iv = (u8 *)cdesc->control_data.token;
271 		u8 *cbcmaciv = (u8 *)&token[1];
272 		__le32 *aadlen = (__le32 *)&token[5];
273 
274 		/* Construct IV block B0 for the CBC-MAC */
275 		token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
276 		token[0].packet_length = AES_BLOCK_SIZE +
277 					 ((assoclen > 0) << 1);
278 		token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
279 					EIP197_TOKEN_INS_TYPE_HASH;
280 		/* Variable length IV part */
281 		memcpy(cbcmaciv, final_iv, 15 - final_iv[0]);
282 		/* fixup flags byte */
283 		cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
284 		/* Clear upper bytes of variable message length to 0 */
285 		memset(cbcmaciv + 15 - final_iv[0], 0, final_iv[0] - 1);
286 		/* insert lower 2 bytes of message length */
287 		cbcmaciv[14] = cryptlen >> 8;
288 		cbcmaciv[15] = cryptlen & 255;
289 
290 		if (assoclen) {
291 			*aadlen = cpu_to_le32((assoclen >> 8) |
292 					      ((assoclen & 0xff) << 8));
293 			assoclen += 2;
294 		}
295 
296 		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
297 
298 		/* Align AAD data towards hash engine */
299 		token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
300 		assoclen &= 15;
301 		token[7].packet_length = assoclen ? 16 - assoclen : 0;
302 
303 		if (likely(cryptlen)) {
304 			token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;
305 
306 			/* Align crypto data towards hash engine */
307 			token[11].stat = 0;
308 
309 			token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
310 			cryptlen &= 15;
311 			token[12].packet_length = cryptlen ? 16 - cryptlen : 0;
312 			token[12].stat = EIP197_TOKEN_STAT_LAST_HASH;
313 			token[12].instructions = EIP197_TOKEN_INS_TYPE_HASH;
314 		} else {
315 			token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
316 			token[7].instructions = EIP197_TOKEN_INS_LAST |
317 						EIP197_TOKEN_INS_TYPE_HASH;
318 		}
319 	}
320 }
321 
322 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
323 					const u8 *key, unsigned int len)
324 {
325 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
326 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
327 	struct safexcel_crypto_priv *priv = ctx->priv;
328 	struct crypto_aes_ctx aes;
329 	int ret, i;
330 
331 	ret = aes_expandkey(&aes, key, len);
332 	if (ret) {
333 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
334 		return ret;
335 	}
336 
337 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
338 		for (i = 0; i < len / sizeof(u32); i++) {
339 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
340 				ctx->base.needs_inv = true;
341 				break;
342 			}
343 		}
344 	}
345 
346 	for (i = 0; i < len / sizeof(u32); i++)
347 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
348 
349 	ctx->key_len = len;
350 
351 	memzero_explicit(&aes, sizeof(aes));
352 	return 0;
353 }
354 
355 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
356 				unsigned int len)
357 {
358 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
359 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
360 	struct safexcel_ahash_export_state istate, ostate;
361 	struct safexcel_crypto_priv *priv = ctx->priv;
362 	struct crypto_authenc_keys keys;
363 	struct crypto_aes_ctx aes;
364 	int err = -EINVAL, i;
365 
366 	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
367 		goto badkey;
368 
369 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
370 		/* Must have at least space for the nonce here */
371 		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
372 			goto badkey;
373 		/* last 4 bytes of key are the nonce! */
374 		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
375 				      CTR_RFC3686_NONCE_SIZE);
376 		/* exclude the nonce here */
377 		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
378 	}
379 
380 	/* Encryption key */
381 	switch (ctx->alg) {
382 	case SAFEXCEL_DES:
383 		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
384 		if (unlikely(err))
385 			goto badkey_expflags;
386 		break;
387 	case SAFEXCEL_3DES:
388 		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
389 		if (unlikely(err))
390 			goto badkey_expflags;
391 		break;
392 	case SAFEXCEL_AES:
393 		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
394 		if (unlikely(err))
395 			goto badkey;
396 		break;
397 	case SAFEXCEL_SM4:
398 		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
399 			goto badkey;
400 		break;
401 	default:
402 		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
403 		goto badkey;
404 	}
405 
406 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
407 		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
408 			if (le32_to_cpu(ctx->key[i]) !=
409 			    ((u32 *)keys.enckey)[i]) {
410 				ctx->base.needs_inv = true;
411 				break;
412 			}
413 		}
414 	}
415 
416 	/* Auth key */
417 	switch (ctx->hash_alg) {
418 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
419 		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
420 					 keys.authkeylen, &istate, &ostate))
421 			goto badkey;
422 		break;
423 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
424 		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
425 					 keys.authkeylen, &istate, &ostate))
426 			goto badkey;
427 		break;
428 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
429 		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
430 					 keys.authkeylen, &istate, &ostate))
431 			goto badkey;
432 		break;
433 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
434 		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
435 					 keys.authkeylen, &istate, &ostate))
436 			goto badkey;
437 		break;
438 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
439 		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
440 					 keys.authkeylen, &istate, &ostate))
441 			goto badkey;
442 		break;
443 	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
444 		if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
445 					 keys.authkeylen, &istate, &ostate))
446 			goto badkey;
447 		break;
448 	default:
449 		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
450 		goto badkey;
451 	}
452 
453 	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
454 				    CRYPTO_TFM_RES_MASK);
455 
456 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
457 	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
458 	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
459 		ctx->base.needs_inv = true;
460 
461 	/* Now copy the keys into the context */
462 	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
463 		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
464 	ctx->key_len = keys.enckeylen;
465 
466 	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
467 	memcpy(ctx->opad, &ostate.state, ctx->state_sz);
468 
469 	memzero_explicit(&keys, sizeof(keys));
470 	return 0;
471 
472 badkey:
473 	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
474 badkey_expflags:
475 	memzero_explicit(&keys, sizeof(keys));
476 	return err;
477 }
478 
479 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
480 				    struct crypto_async_request *async,
481 				    struct safexcel_cipher_req *sreq,
482 				    struct safexcel_command_desc *cdesc)
483 {
484 	struct safexcel_crypto_priv *priv = ctx->priv;
485 	int ctrl_size = ctx->key_len / sizeof(u32);
486 
487 	cdesc->control_data.control1 = ctx->mode;
488 
489 	if (ctx->aead) {
490 		/* Take in account the ipad+opad digests */
491 		if (ctx->xcm) {
492 			ctrl_size += ctx->state_sz / sizeof(u32);
493 			cdesc->control_data.control0 =
494 				CONTEXT_CONTROL_KEY_EN |
495 				CONTEXT_CONTROL_DIGEST_XCM |
496 				ctx->hash_alg |
497 				CONTEXT_CONTROL_SIZE(ctrl_size);
498 		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
499 			/* Chacha20-Poly1305 */
500 			cdesc->control_data.control0 =
501 				CONTEXT_CONTROL_KEY_EN |
502 				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
503 				(sreq->direction == SAFEXCEL_ENCRYPT ?
504 					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
505 					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
506 				ctx->hash_alg |
507 				CONTEXT_CONTROL_SIZE(ctrl_size);
508 			return 0;
509 		} else {
510 			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
511 			cdesc->control_data.control0 =
512 				CONTEXT_CONTROL_KEY_EN |
513 				CONTEXT_CONTROL_DIGEST_HMAC |
514 				ctx->hash_alg |
515 				CONTEXT_CONTROL_SIZE(ctrl_size);
516 		}
517 
518 		if (sreq->direction == SAFEXCEL_ENCRYPT &&
519 		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
520 		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
521 			cdesc->control_data.control0 |=
522 				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
523 		else if (sreq->direction == SAFEXCEL_ENCRYPT)
524 			cdesc->control_data.control0 |=
525 				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
526 		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
527 			cdesc->control_data.control0 |=
528 				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
529 		else
530 			cdesc->control_data.control0 |=
531 				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
532 	} else {
533 		if (sreq->direction == SAFEXCEL_ENCRYPT)
534 			cdesc->control_data.control0 =
535 				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
536 				CONTEXT_CONTROL_KEY_EN |
537 				CONTEXT_CONTROL_SIZE(ctrl_size);
538 		else
539 			cdesc->control_data.control0 =
540 				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
541 				CONTEXT_CONTROL_KEY_EN |
542 				CONTEXT_CONTROL_SIZE(ctrl_size);
543 	}
544 
545 	if (ctx->alg == SAFEXCEL_DES) {
546 		cdesc->control_data.control0 |=
547 			CONTEXT_CONTROL_CRYPTO_ALG_DES;
548 	} else if (ctx->alg == SAFEXCEL_3DES) {
549 		cdesc->control_data.control0 |=
550 			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
551 	} else if (ctx->alg == SAFEXCEL_AES) {
552 		switch (ctx->key_len >> ctx->xts) {
553 		case AES_KEYSIZE_128:
554 			cdesc->control_data.control0 |=
555 				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
556 			break;
557 		case AES_KEYSIZE_192:
558 			cdesc->control_data.control0 |=
559 				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
560 			break;
561 		case AES_KEYSIZE_256:
562 			cdesc->control_data.control0 |=
563 				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
564 			break;
565 		default:
566 			dev_err(priv->dev, "aes keysize not supported: %u\n",
567 				ctx->key_len >> ctx->xts);
568 			return -EINVAL;
569 		}
570 	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
571 		cdesc->control_data.control0 |=
572 			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
573 	} else if (ctx->alg == SAFEXCEL_SM4) {
574 		cdesc->control_data.control0 |=
575 			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
576 	}
577 
578 	return 0;
579 }
580 
581 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
582 				      struct crypto_async_request *async,
583 				      struct scatterlist *src,
584 				      struct scatterlist *dst,
585 				      unsigned int cryptlen,
586 				      struct safexcel_cipher_req *sreq,
587 				      bool *should_complete, int *ret)
588 {
589 	struct skcipher_request *areq = skcipher_request_cast(async);
590 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
591 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
592 	struct safexcel_result_desc *rdesc;
593 	int ndesc = 0;
594 
595 	*ret = 0;
596 
597 	if (unlikely(!sreq->rdescs))
598 		return 0;
599 
600 	while (sreq->rdescs--) {
601 		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
602 		if (IS_ERR(rdesc)) {
603 			dev_err(priv->dev,
604 				"cipher: result: could not retrieve the result descriptor\n");
605 			*ret = PTR_ERR(rdesc);
606 			break;
607 		}
608 
609 		if (likely(!*ret))
610 			*ret = safexcel_rdesc_check_errors(priv, rdesc);
611 
612 		ndesc++;
613 	}
614 
615 	safexcel_complete(priv, ring);
616 
617 	if (src == dst) {
618 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
619 	} else {
620 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
621 		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
622 	}
623 
624 	/*
625 	 * Update IV in req from last crypto output word for CBC modes
626 	 */
627 	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
628 	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
629 		/* For encrypt take the last output word */
630 		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
631 				   crypto_skcipher_ivsize(skcipher),
632 				   (cryptlen -
633 				    crypto_skcipher_ivsize(skcipher)));
634 	}
635 
636 	*should_complete = true;
637 
638 	return ndesc;
639 }
640 
641 static int safexcel_send_req(struct crypto_async_request *base, int ring,
642 			     struct safexcel_cipher_req *sreq,
643 			     struct scatterlist *src, struct scatterlist *dst,
644 			     unsigned int cryptlen, unsigned int assoclen,
645 			     unsigned int digestsize, u8 *iv, int *commands,
646 			     int *results)
647 {
648 	struct skcipher_request *areq = skcipher_request_cast(base);
649 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
650 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
651 	struct safexcel_crypto_priv *priv = ctx->priv;
652 	struct safexcel_command_desc *cdesc;
653 	struct safexcel_command_desc *first_cdesc = NULL;
654 	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
655 	struct scatterlist *sg;
656 	unsigned int totlen;
657 	unsigned int totlen_src = cryptlen + assoclen;
658 	unsigned int totlen_dst = totlen_src;
659 	int n_cdesc = 0, n_rdesc = 0;
660 	int queued, i, ret = 0;
661 	bool first = true;
662 
663 	sreq->nr_src = sg_nents_for_len(src, totlen_src);
664 
665 	if (ctx->aead) {
666 		/*
667 		 * AEAD has auth tag appended to output for encrypt and
668 		 * removed from the output for decrypt!
669 		 */
670 		if (sreq->direction == SAFEXCEL_DECRYPT)
671 			totlen_dst -= digestsize;
672 		else
673 			totlen_dst += digestsize;
674 
675 		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
676 		       ctx->ipad, ctx->state_sz);
677 		if (!ctx->xcm)
678 			memcpy(ctx->base.ctxr->data + (ctx->key_len +
679 			       ctx->state_sz) / sizeof(u32), ctx->opad,
680 			       ctx->state_sz);
681 	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
682 		   (sreq->direction == SAFEXCEL_DECRYPT)) {
683 		/*
684 		 * Save IV from last crypto input word for CBC modes in decrypt
685 		 * direction. Need to do this first in case of inplace operation
686 		 * as it will be overwritten.
687 		 */
688 		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
689 				   crypto_skcipher_ivsize(skcipher),
690 				   (totlen_src -
691 				    crypto_skcipher_ivsize(skcipher)));
692 	}
693 
694 	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
695 
696 	/*
697 	 * Remember actual input length, source buffer length may be
698 	 * updated in case of inline operation below.
699 	 */
700 	totlen = totlen_src;
701 	queued = totlen_src;
702 
703 	if (src == dst) {
704 		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
705 		sreq->nr_dst = sreq->nr_src;
706 		if (unlikely((totlen_src || totlen_dst) &&
707 		    (sreq->nr_src <= 0))) {
708 			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
709 				max(totlen_src, totlen_dst));
710 			return -EINVAL;
711 		}
712 		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
713 	} else {
714 		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
715 			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
716 				totlen_src);
717 			return -EINVAL;
718 		}
719 		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
720 
721 		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
722 			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
723 				totlen_dst);
724 			dma_unmap_sg(priv->dev, src, sreq->nr_src,
725 				     DMA_TO_DEVICE);
726 			return -EINVAL;
727 		}
728 		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
729 	}
730 
731 	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
732 
733 	/* The EIP cannot deal with zero length input packets! */
734 	if (totlen == 0)
735 		totlen = 1;
736 
737 	/* command descriptors */
738 	for_each_sg(src, sg, sreq->nr_src, i) {
739 		int len = sg_dma_len(sg);
740 
741 		/* Do not overflow the request */
742 		if (queued - len < 0)
743 			len = queued;
744 
745 		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
746 					   !(queued - len),
747 					   sg_dma_address(sg), len, totlen,
748 					   ctx->base.ctxr_dma);
749 		if (IS_ERR(cdesc)) {
750 			/* No space left in the command descriptor ring */
751 			ret = PTR_ERR(cdesc);
752 			goto cdesc_rollback;
753 		}
754 		n_cdesc++;
755 
756 		if (n_cdesc == 1) {
757 			first_cdesc = cdesc;
758 		}
759 
760 		queued -= len;
761 		if (!queued)
762 			break;
763 	}
764 
765 	if (unlikely(!n_cdesc)) {
766 		/*
767 		 * Special case: zero length input buffer.
768 		 * The engine always needs the 1st command descriptor, however!
769 		 */
770 		first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
771 						 ctx->base.ctxr_dma);
772 		n_cdesc = 1;
773 	}
774 
775 	/* Add context control words and token to first command descriptor */
776 	safexcel_context_control(ctx, base, sreq, first_cdesc);
777 	if (ctx->aead)
778 		safexcel_aead_token(ctx, iv, first_cdesc,
779 				    sreq->direction, cryptlen,
780 				    assoclen, digestsize);
781 	else
782 		safexcel_skcipher_token(ctx, iv, first_cdesc,
783 					cryptlen);
784 
785 	/* result descriptors */
786 	for_each_sg(dst, sg, sreq->nr_dst, i) {
787 		bool last = (i == sreq->nr_dst - 1);
788 		u32 len = sg_dma_len(sg);
789 
790 		/* only allow the part of the buffer we know we need */
791 		if (len > totlen_dst)
792 			len = totlen_dst;
793 		if (unlikely(!len))
794 			break;
795 		totlen_dst -= len;
796 
797 		/* skip over AAD space in buffer - not written */
798 		if (assoclen) {
799 			if (assoclen >= len) {
800 				assoclen -= len;
801 				continue;
802 			}
803 			rdesc = safexcel_add_rdesc(priv, ring, first, last,
804 						   sg_dma_address(sg) +
805 						   assoclen,
806 						   len - assoclen);
807 			assoclen = 0;
808 		} else {
809 			rdesc = safexcel_add_rdesc(priv, ring, first, last,
810 						   sg_dma_address(sg),
811 						   len);
812 		}
813 		if (IS_ERR(rdesc)) {
814 			/* No space left in the result descriptor ring */
815 			ret = PTR_ERR(rdesc);
816 			goto rdesc_rollback;
817 		}
818 		if (first) {
819 			first_rdesc = rdesc;
820 			first = false;
821 		}
822 		n_rdesc++;
823 	}
824 
825 	if (unlikely(first)) {
826 		/*
827 		 * Special case: AEAD decrypt with only AAD data.
828 		 * In this case there is NO output data from the engine,
829 		 * but the engine still needs a result descriptor!
830 		 * Create a dummy one just for catching the result token.
831 		 */
832 		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
833 		if (IS_ERR(rdesc)) {
834 			/* No space left in the result descriptor ring */
835 			ret = PTR_ERR(rdesc);
836 			goto rdesc_rollback;
837 		}
838 		first_rdesc = rdesc;
839 		n_rdesc = 1;
840 	}
841 
842 	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
843 
844 	*commands = n_cdesc;
845 	*results = n_rdesc;
846 	return 0;
847 
848 rdesc_rollback:
849 	for (i = 0; i < n_rdesc; i++)
850 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
851 cdesc_rollback:
852 	for (i = 0; i < n_cdesc; i++)
853 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
854 
855 	if (src == dst) {
856 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
857 	} else {
858 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
859 		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
860 	}
861 
862 	return ret;
863 }
864 
865 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
866 				      int ring,
867 				      struct crypto_async_request *base,
868 				      struct safexcel_cipher_req *sreq,
869 				      bool *should_complete, int *ret)
870 {
871 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
872 	struct safexcel_result_desc *rdesc;
873 	int ndesc = 0, enq_ret;
874 
875 	*ret = 0;
876 
877 	if (unlikely(!sreq->rdescs))
878 		return 0;
879 
880 	while (sreq->rdescs--) {
881 		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
882 		if (IS_ERR(rdesc)) {
883 			dev_err(priv->dev,
884 				"cipher: invalidate: could not retrieve the result descriptor\n");
885 			*ret = PTR_ERR(rdesc);
886 			break;
887 		}
888 
889 		if (likely(!*ret))
890 			*ret = safexcel_rdesc_check_errors(priv, rdesc);
891 
892 		ndesc++;
893 	}
894 
895 	safexcel_complete(priv, ring);
896 
897 	if (ctx->base.exit_inv) {
898 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
899 			      ctx->base.ctxr_dma);
900 
901 		*should_complete = true;
902 
903 		return ndesc;
904 	}
905 
906 	ring = safexcel_select_ring(priv);
907 	ctx->base.ring = ring;
908 
909 	spin_lock_bh(&priv->ring[ring].queue_lock);
910 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
911 	spin_unlock_bh(&priv->ring[ring].queue_lock);
912 
913 	if (enq_ret != -EINPROGRESS)
914 		*ret = enq_ret;
915 
916 	queue_work(priv->ring[ring].workqueue,
917 		   &priv->ring[ring].work_data.work);
918 
919 	*should_complete = false;
920 
921 	return ndesc;
922 }
923 
924 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
925 					   int ring,
926 					   struct crypto_async_request *async,
927 					   bool *should_complete, int *ret)
928 {
929 	struct skcipher_request *req = skcipher_request_cast(async);
930 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
931 	int err;
932 
933 	if (sreq->needs_inv) {
934 		sreq->needs_inv = false;
935 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
936 						 should_complete, ret);
937 	} else {
938 		err = safexcel_handle_req_result(priv, ring, async, req->src,
939 						 req->dst, req->cryptlen, sreq,
940 						 should_complete, ret);
941 	}
942 
943 	return err;
944 }
945 
946 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
947 				       int ring,
948 				       struct crypto_async_request *async,
949 				       bool *should_complete, int *ret)
950 {
951 	struct aead_request *req = aead_request_cast(async);
952 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
953 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
954 	int err;
955 
956 	if (sreq->needs_inv) {
957 		sreq->needs_inv = false;
958 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
959 						 should_complete, ret);
960 	} else {
961 		err = safexcel_handle_req_result(priv, ring, async, req->src,
962 						 req->dst,
963 						 req->cryptlen + crypto_aead_authsize(tfm),
964 						 sreq, should_complete, ret);
965 	}
966 
967 	return err;
968 }
969 
970 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
971 				    int ring, int *commands, int *results)
972 {
973 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
974 	struct safexcel_crypto_priv *priv = ctx->priv;
975 	int ret;
976 
977 	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
978 	if (unlikely(ret))
979 		return ret;
980 
981 	*commands = 1;
982 	*results = 1;
983 
984 	return 0;
985 }
986 
987 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
988 				  int *commands, int *results)
989 {
990 	struct skcipher_request *req = skcipher_request_cast(async);
991 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
992 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
993 	struct safexcel_crypto_priv *priv = ctx->priv;
994 	int ret;
995 
996 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
997 
998 	if (sreq->needs_inv) {
999 		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1000 	} else {
1001 		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1002 		u8 input_iv[AES_BLOCK_SIZE];
1003 
1004 		/*
1005 		 * Save input IV in case of CBC decrypt mode
1006 		 * Will be overwritten with output IV prior to use!
1007 		 */
1008 		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1009 
1010 		ret = safexcel_send_req(async, ring, sreq, req->src,
1011 					req->dst, req->cryptlen, 0, 0, input_iv,
1012 					commands, results);
1013 	}
1014 
1015 	sreq->rdescs = *results;
1016 	return ret;
1017 }
1018 
1019 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1020 			      int *commands, int *results)
1021 {
1022 	struct aead_request *req = aead_request_cast(async);
1023 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1024 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1025 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1026 	struct safexcel_crypto_priv *priv = ctx->priv;
1027 	int ret;
1028 
1029 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1030 
1031 	if (sreq->needs_inv)
1032 		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1033 	else
1034 		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1035 					req->cryptlen, req->assoclen,
1036 					crypto_aead_authsize(tfm), req->iv,
1037 					commands, results);
1038 	sreq->rdescs = *results;
1039 	return ret;
1040 }
1041 
1042 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1043 				    struct crypto_async_request *base,
1044 				    struct safexcel_cipher_req *sreq,
1045 				    struct safexcel_inv_result *result)
1046 {
1047 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1048 	struct safexcel_crypto_priv *priv = ctx->priv;
1049 	int ring = ctx->base.ring;
1050 
1051 	init_completion(&result->completion);
1052 
1053 	ctx = crypto_tfm_ctx(base->tfm);
1054 	ctx->base.exit_inv = true;
1055 	sreq->needs_inv = true;
1056 
1057 	spin_lock_bh(&priv->ring[ring].queue_lock);
1058 	crypto_enqueue_request(&priv->ring[ring].queue, base);
1059 	spin_unlock_bh(&priv->ring[ring].queue_lock);
1060 
1061 	queue_work(priv->ring[ring].workqueue,
1062 		   &priv->ring[ring].work_data.work);
1063 
1064 	wait_for_completion(&result->completion);
1065 
1066 	if (result->error) {
1067 		dev_warn(priv->dev,
1068 			"cipher: sync: invalidate: completion error %d\n",
1069 			 result->error);
1070 		return result->error;
1071 	}
1072 
1073 	return 0;
1074 }
1075 
1076 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1077 {
1078 	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1079 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1080 	struct safexcel_inv_result result = {};
1081 
1082 	memset(req, 0, sizeof(struct skcipher_request));
1083 
1084 	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1085 				      safexcel_inv_complete, &result);
1086 	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1087 
1088 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1089 }
1090 
1091 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1092 {
1093 	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1094 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1095 	struct safexcel_inv_result result = {};
1096 
1097 	memset(req, 0, sizeof(struct aead_request));
1098 
1099 	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1100 				  safexcel_inv_complete, &result);
1101 	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1102 
1103 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1104 }
1105 
1106 static int safexcel_queue_req(struct crypto_async_request *base,
1107 			struct safexcel_cipher_req *sreq,
1108 			enum safexcel_cipher_direction dir)
1109 {
1110 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1111 	struct safexcel_crypto_priv *priv = ctx->priv;
1112 	int ret, ring;
1113 
1114 	sreq->needs_inv = false;
1115 	sreq->direction = dir;
1116 
1117 	if (ctx->base.ctxr) {
1118 		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1119 			sreq->needs_inv = true;
1120 			ctx->base.needs_inv = false;
1121 		}
1122 	} else {
1123 		ctx->base.ring = safexcel_select_ring(priv);
1124 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1125 						 EIP197_GFP_FLAGS(*base),
1126 						 &ctx->base.ctxr_dma);
1127 		if (!ctx->base.ctxr)
1128 			return -ENOMEM;
1129 	}
1130 
1131 	ring = ctx->base.ring;
1132 
1133 	spin_lock_bh(&priv->ring[ring].queue_lock);
1134 	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1135 	spin_unlock_bh(&priv->ring[ring].queue_lock);
1136 
1137 	queue_work(priv->ring[ring].workqueue,
1138 		   &priv->ring[ring].work_data.work);
1139 
1140 	return ret;
1141 }
1142 
1143 static int safexcel_encrypt(struct skcipher_request *req)
1144 {
1145 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1146 			SAFEXCEL_ENCRYPT);
1147 }
1148 
1149 static int safexcel_decrypt(struct skcipher_request *req)
1150 {
1151 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1152 			SAFEXCEL_DECRYPT);
1153 }
1154 
1155 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1156 {
1157 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1158 	struct safexcel_alg_template *tmpl =
1159 		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1160 			     alg.skcipher.base);
1161 
1162 	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1163 				    sizeof(struct safexcel_cipher_req));
1164 
1165 	ctx->priv = tmpl->priv;
1166 
1167 	ctx->base.send = safexcel_skcipher_send;
1168 	ctx->base.handle_result = safexcel_skcipher_handle_result;
1169 	return 0;
1170 }
1171 
1172 static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1173 {
1174 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1175 
1176 	memzero_explicit(ctx->key, sizeof(ctx->key));
1177 
1178 	/* context not allocated, skip invalidation */
1179 	if (!ctx->base.ctxr)
1180 		return -ENOMEM;
1181 
1182 	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1183 	return 0;
1184 }
1185 
1186 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1187 {
1188 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1189 	struct safexcel_crypto_priv *priv = ctx->priv;
1190 	int ret;
1191 
1192 	if (safexcel_cipher_cra_exit(tfm))
1193 		return;
1194 
1195 	if (priv->flags & EIP197_TRC_CACHE) {
1196 		ret = safexcel_skcipher_exit_inv(tfm);
1197 		if (ret)
1198 			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1199 				 ret);
1200 	} else {
1201 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1202 			      ctx->base.ctxr_dma);
1203 	}
1204 }
1205 
1206 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1207 {
1208 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1209 	struct safexcel_crypto_priv *priv = ctx->priv;
1210 	int ret;
1211 
1212 	if (safexcel_cipher_cra_exit(tfm))
1213 		return;
1214 
1215 	if (priv->flags & EIP197_TRC_CACHE) {
1216 		ret = safexcel_aead_exit_inv(tfm);
1217 		if (ret)
1218 			dev_warn(priv->dev, "aead: invalidation error %d\n",
1219 				 ret);
1220 	} else {
1221 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1222 			      ctx->base.ctxr_dma);
1223 	}
1224 }
1225 
1226 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1227 {
1228 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1229 
1230 	safexcel_skcipher_cra_init(tfm);
1231 	ctx->alg  = SAFEXCEL_AES;
1232 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1233 	return 0;
1234 }
1235 
1236 struct safexcel_alg_template safexcel_alg_ecb_aes = {
1237 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1238 	.algo_mask = SAFEXCEL_ALG_AES,
1239 	.alg.skcipher = {
1240 		.setkey = safexcel_skcipher_aes_setkey,
1241 		.encrypt = safexcel_encrypt,
1242 		.decrypt = safexcel_decrypt,
1243 		.min_keysize = AES_MIN_KEY_SIZE,
1244 		.max_keysize = AES_MAX_KEY_SIZE,
1245 		.base = {
1246 			.cra_name = "ecb(aes)",
1247 			.cra_driver_name = "safexcel-ecb-aes",
1248 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1249 			.cra_flags = CRYPTO_ALG_ASYNC |
1250 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1251 			.cra_blocksize = AES_BLOCK_SIZE,
1252 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1253 			.cra_alignmask = 0,
1254 			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
1255 			.cra_exit = safexcel_skcipher_cra_exit,
1256 			.cra_module = THIS_MODULE,
1257 		},
1258 	},
1259 };
1260 
1261 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1262 {
1263 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1264 
1265 	safexcel_skcipher_cra_init(tfm);
1266 	ctx->alg  = SAFEXCEL_AES;
1267 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1268 	return 0;
1269 }
1270 
1271 struct safexcel_alg_template safexcel_alg_cbc_aes = {
1272 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1273 	.algo_mask = SAFEXCEL_ALG_AES,
1274 	.alg.skcipher = {
1275 		.setkey = safexcel_skcipher_aes_setkey,
1276 		.encrypt = safexcel_encrypt,
1277 		.decrypt = safexcel_decrypt,
1278 		.min_keysize = AES_MIN_KEY_SIZE,
1279 		.max_keysize = AES_MAX_KEY_SIZE,
1280 		.ivsize = AES_BLOCK_SIZE,
1281 		.base = {
1282 			.cra_name = "cbc(aes)",
1283 			.cra_driver_name = "safexcel-cbc-aes",
1284 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1285 			.cra_flags = CRYPTO_ALG_ASYNC |
1286 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1287 			.cra_blocksize = AES_BLOCK_SIZE,
1288 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1289 			.cra_alignmask = 0,
1290 			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
1291 			.cra_exit = safexcel_skcipher_cra_exit,
1292 			.cra_module = THIS_MODULE,
1293 		},
1294 	},
1295 };
1296 
1297 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1298 {
1299 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1300 
1301 	safexcel_skcipher_cra_init(tfm);
1302 	ctx->alg  = SAFEXCEL_AES;
1303 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1304 	return 0;
1305 }
1306 
1307 struct safexcel_alg_template safexcel_alg_cfb_aes = {
1308 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1309 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1310 	.alg.skcipher = {
1311 		.setkey = safexcel_skcipher_aes_setkey,
1312 		.encrypt = safexcel_encrypt,
1313 		.decrypt = safexcel_decrypt,
1314 		.min_keysize = AES_MIN_KEY_SIZE,
1315 		.max_keysize = AES_MAX_KEY_SIZE,
1316 		.ivsize = AES_BLOCK_SIZE,
1317 		.base = {
1318 			.cra_name = "cfb(aes)",
1319 			.cra_driver_name = "safexcel-cfb-aes",
1320 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1321 			.cra_flags = CRYPTO_ALG_ASYNC |
1322 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1323 			.cra_blocksize = 1,
1324 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1325 			.cra_alignmask = 0,
1326 			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
1327 			.cra_exit = safexcel_skcipher_cra_exit,
1328 			.cra_module = THIS_MODULE,
1329 		},
1330 	},
1331 };
1332 
1333 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1334 {
1335 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1336 
1337 	safexcel_skcipher_cra_init(tfm);
1338 	ctx->alg  = SAFEXCEL_AES;
1339 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1340 	return 0;
1341 }
1342 
1343 struct safexcel_alg_template safexcel_alg_ofb_aes = {
1344 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1345 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1346 	.alg.skcipher = {
1347 		.setkey = safexcel_skcipher_aes_setkey,
1348 		.encrypt = safexcel_encrypt,
1349 		.decrypt = safexcel_decrypt,
1350 		.min_keysize = AES_MIN_KEY_SIZE,
1351 		.max_keysize = AES_MAX_KEY_SIZE,
1352 		.ivsize = AES_BLOCK_SIZE,
1353 		.base = {
1354 			.cra_name = "ofb(aes)",
1355 			.cra_driver_name = "safexcel-ofb-aes",
1356 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1357 			.cra_flags = CRYPTO_ALG_ASYNC |
1358 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1359 			.cra_blocksize = 1,
1360 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1361 			.cra_alignmask = 0,
1362 			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
1363 			.cra_exit = safexcel_skcipher_cra_exit,
1364 			.cra_module = THIS_MODULE,
1365 		},
1366 	},
1367 };
1368 
1369 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1370 					   const u8 *key, unsigned int len)
1371 {
1372 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1373 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1374 	struct safexcel_crypto_priv *priv = ctx->priv;
1375 	struct crypto_aes_ctx aes;
1376 	int ret, i;
1377 	unsigned int keylen;
1378 
1379 	/* last 4 bytes of key are the nonce! */
1380 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1381 	/* exclude the nonce here */
1382 	keylen = len - CTR_RFC3686_NONCE_SIZE;
1383 	ret = aes_expandkey(&aes, key, keylen);
1384 	if (ret) {
1385 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1386 		return ret;
1387 	}
1388 
1389 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1390 		for (i = 0; i < keylen / sizeof(u32); i++) {
1391 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1392 				ctx->base.needs_inv = true;
1393 				break;
1394 			}
1395 		}
1396 	}
1397 
1398 	for (i = 0; i < keylen / sizeof(u32); i++)
1399 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1400 
1401 	ctx->key_len = keylen;
1402 
1403 	memzero_explicit(&aes, sizeof(aes));
1404 	return 0;
1405 }
1406 
1407 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1408 {
1409 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1410 
1411 	safexcel_skcipher_cra_init(tfm);
1412 	ctx->alg  = SAFEXCEL_AES;
1413 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1414 	return 0;
1415 }
1416 
1417 struct safexcel_alg_template safexcel_alg_ctr_aes = {
1418 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1419 	.algo_mask = SAFEXCEL_ALG_AES,
1420 	.alg.skcipher = {
1421 		.setkey = safexcel_skcipher_aesctr_setkey,
1422 		.encrypt = safexcel_encrypt,
1423 		.decrypt = safexcel_decrypt,
1424 		/* Add nonce size */
1425 		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1426 		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1427 		.ivsize = CTR_RFC3686_IV_SIZE,
1428 		.base = {
1429 			.cra_name = "rfc3686(ctr(aes))",
1430 			.cra_driver_name = "safexcel-ctr-aes",
1431 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1432 			.cra_flags = CRYPTO_ALG_ASYNC |
1433 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1434 			.cra_blocksize = 1,
1435 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1436 			.cra_alignmask = 0,
1437 			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
1438 			.cra_exit = safexcel_skcipher_cra_exit,
1439 			.cra_module = THIS_MODULE,
1440 		},
1441 	},
1442 };
1443 
1444 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1445 			       unsigned int len)
1446 {
1447 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1448 	int ret;
1449 
1450 	ret = verify_skcipher_des_key(ctfm, key);
1451 	if (ret)
1452 		return ret;
1453 
1454 	/* if context exits and key changed, need to invalidate it */
1455 	if (ctx->base.ctxr_dma)
1456 		if (memcmp(ctx->key, key, len))
1457 			ctx->base.needs_inv = true;
1458 
1459 	memcpy(ctx->key, key, len);
1460 	ctx->key_len = len;
1461 
1462 	return 0;
1463 }
1464 
1465 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1466 {
1467 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1468 
1469 	safexcel_skcipher_cra_init(tfm);
1470 	ctx->alg  = SAFEXCEL_DES;
1471 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1472 	return 0;
1473 }
1474 
1475 struct safexcel_alg_template safexcel_alg_cbc_des = {
1476 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1477 	.algo_mask = SAFEXCEL_ALG_DES,
1478 	.alg.skcipher = {
1479 		.setkey = safexcel_des_setkey,
1480 		.encrypt = safexcel_encrypt,
1481 		.decrypt = safexcel_decrypt,
1482 		.min_keysize = DES_KEY_SIZE,
1483 		.max_keysize = DES_KEY_SIZE,
1484 		.ivsize = DES_BLOCK_SIZE,
1485 		.base = {
1486 			.cra_name = "cbc(des)",
1487 			.cra_driver_name = "safexcel-cbc-des",
1488 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1489 			.cra_flags = CRYPTO_ALG_ASYNC |
1490 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1491 			.cra_blocksize = DES_BLOCK_SIZE,
1492 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1493 			.cra_alignmask = 0,
1494 			.cra_init = safexcel_skcipher_des_cbc_cra_init,
1495 			.cra_exit = safexcel_skcipher_cra_exit,
1496 			.cra_module = THIS_MODULE,
1497 		},
1498 	},
1499 };
1500 
1501 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1502 {
1503 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1504 
1505 	safexcel_skcipher_cra_init(tfm);
1506 	ctx->alg  = SAFEXCEL_DES;
1507 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1508 	return 0;
1509 }
1510 
1511 struct safexcel_alg_template safexcel_alg_ecb_des = {
1512 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1513 	.algo_mask = SAFEXCEL_ALG_DES,
1514 	.alg.skcipher = {
1515 		.setkey = safexcel_des_setkey,
1516 		.encrypt = safexcel_encrypt,
1517 		.decrypt = safexcel_decrypt,
1518 		.min_keysize = DES_KEY_SIZE,
1519 		.max_keysize = DES_KEY_SIZE,
1520 		.base = {
1521 			.cra_name = "ecb(des)",
1522 			.cra_driver_name = "safexcel-ecb-des",
1523 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1524 			.cra_flags = CRYPTO_ALG_ASYNC |
1525 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1526 			.cra_blocksize = DES_BLOCK_SIZE,
1527 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1528 			.cra_alignmask = 0,
1529 			.cra_init = safexcel_skcipher_des_ecb_cra_init,
1530 			.cra_exit = safexcel_skcipher_cra_exit,
1531 			.cra_module = THIS_MODULE,
1532 		},
1533 	},
1534 };
1535 
1536 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1537 				   const u8 *key, unsigned int len)
1538 {
1539 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1540 	int err;
1541 
1542 	err = verify_skcipher_des3_key(ctfm, key);
1543 	if (err)
1544 		return err;
1545 
1546 	/* if context exits and key changed, need to invalidate it */
1547 	if (ctx->base.ctxr_dma)
1548 		if (memcmp(ctx->key, key, len))
1549 			ctx->base.needs_inv = true;
1550 
1551 	memcpy(ctx->key, key, len);
1552 	ctx->key_len = len;
1553 
1554 	return 0;
1555 }
1556 
1557 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1558 {
1559 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1560 
1561 	safexcel_skcipher_cra_init(tfm);
1562 	ctx->alg  = SAFEXCEL_3DES;
1563 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1564 	return 0;
1565 }
1566 
1567 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1568 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1569 	.algo_mask = SAFEXCEL_ALG_DES,
1570 	.alg.skcipher = {
1571 		.setkey = safexcel_des3_ede_setkey,
1572 		.encrypt = safexcel_encrypt,
1573 		.decrypt = safexcel_decrypt,
1574 		.min_keysize = DES3_EDE_KEY_SIZE,
1575 		.max_keysize = DES3_EDE_KEY_SIZE,
1576 		.ivsize = DES3_EDE_BLOCK_SIZE,
1577 		.base = {
1578 			.cra_name = "cbc(des3_ede)",
1579 			.cra_driver_name = "safexcel-cbc-des3_ede",
1580 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1581 			.cra_flags = CRYPTO_ALG_ASYNC |
1582 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1583 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1584 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1585 			.cra_alignmask = 0,
1586 			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
1587 			.cra_exit = safexcel_skcipher_cra_exit,
1588 			.cra_module = THIS_MODULE,
1589 		},
1590 	},
1591 };
1592 
1593 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1594 {
1595 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1596 
1597 	safexcel_skcipher_cra_init(tfm);
1598 	ctx->alg  = SAFEXCEL_3DES;
1599 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1600 	return 0;
1601 }
1602 
1603 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1604 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1605 	.algo_mask = SAFEXCEL_ALG_DES,
1606 	.alg.skcipher = {
1607 		.setkey = safexcel_des3_ede_setkey,
1608 		.encrypt = safexcel_encrypt,
1609 		.decrypt = safexcel_decrypt,
1610 		.min_keysize = DES3_EDE_KEY_SIZE,
1611 		.max_keysize = DES3_EDE_KEY_SIZE,
1612 		.base = {
1613 			.cra_name = "ecb(des3_ede)",
1614 			.cra_driver_name = "safexcel-ecb-des3_ede",
1615 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1616 			.cra_flags = CRYPTO_ALG_ASYNC |
1617 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1618 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1619 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1620 			.cra_alignmask = 0,
1621 			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
1622 			.cra_exit = safexcel_skcipher_cra_exit,
1623 			.cra_module = THIS_MODULE,
1624 		},
1625 	},
1626 };
1627 
1628 static int safexcel_aead_encrypt(struct aead_request *req)
1629 {
1630 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1631 
1632 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1633 }
1634 
1635 static int safexcel_aead_decrypt(struct aead_request *req)
1636 {
1637 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1638 
1639 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1640 }
1641 
1642 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1643 {
1644 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1645 	struct safexcel_alg_template *tmpl =
1646 		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1647 			     alg.aead.base);
1648 
1649 	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1650 				sizeof(struct safexcel_cipher_req));
1651 
1652 	ctx->priv = tmpl->priv;
1653 
1654 	ctx->alg  = SAFEXCEL_AES; /* default */
1655 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1656 	ctx->aead = true;
1657 	ctx->base.send = safexcel_aead_send;
1658 	ctx->base.handle_result = safexcel_aead_handle_result;
1659 	return 0;
1660 }
1661 
1662 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1663 {
1664 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1665 
1666 	safexcel_aead_cra_init(tfm);
1667 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1668 	ctx->state_sz = SHA1_DIGEST_SIZE;
1669 	return 0;
1670 }
1671 
1672 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1673 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1674 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1675 	.alg.aead = {
1676 		.setkey = safexcel_aead_setkey,
1677 		.encrypt = safexcel_aead_encrypt,
1678 		.decrypt = safexcel_aead_decrypt,
1679 		.ivsize = AES_BLOCK_SIZE,
1680 		.maxauthsize = SHA1_DIGEST_SIZE,
1681 		.base = {
1682 			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1683 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1684 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1685 			.cra_flags = CRYPTO_ALG_ASYNC |
1686 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1687 			.cra_blocksize = AES_BLOCK_SIZE,
1688 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1689 			.cra_alignmask = 0,
1690 			.cra_init = safexcel_aead_sha1_cra_init,
1691 			.cra_exit = safexcel_aead_cra_exit,
1692 			.cra_module = THIS_MODULE,
1693 		},
1694 	},
1695 };
1696 
1697 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1698 {
1699 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1700 
1701 	safexcel_aead_cra_init(tfm);
1702 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1703 	ctx->state_sz = SHA256_DIGEST_SIZE;
1704 	return 0;
1705 }
1706 
1707 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1708 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1709 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1710 	.alg.aead = {
1711 		.setkey = safexcel_aead_setkey,
1712 		.encrypt = safexcel_aead_encrypt,
1713 		.decrypt = safexcel_aead_decrypt,
1714 		.ivsize = AES_BLOCK_SIZE,
1715 		.maxauthsize = SHA256_DIGEST_SIZE,
1716 		.base = {
1717 			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1718 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1719 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1720 			.cra_flags = CRYPTO_ALG_ASYNC |
1721 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1722 			.cra_blocksize = AES_BLOCK_SIZE,
1723 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1724 			.cra_alignmask = 0,
1725 			.cra_init = safexcel_aead_sha256_cra_init,
1726 			.cra_exit = safexcel_aead_cra_exit,
1727 			.cra_module = THIS_MODULE,
1728 		},
1729 	},
1730 };
1731 
1732 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1733 {
1734 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1735 
1736 	safexcel_aead_cra_init(tfm);
1737 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1738 	ctx->state_sz = SHA256_DIGEST_SIZE;
1739 	return 0;
1740 }
1741 
1742 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1743 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1744 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1745 	.alg.aead = {
1746 		.setkey = safexcel_aead_setkey,
1747 		.encrypt = safexcel_aead_encrypt,
1748 		.decrypt = safexcel_aead_decrypt,
1749 		.ivsize = AES_BLOCK_SIZE,
1750 		.maxauthsize = SHA224_DIGEST_SIZE,
1751 		.base = {
1752 			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1753 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1754 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1755 			.cra_flags = CRYPTO_ALG_ASYNC |
1756 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1757 			.cra_blocksize = AES_BLOCK_SIZE,
1758 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1759 			.cra_alignmask = 0,
1760 			.cra_init = safexcel_aead_sha224_cra_init,
1761 			.cra_exit = safexcel_aead_cra_exit,
1762 			.cra_module = THIS_MODULE,
1763 		},
1764 	},
1765 };
1766 
1767 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1768 {
1769 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1770 
1771 	safexcel_aead_cra_init(tfm);
1772 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1773 	ctx->state_sz = SHA512_DIGEST_SIZE;
1774 	return 0;
1775 }
1776 
1777 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1778 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1779 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1780 	.alg.aead = {
1781 		.setkey = safexcel_aead_setkey,
1782 		.encrypt = safexcel_aead_encrypt,
1783 		.decrypt = safexcel_aead_decrypt,
1784 		.ivsize = AES_BLOCK_SIZE,
1785 		.maxauthsize = SHA512_DIGEST_SIZE,
1786 		.base = {
1787 			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1788 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1789 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1790 			.cra_flags = CRYPTO_ALG_ASYNC |
1791 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1792 			.cra_blocksize = AES_BLOCK_SIZE,
1793 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1794 			.cra_alignmask = 0,
1795 			.cra_init = safexcel_aead_sha512_cra_init,
1796 			.cra_exit = safexcel_aead_cra_exit,
1797 			.cra_module = THIS_MODULE,
1798 		},
1799 	},
1800 };
1801 
1802 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1803 {
1804 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1805 
1806 	safexcel_aead_cra_init(tfm);
1807 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1808 	ctx->state_sz = SHA512_DIGEST_SIZE;
1809 	return 0;
1810 }
1811 
1812 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1813 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1814 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1815 	.alg.aead = {
1816 		.setkey = safexcel_aead_setkey,
1817 		.encrypt = safexcel_aead_encrypt,
1818 		.decrypt = safexcel_aead_decrypt,
1819 		.ivsize = AES_BLOCK_SIZE,
1820 		.maxauthsize = SHA384_DIGEST_SIZE,
1821 		.base = {
1822 			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1823 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1824 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1825 			.cra_flags = CRYPTO_ALG_ASYNC |
1826 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1827 			.cra_blocksize = AES_BLOCK_SIZE,
1828 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1829 			.cra_alignmask = 0,
1830 			.cra_init = safexcel_aead_sha384_cra_init,
1831 			.cra_exit = safexcel_aead_cra_exit,
1832 			.cra_module = THIS_MODULE,
1833 		},
1834 	},
1835 };
1836 
1837 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1838 {
1839 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1840 
1841 	safexcel_aead_sha1_cra_init(tfm);
1842 	ctx->alg = SAFEXCEL_3DES; /* override default */
1843 	return 0;
1844 }
1845 
1846 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1847 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1848 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1849 	.alg.aead = {
1850 		.setkey = safexcel_aead_setkey,
1851 		.encrypt = safexcel_aead_encrypt,
1852 		.decrypt = safexcel_aead_decrypt,
1853 		.ivsize = DES3_EDE_BLOCK_SIZE,
1854 		.maxauthsize = SHA1_DIGEST_SIZE,
1855 		.base = {
1856 			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1857 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1858 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1859 			.cra_flags = CRYPTO_ALG_ASYNC |
1860 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1861 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1862 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1863 			.cra_alignmask = 0,
1864 			.cra_init = safexcel_aead_sha1_des3_cra_init,
1865 			.cra_exit = safexcel_aead_cra_exit,
1866 			.cra_module = THIS_MODULE,
1867 		},
1868 	},
1869 };
1870 
1871 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1872 {
1873 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1874 
1875 	safexcel_aead_sha256_cra_init(tfm);
1876 	ctx->alg = SAFEXCEL_3DES; /* override default */
1877 	return 0;
1878 }
1879 
1880 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1881 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1882 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1883 	.alg.aead = {
1884 		.setkey = safexcel_aead_setkey,
1885 		.encrypt = safexcel_aead_encrypt,
1886 		.decrypt = safexcel_aead_decrypt,
1887 		.ivsize = DES3_EDE_BLOCK_SIZE,
1888 		.maxauthsize = SHA256_DIGEST_SIZE,
1889 		.base = {
1890 			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1891 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1892 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1893 			.cra_flags = CRYPTO_ALG_ASYNC |
1894 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1895 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1896 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1897 			.cra_alignmask = 0,
1898 			.cra_init = safexcel_aead_sha256_des3_cra_init,
1899 			.cra_exit = safexcel_aead_cra_exit,
1900 			.cra_module = THIS_MODULE,
1901 		},
1902 	},
1903 };
1904 
1905 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1906 {
1907 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1908 
1909 	safexcel_aead_sha224_cra_init(tfm);
1910 	ctx->alg = SAFEXCEL_3DES; /* override default */
1911 	return 0;
1912 }
1913 
1914 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1915 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1916 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1917 	.alg.aead = {
1918 		.setkey = safexcel_aead_setkey,
1919 		.encrypt = safexcel_aead_encrypt,
1920 		.decrypt = safexcel_aead_decrypt,
1921 		.ivsize = DES3_EDE_BLOCK_SIZE,
1922 		.maxauthsize = SHA224_DIGEST_SIZE,
1923 		.base = {
1924 			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1925 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1926 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1927 			.cra_flags = CRYPTO_ALG_ASYNC |
1928 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1929 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1930 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1931 			.cra_alignmask = 0,
1932 			.cra_init = safexcel_aead_sha224_des3_cra_init,
1933 			.cra_exit = safexcel_aead_cra_exit,
1934 			.cra_module = THIS_MODULE,
1935 		},
1936 	},
1937 };
1938 
1939 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
1940 {
1941 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1942 
1943 	safexcel_aead_sha512_cra_init(tfm);
1944 	ctx->alg = SAFEXCEL_3DES; /* override default */
1945 	return 0;
1946 }
1947 
1948 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
1949 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1950 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1951 	.alg.aead = {
1952 		.setkey = safexcel_aead_setkey,
1953 		.encrypt = safexcel_aead_encrypt,
1954 		.decrypt = safexcel_aead_decrypt,
1955 		.ivsize = DES3_EDE_BLOCK_SIZE,
1956 		.maxauthsize = SHA512_DIGEST_SIZE,
1957 		.base = {
1958 			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
1959 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
1960 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1961 			.cra_flags = CRYPTO_ALG_ASYNC |
1962 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1963 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1964 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1965 			.cra_alignmask = 0,
1966 			.cra_init = safexcel_aead_sha512_des3_cra_init,
1967 			.cra_exit = safexcel_aead_cra_exit,
1968 			.cra_module = THIS_MODULE,
1969 		},
1970 	},
1971 };
1972 
1973 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
1974 {
1975 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1976 
1977 	safexcel_aead_sha384_cra_init(tfm);
1978 	ctx->alg = SAFEXCEL_3DES; /* override default */
1979 	return 0;
1980 }
1981 
1982 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
1983 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1984 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1985 	.alg.aead = {
1986 		.setkey = safexcel_aead_setkey,
1987 		.encrypt = safexcel_aead_encrypt,
1988 		.decrypt = safexcel_aead_decrypt,
1989 		.ivsize = DES3_EDE_BLOCK_SIZE,
1990 		.maxauthsize = SHA384_DIGEST_SIZE,
1991 		.base = {
1992 			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
1993 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
1994 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1995 			.cra_flags = CRYPTO_ALG_ASYNC |
1996 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1997 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1998 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1999 			.cra_alignmask = 0,
2000 			.cra_init = safexcel_aead_sha384_des3_cra_init,
2001 			.cra_exit = safexcel_aead_cra_exit,
2002 			.cra_module = THIS_MODULE,
2003 		},
2004 	},
2005 };
2006 
2007 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2008 {
2009 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2010 
2011 	safexcel_aead_sha1_cra_init(tfm);
2012 	ctx->alg = SAFEXCEL_DES; /* override default */
2013 	return 0;
2014 }
2015 
2016 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2017 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2018 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2019 	.alg.aead = {
2020 		.setkey = safexcel_aead_setkey,
2021 		.encrypt = safexcel_aead_encrypt,
2022 		.decrypt = safexcel_aead_decrypt,
2023 		.ivsize = DES_BLOCK_SIZE,
2024 		.maxauthsize = SHA1_DIGEST_SIZE,
2025 		.base = {
2026 			.cra_name = "authenc(hmac(sha1),cbc(des))",
2027 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2028 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2029 			.cra_flags = CRYPTO_ALG_ASYNC |
2030 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2031 			.cra_blocksize = DES_BLOCK_SIZE,
2032 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2033 			.cra_alignmask = 0,
2034 			.cra_init = safexcel_aead_sha1_des_cra_init,
2035 			.cra_exit = safexcel_aead_cra_exit,
2036 			.cra_module = THIS_MODULE,
2037 		},
2038 	},
2039 };
2040 
2041 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2042 {
2043 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2044 
2045 	safexcel_aead_sha256_cra_init(tfm);
2046 	ctx->alg = SAFEXCEL_DES; /* override default */
2047 	return 0;
2048 }
2049 
2050 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2051 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2052 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2053 	.alg.aead = {
2054 		.setkey = safexcel_aead_setkey,
2055 		.encrypt = safexcel_aead_encrypt,
2056 		.decrypt = safexcel_aead_decrypt,
2057 		.ivsize = DES_BLOCK_SIZE,
2058 		.maxauthsize = SHA256_DIGEST_SIZE,
2059 		.base = {
2060 			.cra_name = "authenc(hmac(sha256),cbc(des))",
2061 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2062 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2063 			.cra_flags = CRYPTO_ALG_ASYNC |
2064 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2065 			.cra_blocksize = DES_BLOCK_SIZE,
2066 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2067 			.cra_alignmask = 0,
2068 			.cra_init = safexcel_aead_sha256_des_cra_init,
2069 			.cra_exit = safexcel_aead_cra_exit,
2070 			.cra_module = THIS_MODULE,
2071 		},
2072 	},
2073 };
2074 
2075 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2076 {
2077 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2078 
2079 	safexcel_aead_sha224_cra_init(tfm);
2080 	ctx->alg = SAFEXCEL_DES; /* override default */
2081 	return 0;
2082 }
2083 
2084 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2085 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2086 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2087 	.alg.aead = {
2088 		.setkey = safexcel_aead_setkey,
2089 		.encrypt = safexcel_aead_encrypt,
2090 		.decrypt = safexcel_aead_decrypt,
2091 		.ivsize = DES_BLOCK_SIZE,
2092 		.maxauthsize = SHA224_DIGEST_SIZE,
2093 		.base = {
2094 			.cra_name = "authenc(hmac(sha224),cbc(des))",
2095 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2096 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2097 			.cra_flags = CRYPTO_ALG_ASYNC |
2098 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2099 			.cra_blocksize = DES_BLOCK_SIZE,
2100 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2101 			.cra_alignmask = 0,
2102 			.cra_init = safexcel_aead_sha224_des_cra_init,
2103 			.cra_exit = safexcel_aead_cra_exit,
2104 			.cra_module = THIS_MODULE,
2105 		},
2106 	},
2107 };
2108 
2109 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2110 {
2111 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2112 
2113 	safexcel_aead_sha512_cra_init(tfm);
2114 	ctx->alg = SAFEXCEL_DES; /* override default */
2115 	return 0;
2116 }
2117 
2118 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2119 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2120 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2121 	.alg.aead = {
2122 		.setkey = safexcel_aead_setkey,
2123 		.encrypt = safexcel_aead_encrypt,
2124 		.decrypt = safexcel_aead_decrypt,
2125 		.ivsize = DES_BLOCK_SIZE,
2126 		.maxauthsize = SHA512_DIGEST_SIZE,
2127 		.base = {
2128 			.cra_name = "authenc(hmac(sha512),cbc(des))",
2129 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2130 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2131 			.cra_flags = CRYPTO_ALG_ASYNC |
2132 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2133 			.cra_blocksize = DES_BLOCK_SIZE,
2134 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2135 			.cra_alignmask = 0,
2136 			.cra_init = safexcel_aead_sha512_des_cra_init,
2137 			.cra_exit = safexcel_aead_cra_exit,
2138 			.cra_module = THIS_MODULE,
2139 		},
2140 	},
2141 };
2142 
2143 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2144 {
2145 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2146 
2147 	safexcel_aead_sha384_cra_init(tfm);
2148 	ctx->alg = SAFEXCEL_DES; /* override default */
2149 	return 0;
2150 }
2151 
2152 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2153 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2154 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2155 	.alg.aead = {
2156 		.setkey = safexcel_aead_setkey,
2157 		.encrypt = safexcel_aead_encrypt,
2158 		.decrypt = safexcel_aead_decrypt,
2159 		.ivsize = DES_BLOCK_SIZE,
2160 		.maxauthsize = SHA384_DIGEST_SIZE,
2161 		.base = {
2162 			.cra_name = "authenc(hmac(sha384),cbc(des))",
2163 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2164 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2165 			.cra_flags = CRYPTO_ALG_ASYNC |
2166 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2167 			.cra_blocksize = DES_BLOCK_SIZE,
2168 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2169 			.cra_alignmask = 0,
2170 			.cra_init = safexcel_aead_sha384_des_cra_init,
2171 			.cra_exit = safexcel_aead_cra_exit,
2172 			.cra_module = THIS_MODULE,
2173 		},
2174 	},
2175 };
2176 
2177 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2178 {
2179 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2180 
2181 	safexcel_aead_sha1_cra_init(tfm);
2182 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2183 	return 0;
2184 }
2185 
2186 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2187 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2188 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2189 	.alg.aead = {
2190 		.setkey = safexcel_aead_setkey,
2191 		.encrypt = safexcel_aead_encrypt,
2192 		.decrypt = safexcel_aead_decrypt,
2193 		.ivsize = CTR_RFC3686_IV_SIZE,
2194 		.maxauthsize = SHA1_DIGEST_SIZE,
2195 		.base = {
2196 			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2197 			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2198 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2199 			.cra_flags = CRYPTO_ALG_ASYNC |
2200 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2201 			.cra_blocksize = 1,
2202 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2203 			.cra_alignmask = 0,
2204 			.cra_init = safexcel_aead_sha1_ctr_cra_init,
2205 			.cra_exit = safexcel_aead_cra_exit,
2206 			.cra_module = THIS_MODULE,
2207 		},
2208 	},
2209 };
2210 
2211 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2212 {
2213 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2214 
2215 	safexcel_aead_sha256_cra_init(tfm);
2216 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2217 	return 0;
2218 }
2219 
2220 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2221 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2222 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2223 	.alg.aead = {
2224 		.setkey = safexcel_aead_setkey,
2225 		.encrypt = safexcel_aead_encrypt,
2226 		.decrypt = safexcel_aead_decrypt,
2227 		.ivsize = CTR_RFC3686_IV_SIZE,
2228 		.maxauthsize = SHA256_DIGEST_SIZE,
2229 		.base = {
2230 			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2231 			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2232 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2233 			.cra_flags = CRYPTO_ALG_ASYNC |
2234 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2235 			.cra_blocksize = 1,
2236 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2237 			.cra_alignmask = 0,
2238 			.cra_init = safexcel_aead_sha256_ctr_cra_init,
2239 			.cra_exit = safexcel_aead_cra_exit,
2240 			.cra_module = THIS_MODULE,
2241 		},
2242 	},
2243 };
2244 
2245 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2246 {
2247 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2248 
2249 	safexcel_aead_sha224_cra_init(tfm);
2250 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2251 	return 0;
2252 }
2253 
2254 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2255 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2256 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2257 	.alg.aead = {
2258 		.setkey = safexcel_aead_setkey,
2259 		.encrypt = safexcel_aead_encrypt,
2260 		.decrypt = safexcel_aead_decrypt,
2261 		.ivsize = CTR_RFC3686_IV_SIZE,
2262 		.maxauthsize = SHA224_DIGEST_SIZE,
2263 		.base = {
2264 			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2265 			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2266 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2267 			.cra_flags = CRYPTO_ALG_ASYNC |
2268 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2269 			.cra_blocksize = 1,
2270 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2271 			.cra_alignmask = 0,
2272 			.cra_init = safexcel_aead_sha224_ctr_cra_init,
2273 			.cra_exit = safexcel_aead_cra_exit,
2274 			.cra_module = THIS_MODULE,
2275 		},
2276 	},
2277 };
2278 
2279 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2280 {
2281 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2282 
2283 	safexcel_aead_sha512_cra_init(tfm);
2284 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2285 	return 0;
2286 }
2287 
2288 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2289 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2290 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2291 	.alg.aead = {
2292 		.setkey = safexcel_aead_setkey,
2293 		.encrypt = safexcel_aead_encrypt,
2294 		.decrypt = safexcel_aead_decrypt,
2295 		.ivsize = CTR_RFC3686_IV_SIZE,
2296 		.maxauthsize = SHA512_DIGEST_SIZE,
2297 		.base = {
2298 			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2299 			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2300 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2301 			.cra_flags = CRYPTO_ALG_ASYNC |
2302 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2303 			.cra_blocksize = 1,
2304 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2305 			.cra_alignmask = 0,
2306 			.cra_init = safexcel_aead_sha512_ctr_cra_init,
2307 			.cra_exit = safexcel_aead_cra_exit,
2308 			.cra_module = THIS_MODULE,
2309 		},
2310 	},
2311 };
2312 
2313 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2314 {
2315 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2316 
2317 	safexcel_aead_sha384_cra_init(tfm);
2318 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2319 	return 0;
2320 }
2321 
2322 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2323 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2324 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2325 	.alg.aead = {
2326 		.setkey = safexcel_aead_setkey,
2327 		.encrypt = safexcel_aead_encrypt,
2328 		.decrypt = safexcel_aead_decrypt,
2329 		.ivsize = CTR_RFC3686_IV_SIZE,
2330 		.maxauthsize = SHA384_DIGEST_SIZE,
2331 		.base = {
2332 			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2333 			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2334 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2335 			.cra_flags = CRYPTO_ALG_ASYNC |
2336 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2337 			.cra_blocksize = 1,
2338 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2339 			.cra_alignmask = 0,
2340 			.cra_init = safexcel_aead_sha384_ctr_cra_init,
2341 			.cra_exit = safexcel_aead_cra_exit,
2342 			.cra_module = THIS_MODULE,
2343 		},
2344 	},
2345 };
2346 
2347 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2348 					   const u8 *key, unsigned int len)
2349 {
2350 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2351 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2352 	struct safexcel_crypto_priv *priv = ctx->priv;
2353 	struct crypto_aes_ctx aes;
2354 	int ret, i;
2355 	unsigned int keylen;
2356 
2357 	/* Check for illegal XTS keys */
2358 	ret = xts_verify_key(ctfm, key, len);
2359 	if (ret)
2360 		return ret;
2361 
2362 	/* Only half of the key data is cipher key */
2363 	keylen = (len >> 1);
2364 	ret = aes_expandkey(&aes, key, keylen);
2365 	if (ret) {
2366 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2367 		return ret;
2368 	}
2369 
2370 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2371 		for (i = 0; i < keylen / sizeof(u32); i++) {
2372 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2373 				ctx->base.needs_inv = true;
2374 				break;
2375 			}
2376 		}
2377 	}
2378 
2379 	for (i = 0; i < keylen / sizeof(u32); i++)
2380 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2381 
2382 	/* The other half is the tweak key */
2383 	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2384 	if (ret) {
2385 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2386 		return ret;
2387 	}
2388 
2389 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2390 		for (i = 0; i < keylen / sizeof(u32); i++) {
2391 			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2392 			    aes.key_enc[i]) {
2393 				ctx->base.needs_inv = true;
2394 				break;
2395 			}
2396 		}
2397 	}
2398 
2399 	for (i = 0; i < keylen / sizeof(u32); i++)
2400 		ctx->key[i + keylen / sizeof(u32)] =
2401 			cpu_to_le32(aes.key_enc[i]);
2402 
2403 	ctx->key_len = keylen << 1;
2404 
2405 	memzero_explicit(&aes, sizeof(aes));
2406 	return 0;
2407 }
2408 
2409 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2410 {
2411 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2412 
2413 	safexcel_skcipher_cra_init(tfm);
2414 	ctx->alg  = SAFEXCEL_AES;
2415 	ctx->xts  = 1;
2416 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2417 	return 0;
2418 }
2419 
2420 static int safexcel_encrypt_xts(struct skcipher_request *req)
2421 {
2422 	if (req->cryptlen < XTS_BLOCK_SIZE)
2423 		return -EINVAL;
2424 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2425 				  SAFEXCEL_ENCRYPT);
2426 }
2427 
2428 static int safexcel_decrypt_xts(struct skcipher_request *req)
2429 {
2430 	if (req->cryptlen < XTS_BLOCK_SIZE)
2431 		return -EINVAL;
2432 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2433 				  SAFEXCEL_DECRYPT);
2434 }
2435 
2436 struct safexcel_alg_template safexcel_alg_xts_aes = {
2437 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2438 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2439 	.alg.skcipher = {
2440 		.setkey = safexcel_skcipher_aesxts_setkey,
2441 		.encrypt = safexcel_encrypt_xts,
2442 		.decrypt = safexcel_decrypt_xts,
2443 		/* XTS actually uses 2 AES keys glued together */
2444 		.min_keysize = AES_MIN_KEY_SIZE * 2,
2445 		.max_keysize = AES_MAX_KEY_SIZE * 2,
2446 		.ivsize = XTS_BLOCK_SIZE,
2447 		.base = {
2448 			.cra_name = "xts(aes)",
2449 			.cra_driver_name = "safexcel-xts-aes",
2450 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2451 			.cra_flags = CRYPTO_ALG_ASYNC |
2452 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2453 			.cra_blocksize = XTS_BLOCK_SIZE,
2454 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2455 			.cra_alignmask = 0,
2456 			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2457 			.cra_exit = safexcel_skcipher_cra_exit,
2458 			.cra_module = THIS_MODULE,
2459 		},
2460 	},
2461 };
2462 
2463 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2464 				    unsigned int len)
2465 {
2466 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2467 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2468 	struct safexcel_crypto_priv *priv = ctx->priv;
2469 	struct crypto_aes_ctx aes;
2470 	u32 hashkey[AES_BLOCK_SIZE >> 2];
2471 	int ret, i;
2472 
2473 	ret = aes_expandkey(&aes, key, len);
2474 	if (ret) {
2475 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2476 		memzero_explicit(&aes, sizeof(aes));
2477 		return ret;
2478 	}
2479 
2480 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2481 		for (i = 0; i < len / sizeof(u32); i++) {
2482 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2483 				ctx->base.needs_inv = true;
2484 				break;
2485 			}
2486 		}
2487 	}
2488 
2489 	for (i = 0; i < len / sizeof(u32); i++)
2490 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2491 
2492 	ctx->key_len = len;
2493 
2494 	/* Compute hash key by encrypting zeroes with cipher key */
2495 	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2496 	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2497 				CRYPTO_TFM_REQ_MASK);
2498 	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2499 	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
2500 			      CRYPTO_TFM_RES_MASK);
2501 	if (ret)
2502 		return ret;
2503 
2504 	memset(hashkey, 0, AES_BLOCK_SIZE);
2505 	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2506 
2507 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2508 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2509 			if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
2510 				ctx->base.needs_inv = true;
2511 				break;
2512 			}
2513 		}
2514 	}
2515 
2516 	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2517 		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
2518 
2519 	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2520 	memzero_explicit(&aes, sizeof(aes));
2521 	return 0;
2522 }
2523 
2524 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2525 {
2526 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2527 
2528 	safexcel_aead_cra_init(tfm);
2529 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2530 	ctx->state_sz = GHASH_BLOCK_SIZE;
2531 	ctx->xcm = EIP197_XCM_MODE_GCM;
2532 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2533 
2534 	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2535 	if (IS_ERR(ctx->hkaes))
2536 		return PTR_ERR(ctx->hkaes);
2537 
2538 	return 0;
2539 }
2540 
2541 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2542 {
2543 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2544 
2545 	crypto_free_cipher(ctx->hkaes);
2546 	safexcel_aead_cra_exit(tfm);
2547 }
2548 
2549 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2550 					 unsigned int authsize)
2551 {
2552 	return crypto_gcm_check_authsize(authsize);
2553 }
2554 
2555 struct safexcel_alg_template safexcel_alg_gcm = {
2556 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2557 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2558 	.alg.aead = {
2559 		.setkey = safexcel_aead_gcm_setkey,
2560 		.setauthsize = safexcel_aead_gcm_setauthsize,
2561 		.encrypt = safexcel_aead_encrypt,
2562 		.decrypt = safexcel_aead_decrypt,
2563 		.ivsize = GCM_AES_IV_SIZE,
2564 		.maxauthsize = GHASH_DIGEST_SIZE,
2565 		.base = {
2566 			.cra_name = "gcm(aes)",
2567 			.cra_driver_name = "safexcel-gcm-aes",
2568 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2569 			.cra_flags = CRYPTO_ALG_ASYNC |
2570 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2571 			.cra_blocksize = 1,
2572 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2573 			.cra_alignmask = 0,
2574 			.cra_init = safexcel_aead_gcm_cra_init,
2575 			.cra_exit = safexcel_aead_gcm_cra_exit,
2576 			.cra_module = THIS_MODULE,
2577 		},
2578 	},
2579 };
2580 
2581 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2582 				    unsigned int len)
2583 {
2584 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2585 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2586 	struct safexcel_crypto_priv *priv = ctx->priv;
2587 	struct crypto_aes_ctx aes;
2588 	int ret, i;
2589 
2590 	ret = aes_expandkey(&aes, key, len);
2591 	if (ret) {
2592 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2593 		memzero_explicit(&aes, sizeof(aes));
2594 		return ret;
2595 	}
2596 
2597 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2598 		for (i = 0; i < len / sizeof(u32); i++) {
2599 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2600 				ctx->base.needs_inv = true;
2601 				break;
2602 			}
2603 		}
2604 	}
2605 
2606 	for (i = 0; i < len / sizeof(u32); i++) {
2607 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2608 		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2609 			cpu_to_be32(aes.key_enc[i]);
2610 	}
2611 
2612 	ctx->key_len = len;
2613 	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2614 
2615 	if (len == AES_KEYSIZE_192)
2616 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2617 	else if (len == AES_KEYSIZE_256)
2618 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2619 	else
2620 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2621 
2622 	memzero_explicit(&aes, sizeof(aes));
2623 	return 0;
2624 }
2625 
2626 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2627 {
2628 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2629 
2630 	safexcel_aead_cra_init(tfm);
2631 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2632 	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2633 	ctx->xcm = EIP197_XCM_MODE_CCM;
2634 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2635 	return 0;
2636 }
2637 
2638 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2639 					 unsigned int authsize)
2640 {
2641 	/* Borrowed from crypto/ccm.c */
2642 	switch (authsize) {
2643 	case 4:
2644 	case 6:
2645 	case 8:
2646 	case 10:
2647 	case 12:
2648 	case 14:
2649 	case 16:
2650 		break;
2651 	default:
2652 		return -EINVAL;
2653 	}
2654 
2655 	return 0;
2656 }
2657 
2658 static int safexcel_ccm_encrypt(struct aead_request *req)
2659 {
2660 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2661 
2662 	if (req->iv[0] < 1 || req->iv[0] > 7)
2663 		return -EINVAL;
2664 
2665 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2666 }
2667 
2668 static int safexcel_ccm_decrypt(struct aead_request *req)
2669 {
2670 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2671 
2672 	if (req->iv[0] < 1 || req->iv[0] > 7)
2673 		return -EINVAL;
2674 
2675 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2676 }
2677 
2678 struct safexcel_alg_template safexcel_alg_ccm = {
2679 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2680 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2681 	.alg.aead = {
2682 		.setkey = safexcel_aead_ccm_setkey,
2683 		.setauthsize = safexcel_aead_ccm_setauthsize,
2684 		.encrypt = safexcel_ccm_encrypt,
2685 		.decrypt = safexcel_ccm_decrypt,
2686 		.ivsize = AES_BLOCK_SIZE,
2687 		.maxauthsize = AES_BLOCK_SIZE,
2688 		.base = {
2689 			.cra_name = "ccm(aes)",
2690 			.cra_driver_name = "safexcel-ccm-aes",
2691 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2692 			.cra_flags = CRYPTO_ALG_ASYNC |
2693 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2694 			.cra_blocksize = 1,
2695 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2696 			.cra_alignmask = 0,
2697 			.cra_init = safexcel_aead_ccm_cra_init,
2698 			.cra_exit = safexcel_aead_cra_exit,
2699 			.cra_module = THIS_MODULE,
2700 		},
2701 	},
2702 };
2703 
2704 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2705 				     const u8 *key)
2706 {
2707 	struct safexcel_crypto_priv *priv = ctx->priv;
2708 
2709 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2710 		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2711 			ctx->base.needs_inv = true;
2712 
2713 	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2714 	ctx->key_len = CHACHA_KEY_SIZE;
2715 }
2716 
2717 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2718 					     const u8 *key, unsigned int len)
2719 {
2720 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2721 
2722 	if (len != CHACHA_KEY_SIZE) {
2723 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2724 		return -EINVAL;
2725 	}
2726 	safexcel_chacha20_setkey(ctx, key);
2727 
2728 	return 0;
2729 }
2730 
2731 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2732 {
2733 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2734 
2735 	safexcel_skcipher_cra_init(tfm);
2736 	ctx->alg  = SAFEXCEL_CHACHA20;
2737 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2738 	return 0;
2739 }
2740 
2741 struct safexcel_alg_template safexcel_alg_chacha20 = {
2742 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2743 	.algo_mask = SAFEXCEL_ALG_CHACHA20,
2744 	.alg.skcipher = {
2745 		.setkey = safexcel_skcipher_chacha20_setkey,
2746 		.encrypt = safexcel_encrypt,
2747 		.decrypt = safexcel_decrypt,
2748 		.min_keysize = CHACHA_KEY_SIZE,
2749 		.max_keysize = CHACHA_KEY_SIZE,
2750 		.ivsize = CHACHA_IV_SIZE,
2751 		.base = {
2752 			.cra_name = "chacha20",
2753 			.cra_driver_name = "safexcel-chacha20",
2754 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2755 			.cra_flags = CRYPTO_ALG_ASYNC |
2756 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2757 			.cra_blocksize = 1,
2758 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2759 			.cra_alignmask = 0,
2760 			.cra_init = safexcel_skcipher_chacha20_cra_init,
2761 			.cra_exit = safexcel_skcipher_cra_exit,
2762 			.cra_module = THIS_MODULE,
2763 		},
2764 	},
2765 };
2766 
2767 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2768 				    const u8 *key, unsigned int len)
2769 {
2770 	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2771 
2772 	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2773 	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2774 		/* ESP variant has nonce appended to key */
2775 		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2776 		ctx->nonce = *(u32 *)(key + len);
2777 	}
2778 	if (len != CHACHA_KEY_SIZE) {
2779 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2780 		return -EINVAL;
2781 	}
2782 	safexcel_chacha20_setkey(ctx, key);
2783 
2784 	return 0;
2785 }
2786 
2787 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2788 					 unsigned int authsize)
2789 {
2790 	if (authsize != POLY1305_DIGEST_SIZE)
2791 		return -EINVAL;
2792 	return 0;
2793 }
2794 
2795 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2796 					  enum safexcel_cipher_direction dir)
2797 {
2798 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2799 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2800 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2801 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2802 	struct aead_request *subreq = aead_request_ctx(req);
2803 	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2804 	int ret = 0;
2805 
2806 	/*
2807 	 * Instead of wasting time detecting umpteen silly corner cases,
2808 	 * just dump all "small" requests to the fallback implementation.
2809 	 * HW would not be faster on such small requests anyway.
2810 	 */
2811 	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2812 		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2813 		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
2814 		return safexcel_queue_req(&req->base, creq, dir);
2815 	}
2816 
2817 	/* HW cannot do full (AAD+payload) zero length, use fallback */
2818 	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2819 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2820 		/* ESP variant has nonce appended to the key */
2821 		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2822 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2823 					 CHACHA_KEY_SIZE +
2824 					 EIP197_AEAD_IPSEC_NONCE_SIZE);
2825 	} else {
2826 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2827 					 CHACHA_KEY_SIZE);
2828 	}
2829 	if (ret) {
2830 		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2831 		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2832 					    CRYPTO_TFM_REQ_MASK);
2833 		return ret;
2834 	}
2835 
2836 	aead_request_set_tfm(subreq, ctx->fback);
2837 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2838 				  req->base.data);
2839 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2840 			       req->iv);
2841 	aead_request_set_ad(subreq, req->assoclen);
2842 
2843 	return (dir ==  SAFEXCEL_ENCRYPT) ?
2844 		crypto_aead_encrypt(subreq) :
2845 		crypto_aead_decrypt(subreq);
2846 }
2847 
2848 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2849 {
2850 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2851 }
2852 
2853 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2854 {
2855 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2856 }
2857 
2858 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2859 {
2860 	struct crypto_aead *aead = __crypto_aead_cast(tfm);
2861 	struct aead_alg *alg = crypto_aead_alg(aead);
2862 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2863 
2864 	safexcel_aead_cra_init(tfm);
2865 
2866 	/* Allocate fallback implementation */
2867 	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2868 				       CRYPTO_ALG_ASYNC |
2869 				       CRYPTO_ALG_NEED_FALLBACK);
2870 	if (IS_ERR(ctx->fback))
2871 		return PTR_ERR(ctx->fback);
2872 
2873 	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2874 					  sizeof(struct aead_request) +
2875 					  crypto_aead_reqsize(ctx->fback)));
2876 
2877 	return 0;
2878 }
2879 
2880 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2881 {
2882 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2883 
2884 	safexcel_aead_fallback_cra_init(tfm);
2885 	ctx->alg  = SAFEXCEL_CHACHA20;
2886 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2887 		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2888 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2889 	ctx->state_sz = 0; /* Precomputed by HW */
2890 	return 0;
2891 }
2892 
2893 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2894 {
2895 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2896 
2897 	crypto_free_aead(ctx->fback);
2898 	safexcel_aead_cra_exit(tfm);
2899 }
2900 
2901 struct safexcel_alg_template safexcel_alg_chachapoly = {
2902 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2903 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2904 	.alg.aead = {
2905 		.setkey = safexcel_aead_chachapoly_setkey,
2906 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2907 		.encrypt = safexcel_aead_chachapoly_encrypt,
2908 		.decrypt = safexcel_aead_chachapoly_decrypt,
2909 		.ivsize = CHACHAPOLY_IV_SIZE,
2910 		.maxauthsize = POLY1305_DIGEST_SIZE,
2911 		.base = {
2912 			.cra_name = "rfc7539(chacha20,poly1305)",
2913 			.cra_driver_name = "safexcel-chacha20-poly1305",
2914 			/* +1 to put it above HW chacha + SW poly */
2915 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2916 			.cra_flags = CRYPTO_ALG_ASYNC |
2917 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2918 				     CRYPTO_ALG_NEED_FALLBACK,
2919 			.cra_blocksize = 1,
2920 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2921 			.cra_alignmask = 0,
2922 			.cra_init = safexcel_aead_chachapoly_cra_init,
2923 			.cra_exit = safexcel_aead_fallback_cra_exit,
2924 			.cra_module = THIS_MODULE,
2925 		},
2926 	},
2927 };
2928 
2929 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
2930 {
2931 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2932 	int ret;
2933 
2934 	ret = safexcel_aead_chachapoly_cra_init(tfm);
2935 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
2936 	return ret;
2937 }
2938 
2939 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
2940 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2941 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2942 	.alg.aead = {
2943 		.setkey = safexcel_aead_chachapoly_setkey,
2944 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2945 		.encrypt = safexcel_aead_chachapoly_encrypt,
2946 		.decrypt = safexcel_aead_chachapoly_decrypt,
2947 		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
2948 		.maxauthsize = POLY1305_DIGEST_SIZE,
2949 		.base = {
2950 			.cra_name = "rfc7539esp(chacha20,poly1305)",
2951 			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
2952 			/* +1 to put it above HW chacha + SW poly */
2953 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2954 			.cra_flags = CRYPTO_ALG_ASYNC |
2955 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2956 				     CRYPTO_ALG_NEED_FALLBACK,
2957 			.cra_blocksize = 1,
2958 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2959 			.cra_alignmask = 0,
2960 			.cra_init = safexcel_aead_chachapolyesp_cra_init,
2961 			.cra_exit = safexcel_aead_fallback_cra_exit,
2962 			.cra_module = THIS_MODULE,
2963 		},
2964 	},
2965 };
2966 
2967 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2968 					const u8 *key, unsigned int len)
2969 {
2970 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2971 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2972 	struct safexcel_crypto_priv *priv = ctx->priv;
2973 
2974 	if (len != SM4_KEY_SIZE) {
2975 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2976 		return -EINVAL;
2977 	}
2978 
2979 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2980 		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
2981 			ctx->base.needs_inv = true;
2982 
2983 	memcpy(ctx->key, key, SM4_KEY_SIZE);
2984 	ctx->key_len = SM4_KEY_SIZE;
2985 
2986 	return 0;
2987 }
2988 
2989 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
2990 {
2991 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2992 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
2993 		return -EINVAL;
2994 	else
2995 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2996 					  SAFEXCEL_ENCRYPT);
2997 }
2998 
2999 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3000 {
3001 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3002 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3003 		return -EINVAL;
3004 	else
3005 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3006 					  SAFEXCEL_DECRYPT);
3007 }
3008 
3009 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3010 {
3011 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3012 
3013 	safexcel_skcipher_cra_init(tfm);
3014 	ctx->alg  = SAFEXCEL_SM4;
3015 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3016 	return 0;
3017 }
3018 
3019 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3020 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3021 	.algo_mask = SAFEXCEL_ALG_SM4,
3022 	.alg.skcipher = {
3023 		.setkey = safexcel_skcipher_sm4_setkey,
3024 		.encrypt = safexcel_sm4_blk_encrypt,
3025 		.decrypt = safexcel_sm4_blk_decrypt,
3026 		.min_keysize = SM4_KEY_SIZE,
3027 		.max_keysize = SM4_KEY_SIZE,
3028 		.base = {
3029 			.cra_name = "ecb(sm4)",
3030 			.cra_driver_name = "safexcel-ecb-sm4",
3031 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3032 			.cra_flags = CRYPTO_ALG_ASYNC |
3033 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3034 			.cra_blocksize = SM4_BLOCK_SIZE,
3035 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3036 			.cra_alignmask = 0,
3037 			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3038 			.cra_exit = safexcel_skcipher_cra_exit,
3039 			.cra_module = THIS_MODULE,
3040 		},
3041 	},
3042 };
3043 
3044 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3045 {
3046 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3047 
3048 	safexcel_skcipher_cra_init(tfm);
3049 	ctx->alg  = SAFEXCEL_SM4;
3050 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3051 	return 0;
3052 }
3053 
3054 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3055 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3056 	.algo_mask = SAFEXCEL_ALG_SM4,
3057 	.alg.skcipher = {
3058 		.setkey = safexcel_skcipher_sm4_setkey,
3059 		.encrypt = safexcel_sm4_blk_encrypt,
3060 		.decrypt = safexcel_sm4_blk_decrypt,
3061 		.min_keysize = SM4_KEY_SIZE,
3062 		.max_keysize = SM4_KEY_SIZE,
3063 		.ivsize = SM4_BLOCK_SIZE,
3064 		.base = {
3065 			.cra_name = "cbc(sm4)",
3066 			.cra_driver_name = "safexcel-cbc-sm4",
3067 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3068 			.cra_flags = CRYPTO_ALG_ASYNC |
3069 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3070 			.cra_blocksize = SM4_BLOCK_SIZE,
3071 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3072 			.cra_alignmask = 0,
3073 			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3074 			.cra_exit = safexcel_skcipher_cra_exit,
3075 			.cra_module = THIS_MODULE,
3076 		},
3077 	},
3078 };
3079 
3080 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3081 {
3082 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3083 
3084 	safexcel_skcipher_cra_init(tfm);
3085 	ctx->alg  = SAFEXCEL_SM4;
3086 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3087 	return 0;
3088 }
3089 
3090 struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3091 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3092 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3093 	.alg.skcipher = {
3094 		.setkey = safexcel_skcipher_sm4_setkey,
3095 		.encrypt = safexcel_encrypt,
3096 		.decrypt = safexcel_decrypt,
3097 		.min_keysize = SM4_KEY_SIZE,
3098 		.max_keysize = SM4_KEY_SIZE,
3099 		.ivsize = SM4_BLOCK_SIZE,
3100 		.base = {
3101 			.cra_name = "ofb(sm4)",
3102 			.cra_driver_name = "safexcel-ofb-sm4",
3103 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3104 			.cra_flags = CRYPTO_ALG_ASYNC |
3105 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3106 			.cra_blocksize = 1,
3107 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3108 			.cra_alignmask = 0,
3109 			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3110 			.cra_exit = safexcel_skcipher_cra_exit,
3111 			.cra_module = THIS_MODULE,
3112 		},
3113 	},
3114 };
3115 
3116 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3117 {
3118 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3119 
3120 	safexcel_skcipher_cra_init(tfm);
3121 	ctx->alg  = SAFEXCEL_SM4;
3122 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3123 	return 0;
3124 }
3125 
3126 struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3127 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3128 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3129 	.alg.skcipher = {
3130 		.setkey = safexcel_skcipher_sm4_setkey,
3131 		.encrypt = safexcel_encrypt,
3132 		.decrypt = safexcel_decrypt,
3133 		.min_keysize = SM4_KEY_SIZE,
3134 		.max_keysize = SM4_KEY_SIZE,
3135 		.ivsize = SM4_BLOCK_SIZE,
3136 		.base = {
3137 			.cra_name = "cfb(sm4)",
3138 			.cra_driver_name = "safexcel-cfb-sm4",
3139 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3140 			.cra_flags = CRYPTO_ALG_ASYNC |
3141 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3142 			.cra_blocksize = 1,
3143 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3144 			.cra_alignmask = 0,
3145 			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3146 			.cra_exit = safexcel_skcipher_cra_exit,
3147 			.cra_module = THIS_MODULE,
3148 		},
3149 	},
3150 };
3151 
3152 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3153 					   const u8 *key, unsigned int len)
3154 {
3155 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3156 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3157 
3158 	/* last 4 bytes of key are the nonce! */
3159 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3160 	/* exclude the nonce here */
3161 	len -= CTR_RFC3686_NONCE_SIZE;
3162 
3163 	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3164 }
3165 
3166 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3167 {
3168 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3169 
3170 	safexcel_skcipher_cra_init(tfm);
3171 	ctx->alg  = SAFEXCEL_SM4;
3172 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3173 	return 0;
3174 }
3175 
3176 struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3177 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3178 	.algo_mask = SAFEXCEL_ALG_SM4,
3179 	.alg.skcipher = {
3180 		.setkey = safexcel_skcipher_sm4ctr_setkey,
3181 		.encrypt = safexcel_encrypt,
3182 		.decrypt = safexcel_decrypt,
3183 		/* Add nonce size */
3184 		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3185 		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3186 		.ivsize = CTR_RFC3686_IV_SIZE,
3187 		.base = {
3188 			.cra_name = "rfc3686(ctr(sm4))",
3189 			.cra_driver_name = "safexcel-ctr-sm4",
3190 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3191 			.cra_flags = CRYPTO_ALG_ASYNC |
3192 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3193 			.cra_blocksize = 1,
3194 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3195 			.cra_alignmask = 0,
3196 			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3197 			.cra_exit = safexcel_skcipher_cra_exit,
3198 			.cra_module = THIS_MODULE,
3199 		},
3200 	},
3201 };
3202 
3203 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3204 {
3205 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3206 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3207 		return -EINVAL;
3208 
3209 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3210 				  SAFEXCEL_ENCRYPT);
3211 }
3212 
3213 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3214 {
3215 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3216 
3217 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3218 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3219 		return -EINVAL;
3220 
3221 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3222 				  SAFEXCEL_DECRYPT);
3223 }
3224 
3225 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3226 {
3227 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3228 
3229 	safexcel_aead_cra_init(tfm);
3230 	ctx->alg = SAFEXCEL_SM4;
3231 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3232 	ctx->state_sz = SHA1_DIGEST_SIZE;
3233 	return 0;
3234 }
3235 
3236 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3237 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3238 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3239 	.alg.aead = {
3240 		.setkey = safexcel_aead_setkey,
3241 		.encrypt = safexcel_aead_sm4_blk_encrypt,
3242 		.decrypt = safexcel_aead_sm4_blk_decrypt,
3243 		.ivsize = SM4_BLOCK_SIZE,
3244 		.maxauthsize = SHA1_DIGEST_SIZE,
3245 		.base = {
3246 			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
3247 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3248 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3249 			.cra_flags = CRYPTO_ALG_ASYNC |
3250 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3251 			.cra_blocksize = SM4_BLOCK_SIZE,
3252 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3253 			.cra_alignmask = 0,
3254 			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3255 			.cra_exit = safexcel_aead_cra_exit,
3256 			.cra_module = THIS_MODULE,
3257 		},
3258 	},
3259 };
3260 
3261 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3262 					 const u8 *key, unsigned int len)
3263 {
3264 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3265 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3266 
3267 	/* Keep fallback cipher synchronized */
3268 	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3269 	       safexcel_aead_setkey(ctfm, key, len);
3270 }
3271 
3272 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3273 					      unsigned int authsize)
3274 {
3275 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3276 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3277 
3278 	/* Keep fallback cipher synchronized */
3279 	return crypto_aead_setauthsize(ctx->fback, authsize);
3280 }
3281 
3282 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3283 					enum safexcel_cipher_direction dir)
3284 {
3285 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
3286 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3287 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3288 	struct aead_request *subreq = aead_request_ctx(req);
3289 
3290 	aead_request_set_tfm(subreq, ctx->fback);
3291 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3292 				  req->base.data);
3293 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3294 			       req->iv);
3295 	aead_request_set_ad(subreq, req->assoclen);
3296 
3297 	return (dir ==  SAFEXCEL_ENCRYPT) ?
3298 		crypto_aead_encrypt(subreq) :
3299 		crypto_aead_decrypt(subreq);
3300 }
3301 
3302 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3303 {
3304 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3305 
3306 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3307 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3308 		return -EINVAL;
3309 	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3310 		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3311 
3312 	/* HW cannot do full (AAD+payload) zero length, use fallback */
3313 	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3314 }
3315 
3316 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3317 {
3318 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3319 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3320 
3321 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3322 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3323 		return -EINVAL;
3324 	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3325 		/* If input length > 0 only */
3326 		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3327 
3328 	/* HW cannot do full (AAD+payload) zero length, use fallback */
3329 	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3330 }
3331 
3332 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3333 {
3334 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3335 
3336 	safexcel_aead_fallback_cra_init(tfm);
3337 	ctx->alg = SAFEXCEL_SM4;
3338 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3339 	ctx->state_sz = SM3_DIGEST_SIZE;
3340 	return 0;
3341 }
3342 
3343 struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3344 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3345 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3346 	.alg.aead = {
3347 		.setkey = safexcel_aead_fallback_setkey,
3348 		.setauthsize = safexcel_aead_fallback_setauthsize,
3349 		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3350 		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3351 		.ivsize = SM4_BLOCK_SIZE,
3352 		.maxauthsize = SM3_DIGEST_SIZE,
3353 		.base = {
3354 			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
3355 			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3356 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3357 			.cra_flags = CRYPTO_ALG_ASYNC |
3358 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3359 				     CRYPTO_ALG_NEED_FALLBACK,
3360 			.cra_blocksize = SM4_BLOCK_SIZE,
3361 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3362 			.cra_alignmask = 0,
3363 			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3364 			.cra_exit = safexcel_aead_fallback_cra_exit,
3365 			.cra_module = THIS_MODULE,
3366 		},
3367 	},
3368 };
3369 
3370 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3371 {
3372 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3373 
3374 	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3375 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3376 	return 0;
3377 }
3378 
3379 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
3380 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3381 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3382 	.alg.aead = {
3383 		.setkey = safexcel_aead_setkey,
3384 		.encrypt = safexcel_aead_encrypt,
3385 		.decrypt = safexcel_aead_decrypt,
3386 		.ivsize = CTR_RFC3686_IV_SIZE,
3387 		.maxauthsize = SHA1_DIGEST_SIZE,
3388 		.base = {
3389 			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
3390 			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
3391 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3392 			.cra_flags = CRYPTO_ALG_ASYNC |
3393 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3394 			.cra_blocksize = 1,
3395 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3396 			.cra_alignmask = 0,
3397 			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
3398 			.cra_exit = safexcel_aead_cra_exit,
3399 			.cra_module = THIS_MODULE,
3400 		},
3401 	},
3402 };
3403 
3404 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3405 {
3406 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3407 
3408 	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3409 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3410 	return 0;
3411 }
3412 
3413 struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
3414 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3415 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3416 	.alg.aead = {
3417 		.setkey = safexcel_aead_setkey,
3418 		.encrypt = safexcel_aead_encrypt,
3419 		.decrypt = safexcel_aead_decrypt,
3420 		.ivsize = CTR_RFC3686_IV_SIZE,
3421 		.maxauthsize = SM3_DIGEST_SIZE,
3422 		.base = {
3423 			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
3424 			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
3425 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3426 			.cra_flags = CRYPTO_ALG_ASYNC |
3427 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3428 			.cra_blocksize = 1,
3429 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3430 			.cra_alignmask = 0,
3431 			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
3432 			.cra_exit = safexcel_aead_cra_exit,
3433 			.cra_module = THIS_MODULE,
3434 		},
3435 	},
3436 };
3437 
3438 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3439 				       unsigned int len)
3440 {
3441 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3442 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3443 
3444 	/* last 4 bytes of key are the nonce! */
3445 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3446 
3447 	len -= CTR_RFC3686_NONCE_SIZE;
3448 	return safexcel_aead_gcm_setkey(ctfm, key, len);
3449 }
3450 
3451 static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
3452 					    unsigned int authsize)
3453 {
3454 	return crypto_rfc4106_check_authsize(authsize);
3455 }
3456 
3457 static int safexcel_rfc4106_encrypt(struct aead_request *req)
3458 {
3459 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3460 	       safexcel_aead_encrypt(req);
3461 }
3462 
3463 static int safexcel_rfc4106_decrypt(struct aead_request *req)
3464 {
3465 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3466 	       safexcel_aead_decrypt(req);
3467 }
3468 
3469 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3470 {
3471 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3472 	int ret;
3473 
3474 	ret = safexcel_aead_gcm_cra_init(tfm);
3475 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3476 	return ret;
3477 }
3478 
3479 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3480 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3481 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3482 	.alg.aead = {
3483 		.setkey = safexcel_rfc4106_gcm_setkey,
3484 		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3485 		.encrypt = safexcel_rfc4106_encrypt,
3486 		.decrypt = safexcel_rfc4106_decrypt,
3487 		.ivsize = GCM_RFC4106_IV_SIZE,
3488 		.maxauthsize = GHASH_DIGEST_SIZE,
3489 		.base = {
3490 			.cra_name = "rfc4106(gcm(aes))",
3491 			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3492 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3493 			.cra_flags = CRYPTO_ALG_ASYNC |
3494 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3495 			.cra_blocksize = 1,
3496 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3497 			.cra_alignmask = 0,
3498 			.cra_init = safexcel_rfc4106_gcm_cra_init,
3499 			.cra_exit = safexcel_aead_gcm_cra_exit,
3500 		},
3501 	},
3502 };
3503 
3504 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3505 					    unsigned int authsize)
3506 {
3507 	if (authsize != GHASH_DIGEST_SIZE)
3508 		return -EINVAL;
3509 
3510 	return 0;
3511 }
3512 
3513 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3514 {
3515 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3516 	int ret;
3517 
3518 	ret = safexcel_aead_gcm_cra_init(tfm);
3519 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3520 	return ret;
3521 }
3522 
3523 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3524 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3525 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3526 	.alg.aead = {
3527 		.setkey = safexcel_rfc4106_gcm_setkey,
3528 		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
3529 		.encrypt = safexcel_rfc4106_encrypt,
3530 		.decrypt = safexcel_rfc4106_decrypt,
3531 		.ivsize = GCM_RFC4543_IV_SIZE,
3532 		.maxauthsize = GHASH_DIGEST_SIZE,
3533 		.base = {
3534 			.cra_name = "rfc4543(gcm(aes))",
3535 			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
3536 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3537 			.cra_flags = CRYPTO_ALG_ASYNC |
3538 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3539 			.cra_blocksize = 1,
3540 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3541 			.cra_alignmask = 0,
3542 			.cra_init = safexcel_rfc4543_gcm_cra_init,
3543 			.cra_exit = safexcel_aead_gcm_cra_exit,
3544 		},
3545 	},
3546 };
3547 
3548 static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3549 				       unsigned int len)
3550 {
3551 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3552 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3553 
3554 	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3555 	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3556 	/* last 3 bytes of key are the nonce! */
3557 	memcpy((u8 *)&ctx->nonce + 1, key + len -
3558 	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3559 	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3560 
3561 	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3562 	return safexcel_aead_ccm_setkey(ctfm, key, len);
3563 }
3564 
3565 static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3566 					    unsigned int authsize)
3567 {
3568 	/* Borrowed from crypto/ccm.c */
3569 	switch (authsize) {
3570 	case 8:
3571 	case 12:
3572 	case 16:
3573 		break;
3574 	default:
3575 		return -EINVAL;
3576 	}
3577 
3578 	return 0;
3579 }
3580 
3581 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3582 {
3583 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3584 
3585 	/* Borrowed from crypto/ccm.c */
3586 	if (req->assoclen != 16 && req->assoclen != 20)
3587 		return -EINVAL;
3588 
3589 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3590 }
3591 
3592 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3593 {
3594 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3595 
3596 	/* Borrowed from crypto/ccm.c */
3597 	if (req->assoclen != 16 && req->assoclen != 20)
3598 		return -EINVAL;
3599 
3600 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3601 }
3602 
3603 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3604 {
3605 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3606 	int ret;
3607 
3608 	ret = safexcel_aead_ccm_cra_init(tfm);
3609 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3610 	return ret;
3611 }
3612 
3613 struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3614 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3615 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3616 	.alg.aead = {
3617 		.setkey = safexcel_rfc4309_ccm_setkey,
3618 		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
3619 		.encrypt = safexcel_rfc4309_ccm_encrypt,
3620 		.decrypt = safexcel_rfc4309_ccm_decrypt,
3621 		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3622 		.maxauthsize = AES_BLOCK_SIZE,
3623 		.base = {
3624 			.cra_name = "rfc4309(ccm(aes))",
3625 			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
3626 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3627 			.cra_flags = CRYPTO_ALG_ASYNC |
3628 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3629 			.cra_blocksize = 1,
3630 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3631 			.cra_alignmask = 0,
3632 			.cra_init = safexcel_rfc4309_ccm_cra_init,
3633 			.cra_exit = safexcel_aead_cra_exit,
3634 			.cra_module = THIS_MODULE,
3635 		},
3636 	},
3637 };
3638