xref: /linux/drivers/crypto/inside-secure/safexcel_cipher.c (revision 7d07de2c18abd95f72efb28f78a4825e0fc1aa6a)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7 
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28 
29 #include "safexcel.h"
30 
/* Cipher operation direction, as requested by the crypto API caller */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
35 
/* Base cipher algorithms handled by this driver */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
43 
/*
 * Per-transform (tfm) context shared by the skcipher and AEAD
 * implementations. Everything from hash_alg onward is AEAD specific.
 */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;			/* CONTEXT_CONTROL_CRYPTO_MODE_* value */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;			/* AAD bytes excluded from hashing */
	u8 blocksz;			/* cipher block size = plain IV size */
	u32 ivmask;			/* cdesc option bits selecting IV words */
	u32 ctrinit;			/* initial counter value, 0 or 1 */

	__le32 key[16];			/* cipher key, little endian words */
	u32 nonce;			/* 32 bit salt (RFC3686 CTR / ESP) */
	unsigned int key_len, xts;	/* xts=1 halves key_len per AES key
					 * (key_len >> xts in context_control)
					 */

	/* All the below is AEAD specific */
	u32 hash_alg;			/* CONTEXT_CONTROL_CRYPTO_ALG_* hash */
	u32 state_sz;			/* hash state size in bytes */
	__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];

	/* presumably AES tfm for GHASH subkey derivation — not used in this
	 * view, confirm against the GCM setkey path
	 */
	struct crypto_cipher *hkaes;
	/* NOTE(review): looks like a software fallback aead — confirm */
	struct crypto_aead *fback;
};
70 
/* Per-request state, kept in the crypto API request context */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* context record must be invalidated first */
	int  nr_src, nr_dst;	/* scatterlist entry counts used for DMA map */
};
78 
/*
 * Write the IV for a skcipher operation into the 4 word embedded token
 * area of the command descriptor.
 *
 * Returns the number of 32 bit token words consumed; 4 means the whole
 * embedded area was used (so any extra instruction must go elsewhere).
 */
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	/* CBC and friends: copy the IV verbatim, one block's worth */
	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}
106 
/*
 * Build the instruction token for a plain skcipher operation: load the
 * IV, then emit a single DIRECTION instruction covering @length bytes
 * of crypto payload. The instruction lands either in the remaining
 * embedded token space of @cdesc or, if the IV filled all 4 words, in
 * the additional token area @atoken.
 */
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}
135 
/*
 * Write the IV for an AEAD operation (except CCM, which builds its own
 * B0 block in safexcel_aead_token) into the embedded token area.
 * Always consumes all 4 token words.
 */
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}
161 
/*
 * Build the full EIP197 instruction token chain for an AEAD operation.
 *
 * Fills the 4 word embedded IV area of @cdesc plus additional token
 * instructions at @atoken: CCM B0/AAD-length construction (if CCM),
 * AAD hashing, ESP IV skip or chacha decrypt NOP where needed, enc(Y0)
 * retrieval for GCM/CCM, the crypto payload instruction, and finally
 * ICV insertion (encrypt) or retrieve+verify (decrypt). The resulting
 * token size is written to cdesc->additional_cdata_size.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV  for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	/* On decrypt, the ICV tail is not part of the crypto payload */
	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Feed B0 (+ optional 2 byte AAD length) to the hash only */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			/* No payload: the AAD padding ends the hash stream */
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if  (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
372 
373 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
374 					const u8 *key, unsigned int len)
375 {
376 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
377 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
378 	struct safexcel_crypto_priv *priv = ctx->priv;
379 	struct crypto_aes_ctx aes;
380 	int ret, i;
381 
382 	ret = aes_expandkey(&aes, key, len);
383 	if (ret) {
384 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
385 		return ret;
386 	}
387 
388 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
389 		for (i = 0; i < len / sizeof(u32); i++) {
390 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
391 				ctx->base.needs_inv = true;
392 				break;
393 			}
394 		}
395 	}
396 
397 	for (i = 0; i < len / sizeof(u32); i++)
398 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
399 
400 	ctx->key_len = len;
401 
402 	memzero_explicit(&aes, sizeof(aes));
403 	return 0;
404 }
405 
/*
 * AEAD (authenc) setkey: split the combined key into cipher and auth
 * parts, validate/expand the cipher key per algorithm, precompute the
 * HMAC ipad/opad digests for the selected hash, flag a context record
 * invalidation if any of it changed, and store everything in the tfm
 * context.
 *
 * Returns 0 on success or -EINVAL (or a DES weak-key error) on bad keys.
 */
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL, i;

	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_DES:
		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_AES:
		/* Expansion only validates the key; raw key is stored below */
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_SM4:
		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	/* New cipher key differs from cached one -> must invalidate */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) !=
			    ((u32 *)keys.enckey)[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Auth key: precompute HMAC inner/outer digests for the hash alg */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
		if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	/* Changed ipad/opad digests also require invalidation */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
badkey_expflags:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}
529 
/*
 * Fill in the context control words (control0/control1) of the first
 * command descriptor: operation type, key enable, digest mode, hash
 * algorithm, context record size and cipher algorithm/key size.
 *
 * Returns 0 on success or -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	/* Context record size in 32 bit words, starting with just the key */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: single XCM digest state follows the key */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			/* HMAC authenc: both ipad and opad follow the key */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* CCM and GMAC hash before encrypt; others encrypt first */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto-only operation */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS, key_len holds both keys, hence the >> ctx->xts */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
631 
/*
 * Consume the result descriptors of a completed (non-invalidation)
 * request: collect error status, release the ring, unmap the DMA
 * buffers and, for CBC encrypt, copy the last ciphertext block back
 * into the request IV for chaining.
 *
 * Returns the number of result descriptors processed; error status is
 * reported through *ret and completion through *should_complete.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
691 
/*
 * Build and queue all command and result descriptors for one skcipher
 * or AEAD request on @ring.
 *
 * Maps src/dst scatterlists for DMA, writes the key (plus ipad/opad for
 * AEAD) into the context record, emits one command descriptor per used
 * source segment (or a single dummy descriptor for zero-length input),
 * attaches the context control words and instruction token to the first
 * command descriptor, and emits result descriptors covering the
 * destination while skipping the AAD area (which the engine does not
 * write back).
 *
 * On success returns 0 with *commands/*results set to the descriptor
 * counts; on failure, rolls back any descriptors already written and
 * unmaps the buffers before returning a negative error code.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		/* ipad (and opad for non-XCM) go after the key in the record */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), ctx->opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		/* In-place: one mapping must cover both directions */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}
920 
/*
 * Consume the result descriptor(s) of a completed context invalidation.
 *
 * If the tfm is being torn down (base.exit_inv), free the context
 * record and complete; otherwise re-enqueue the original request on a
 * (possibly different) ring so it runs with a fresh context record.
 *
 * Returns the number of result descriptors processed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm teardown: release the context record and finish */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Requeue the original request, possibly on another ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Original request is not done yet; do not complete it */
	*should_complete = false;

	return ndesc;
}
979 
980 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
981 					   int ring,
982 					   struct crypto_async_request *async,
983 					   bool *should_complete, int *ret)
984 {
985 	struct skcipher_request *req = skcipher_request_cast(async);
986 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
987 	int err;
988 
989 	if (sreq->needs_inv) {
990 		sreq->needs_inv = false;
991 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
992 						 should_complete, ret);
993 	} else {
994 		err = safexcel_handle_req_result(priv, ring, async, req->src,
995 						 req->dst, req->cryptlen, sreq,
996 						 should_complete, ret);
997 	}
998 
999 	return err;
1000 }
1001 
1002 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
1003 				       int ring,
1004 				       struct crypto_async_request *async,
1005 				       bool *should_complete, int *ret)
1006 {
1007 	struct aead_request *req = aead_request_cast(async);
1008 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1009 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1010 	int err;
1011 
1012 	if (sreq->needs_inv) {
1013 		sreq->needs_inv = false;
1014 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
1015 						 should_complete, ret);
1016 	} else {
1017 		err = safexcel_handle_req_result(priv, ring, async, req->src,
1018 						 req->dst,
1019 						 req->cryptlen + crypto_aead_authsize(tfm),
1020 						 sreq, should_complete, ret);
1021 	}
1022 
1023 	return err;
1024 }
1025 
1026 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1027 				    int ring, int *commands, int *results)
1028 {
1029 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1030 	struct safexcel_crypto_priv *priv = ctx->priv;
1031 	int ret;
1032 
1033 	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1034 	if (unlikely(ret))
1035 		return ret;
1036 
1037 	*commands = 1;
1038 	*results = 1;
1039 
1040 	return 0;
1041 }
1042 
/*
 * Ring callback: emit the descriptors for a skcipher request on @ring,
 * or a context invalidation if one is pending for this tfm.
 */
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* Invalidations can only be pending when the TRC cache is in use */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	/* Remember how many result descriptors to expect at completion */
	sreq->rdescs = *results;
	return ret;
}
1074 
1075 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1076 			      int *commands, int *results)
1077 {
1078 	struct aead_request *req = aead_request_cast(async);
1079 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1080 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1081 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1082 	struct safexcel_crypto_priv *priv = ctx->priv;
1083 	int ret;
1084 
1085 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1086 
1087 	if (sreq->needs_inv)
1088 		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1089 	else
1090 		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1091 					req->cryptlen, req->assoclen,
1092 					crypto_aead_authsize(tfm), req->iv,
1093 					commands, results);
1094 	sreq->rdescs = *results;
1095 	return ret;
1096 }
1097 
/*
 * Synchronously invalidate the HW context record of @tfm: queue an
 * invalidation request on the context's current ring, kick the ring
 * worker, then sleep until safexcel_inv_complete() fires.
 * Returns 0 on success or the completion's error code.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	/*
	 * NOTE(review): callers set base->tfm to @tfm, so this reassignment
	 * appears to be a no-op - confirm against all call sites.
	 */
	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Block until the invalidation's completion callback runs */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
1131 
1132 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1133 {
1134 	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1135 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1136 	struct safexcel_inv_result result = {};
1137 
1138 	memset(req, 0, sizeof(struct skcipher_request));
1139 
1140 	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1141 				      safexcel_inv_complete, &result);
1142 	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1143 
1144 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1145 }
1146 
1147 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1148 {
1149 	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1150 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1151 	struct safexcel_inv_result result = {};
1152 
1153 	memset(req, 0, sizeof(struct aead_request));
1154 
1155 	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1156 				  safexcel_inv_complete, &result);
1157 	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1158 
1159 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1160 }
1161 
/*
 * Common entry point for all cipher/AEAD operations: allocate the HW
 * context record on first use, flag a cache invalidation when one is
 * pending, then enqueue the request on the context's ring and kick the
 * ring worker.  Returns the crypto queue status (e.g. -EINPROGRESS).
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* Existing context marked stale: invalidate it first */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First use: allocate the DMA-able context record */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1198 
1199 static int safexcel_encrypt(struct skcipher_request *req)
1200 {
1201 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1202 			SAFEXCEL_ENCRYPT);
1203 }
1204 
1205 static int safexcel_decrypt(struct skcipher_request *req)
1206 {
1207 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1208 			SAFEXCEL_DECRYPT);
1209 }
1210 
1211 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1212 {
1213 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1214 	struct safexcel_alg_template *tmpl =
1215 		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1216 			     alg.skcipher.base);
1217 
1218 	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1219 				    sizeof(struct safexcel_cipher_req));
1220 
1221 	ctx->priv = tmpl->priv;
1222 
1223 	ctx->base.send = safexcel_skcipher_send;
1224 	ctx->base.handle_result = safexcel_skcipher_handle_result;
1225 	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1226 	ctx->ctrinit = 1;
1227 	return 0;
1228 }
1229 
/*
 * Common tfm teardown: scrub the key material and the HW context data.
 * Returns nonzero (-ENOMEM, used by the callers purely as a "nothing
 * was allocated, skip invalidation" sentinel) when no context record
 * exists, 0 otherwise.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* memzero_explicit() so the wipe cannot be optimized away */
	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1243 
/*
 * skcipher tfm teardown: on engines with a transform record cache the
 * HW context must be invalidated (which also frees it on completion);
 * otherwise the context record can be freed directly.
 */
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* Nonzero means no HW context was ever allocated: nothing to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1263 
1264 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1265 {
1266 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1267 	struct safexcel_crypto_priv *priv = ctx->priv;
1268 	int ret;
1269 
1270 	if (safexcel_cipher_cra_exit(tfm))
1271 		return;
1272 
1273 	if (priv->flags & EIP197_TRC_CACHE) {
1274 		ret = safexcel_aead_exit_inv(tfm);
1275 		if (ret)
1276 			dev_warn(priv->dev, "aead: invalidation error %d\n",
1277 				 ret);
1278 	} else {
1279 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1280 			      ctx->base.ctxr_dma);
1281 	}
1282 }
1283 
1284 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1285 {
1286 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1287 
1288 	safexcel_skcipher_cra_init(tfm);
1289 	ctx->alg  = SAFEXCEL_AES;
1290 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1291 	ctx->blocksz = 0;
1292 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1293 	return 0;
1294 }
1295 
/* ecb(aes) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1320 
1321 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1322 {
1323 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1324 
1325 	safexcel_skcipher_cra_init(tfm);
1326 	ctx->alg  = SAFEXCEL_AES;
1327 	ctx->blocksz = AES_BLOCK_SIZE;
1328 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1329 	return 0;
1330 }
1331 
/* cbc(aes) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1357 
1358 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1359 {
1360 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1361 
1362 	safexcel_skcipher_cra_init(tfm);
1363 	ctx->alg  = SAFEXCEL_AES;
1364 	ctx->blocksz = AES_BLOCK_SIZE;
1365 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1366 	return 0;
1367 }
1368 
/* cfb(aes) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1394 
1395 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1396 {
1397 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1398 
1399 	safexcel_skcipher_cra_init(tfm);
1400 	ctx->alg  = SAFEXCEL_AES;
1401 	ctx->blocksz = AES_BLOCK_SIZE;
1402 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1403 	return 0;
1404 }
1405 
/* ofb(aes) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1431 
1432 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1433 					   const u8 *key, unsigned int len)
1434 {
1435 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1436 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1437 	struct safexcel_crypto_priv *priv = ctx->priv;
1438 	struct crypto_aes_ctx aes;
1439 	int ret, i;
1440 	unsigned int keylen;
1441 
1442 	/* last 4 bytes of key are the nonce! */
1443 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1444 	/* exclude the nonce here */
1445 	keylen = len - CTR_RFC3686_NONCE_SIZE;
1446 	ret = aes_expandkey(&aes, key, keylen);
1447 	if (ret) {
1448 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1449 		return ret;
1450 	}
1451 
1452 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1453 		for (i = 0; i < keylen / sizeof(u32); i++) {
1454 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1455 				ctx->base.needs_inv = true;
1456 				break;
1457 			}
1458 		}
1459 	}
1460 
1461 	for (i = 0; i < keylen / sizeof(u32); i++)
1462 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1463 
1464 	ctx->key_len = keylen;
1465 
1466 	memzero_explicit(&aes, sizeof(aes));
1467 	return 0;
1468 }
1469 
1470 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1471 {
1472 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1473 
1474 	safexcel_skcipher_cra_init(tfm);
1475 	ctx->alg  = SAFEXCEL_AES;
1476 	ctx->blocksz = AES_BLOCK_SIZE;
1477 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1478 	return 0;
1479 }
1480 
/* rfc3686(ctr(aes)) skcipher algorithm template; keysize includes nonce */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1507 
/*
 * Set the single-DES key.  Weak/invalid key checking is delegated to
 * verify_skcipher_des_key().
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1529 
1530 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1531 {
1532 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1533 
1534 	safexcel_skcipher_cra_init(tfm);
1535 	ctx->alg  = SAFEXCEL_DES;
1536 	ctx->blocksz = DES_BLOCK_SIZE;
1537 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1538 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1539 	return 0;
1540 }
1541 
/* cbc(des) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1567 
1568 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1569 {
1570 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1571 
1572 	safexcel_skcipher_cra_init(tfm);
1573 	ctx->alg  = SAFEXCEL_DES;
1574 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1575 	ctx->blocksz = 0;
1576 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1577 	return 0;
1578 }
1579 
/* ecb(des) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1604 
/*
 * Set the triple-DES (EDE) key.  Key validity checking is delegated to
 * verify_skcipher_des3_key().
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1626 
1627 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1628 {
1629 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1630 
1631 	safexcel_skcipher_cra_init(tfm);
1632 	ctx->alg  = SAFEXCEL_3DES;
1633 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1634 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1635 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1636 	return 0;
1637 }
1638 
/* cbc(des3_ede) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1664 
1665 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1666 {
1667 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1668 
1669 	safexcel_skcipher_cra_init(tfm);
1670 	ctx->alg  = SAFEXCEL_3DES;
1671 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1672 	ctx->blocksz = 0;
1673 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1674 	return 0;
1675 }
1676 
/* ecb(des3_ede) skcipher algorithm template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1701 
1702 static int safexcel_aead_encrypt(struct aead_request *req)
1703 {
1704 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1705 
1706 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1707 }
1708 
1709 static int safexcel_aead_decrypt(struct aead_request *req)
1710 {
1711 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1712 
1713 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1714 }
1715 
/*
 * Common AEAD tfm init: wire up the send/result callbacks and set the
 * AES-CBC defaults that the per-algorithm init functions may override.
 */
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	/* Per-request state carries direction/invalidation bookkeeping */
	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg  = SAFEXCEL_AES; /* default */
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
1738 
1739 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1740 {
1741 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1742 
1743 	safexcel_aead_cra_init(tfm);
1744 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1745 	ctx->state_sz = SHA1_DIGEST_SIZE;
1746 	return 0;
1747 }
1748 
/* authenc(hmac(sha1),cbc(aes)) AEAD template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1773 
1774 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1775 {
1776 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1777 
1778 	safexcel_aead_cra_init(tfm);
1779 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1780 	ctx->state_sz = SHA256_DIGEST_SIZE;
1781 	return 0;
1782 }
1783 
/* authenc(hmac(sha256),cbc(aes)) AEAD template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1808 
1809 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1810 {
1811 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1812 
1813 	safexcel_aead_cra_init(tfm);
1814 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1815 	ctx->state_sz = SHA256_DIGEST_SIZE;
1816 	return 0;
1817 }
1818 
/* authenc(hmac(sha224),cbc(aes)) AEAD template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1843 
1844 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1845 {
1846 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1847 
1848 	safexcel_aead_cra_init(tfm);
1849 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1850 	ctx->state_sz = SHA512_DIGEST_SIZE;
1851 	return 0;
1852 }
1853 
/* authenc(hmac(sha512),cbc(aes)) AEAD template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1878 
1879 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1880 {
1881 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1882 
1883 	safexcel_aead_cra_init(tfm);
1884 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1885 	ctx->state_sz = SHA512_DIGEST_SIZE;
1886 	return 0;
1887 }
1888 
/* authenc(hmac(sha384),cbc(aes)) AEAD template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1913 
1914 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1915 {
1916 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1917 
1918 	safexcel_aead_sha1_cra_init(tfm);
1919 	ctx->alg = SAFEXCEL_3DES; /* override default */
1920 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1921 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1922 	return 0;
1923 }
1924 
/* authenc(hmac(sha1),cbc(des3_ede)) AEAD template registered with the crypto API */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1949 
1950 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1951 {
1952 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1953 
1954 	safexcel_aead_sha256_cra_init(tfm);
1955 	ctx->alg = SAFEXCEL_3DES; /* override default */
1956 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1957 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1958 	return 0;
1959 }
1960 
/* AEAD: 3DES-EDE-CBC encryption authenticated with HMAC-SHA256 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* 3DES capability is covered by the DES engine mask */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1985 
1986 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1987 {
1988 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1989 
1990 	safexcel_aead_sha224_cra_init(tfm);
1991 	ctx->alg = SAFEXCEL_3DES; /* override default */
1992 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1993 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1994 	return 0;
1995 }
1996 
/* AEAD: 3DES-EDE-CBC encryption authenticated with HMAC-SHA224 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-224 is advertised via the SHA2-256 mask, 3DES via the DES mask */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2021 
2022 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2023 {
2024 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2025 
2026 	safexcel_aead_sha512_cra_init(tfm);
2027 	ctx->alg = SAFEXCEL_3DES; /* override default */
2028 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2029 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2030 	return 0;
2031 }
2032 
/* AEAD: 3DES-EDE-CBC encryption authenticated with HMAC-SHA512 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* 3DES capability is covered by the DES engine mask */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2057 
2058 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2059 {
2060 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2061 
2062 	safexcel_aead_sha384_cra_init(tfm);
2063 	ctx->alg = SAFEXCEL_3DES; /* override default */
2064 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2065 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2066 	return 0;
2067 }
2068 
/* AEAD: 3DES-EDE-CBC encryption authenticated with HMAC-SHA384 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-384 is advertised via the SHA2-512 mask, 3DES via the DES mask */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2093 
2094 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2095 {
2096 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2097 
2098 	safexcel_aead_sha1_cra_init(tfm);
2099 	ctx->alg = SAFEXCEL_DES; /* override default */
2100 	ctx->blocksz = DES_BLOCK_SIZE;
2101 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2102 	return 0;
2103 }
2104 
/* AEAD: DES-CBC encryption authenticated with HMAC-SHA1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2129 
2130 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2131 {
2132 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2133 
2134 	safexcel_aead_sha256_cra_init(tfm);
2135 	ctx->alg = SAFEXCEL_DES; /* override default */
2136 	ctx->blocksz = DES_BLOCK_SIZE;
2137 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2138 	return 0;
2139 }
2140 
/* AEAD: DES-CBC encryption authenticated with HMAC-SHA256 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2165 
2166 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2167 {
2168 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2169 
2170 	safexcel_aead_sha224_cra_init(tfm);
2171 	ctx->alg = SAFEXCEL_DES; /* override default */
2172 	ctx->blocksz = DES_BLOCK_SIZE;
2173 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2174 	return 0;
2175 }
2176 
/* AEAD: DES-CBC encryption authenticated with HMAC-SHA224 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-224 capability is advertised via the SHA2-256 engine mask */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2201 
2202 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2203 {
2204 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2205 
2206 	safexcel_aead_sha512_cra_init(tfm);
2207 	ctx->alg = SAFEXCEL_DES; /* override default */
2208 	ctx->blocksz = DES_BLOCK_SIZE;
2209 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2210 	return 0;
2211 }
2212 
/* AEAD: DES-CBC encryption authenticated with HMAC-SHA512 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2237 
2238 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2239 {
2240 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2241 
2242 	safexcel_aead_sha384_cra_init(tfm);
2243 	ctx->alg = SAFEXCEL_DES; /* override default */
2244 	ctx->blocksz = DES_BLOCK_SIZE;
2245 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2246 	return 0;
2247 }
2248 
/* AEAD: DES-CBC encryption authenticated with HMAC-SHA384 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-384 capability is advertised via the SHA2-512 engine mask */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2273 
2274 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2275 {
2276 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2277 
2278 	safexcel_aead_sha1_cra_init(tfm);
2279 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2280 	return 0;
2281 }
2282 
/* AEAD: AES-CTR (RFC3686) encryption authenticated with HMAC-SHA1 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2307 
2308 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2309 {
2310 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2311 
2312 	safexcel_aead_sha256_cra_init(tfm);
2313 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2314 	return 0;
2315 }
2316 
/* AEAD: AES-CTR (RFC3686) encryption authenticated with HMAC-SHA256 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2341 
2342 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2343 {
2344 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2345 
2346 	safexcel_aead_sha224_cra_init(tfm);
2347 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2348 	return 0;
2349 }
2350 
/* AEAD: AES-CTR (RFC3686) encryption authenticated with HMAC-SHA224 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-224 capability is advertised via the SHA2-256 engine mask */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2375 
2376 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2377 {
2378 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2379 
2380 	safexcel_aead_sha512_cra_init(tfm);
2381 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2382 	return 0;
2383 }
2384 
/* AEAD: AES-CTR (RFC3686) encryption authenticated with HMAC-SHA512 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2409 
2410 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2411 {
2412 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2413 
2414 	safexcel_aead_sha384_cra_init(tfm);
2415 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2416 	return 0;
2417 }
2418 
/* AEAD: AES-CTR (RFC3686) encryption authenticated with HMAC-SHA384 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-384 capability is advertised via the SHA2-512 engine mask */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2443 
2444 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2445 					   const u8 *key, unsigned int len)
2446 {
2447 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2448 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2449 	struct safexcel_crypto_priv *priv = ctx->priv;
2450 	struct crypto_aes_ctx aes;
2451 	int ret, i;
2452 	unsigned int keylen;
2453 
2454 	/* Check for illegal XTS keys */
2455 	ret = xts_verify_key(ctfm, key, len);
2456 	if (ret)
2457 		return ret;
2458 
2459 	/* Only half of the key data is cipher key */
2460 	keylen = (len >> 1);
2461 	ret = aes_expandkey(&aes, key, keylen);
2462 	if (ret) {
2463 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2464 		return ret;
2465 	}
2466 
2467 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2468 		for (i = 0; i < keylen / sizeof(u32); i++) {
2469 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2470 				ctx->base.needs_inv = true;
2471 				break;
2472 			}
2473 		}
2474 	}
2475 
2476 	for (i = 0; i < keylen / sizeof(u32); i++)
2477 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2478 
2479 	/* The other half is the tweak key */
2480 	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2481 	if (ret) {
2482 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2483 		return ret;
2484 	}
2485 
2486 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2487 		for (i = 0; i < keylen / sizeof(u32); i++) {
2488 			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2489 			    aes.key_enc[i]) {
2490 				ctx->base.needs_inv = true;
2491 				break;
2492 			}
2493 		}
2494 	}
2495 
2496 	for (i = 0; i < keylen / sizeof(u32); i++)
2497 		ctx->key[i + keylen / sizeof(u32)] =
2498 			cpu_to_le32(aes.key_enc[i]);
2499 
2500 	ctx->key_len = keylen << 1;
2501 
2502 	memzero_explicit(&aes, sizeof(aes));
2503 	return 0;
2504 }
2505 
2506 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2507 {
2508 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2509 
2510 	safexcel_skcipher_cra_init(tfm);
2511 	ctx->alg  = SAFEXCEL_AES;
2512 	ctx->blocksz = AES_BLOCK_SIZE;
2513 	ctx->xts  = 1;
2514 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2515 	return 0;
2516 }
2517 
2518 static int safexcel_encrypt_xts(struct skcipher_request *req)
2519 {
2520 	if (req->cryptlen < XTS_BLOCK_SIZE)
2521 		return -EINVAL;
2522 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2523 				  SAFEXCEL_ENCRYPT);
2524 }
2525 
2526 static int safexcel_decrypt_xts(struct skcipher_request *req)
2527 {
2528 	if (req->cryptlen < XTS_BLOCK_SIZE)
2529 		return -EINVAL;
2530 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2531 				  SAFEXCEL_DECRYPT);
2532 }
2533 
/* Skcipher: AES in XTS mode (double-length key: cipher key + tweak key) */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2560 
2561 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2562 				    unsigned int len)
2563 {
2564 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2565 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2566 	struct safexcel_crypto_priv *priv = ctx->priv;
2567 	struct crypto_aes_ctx aes;
2568 	u32 hashkey[AES_BLOCK_SIZE >> 2];
2569 	int ret, i;
2570 
2571 	ret = aes_expandkey(&aes, key, len);
2572 	if (ret) {
2573 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2574 		memzero_explicit(&aes, sizeof(aes));
2575 		return ret;
2576 	}
2577 
2578 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2579 		for (i = 0; i < len / sizeof(u32); i++) {
2580 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2581 				ctx->base.needs_inv = true;
2582 				break;
2583 			}
2584 		}
2585 	}
2586 
2587 	for (i = 0; i < len / sizeof(u32); i++)
2588 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2589 
2590 	ctx->key_len = len;
2591 
2592 	/* Compute hash key by encrypting zeroes with cipher key */
2593 	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2594 	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2595 				CRYPTO_TFM_REQ_MASK);
2596 	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2597 	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
2598 			      CRYPTO_TFM_RES_MASK);
2599 	if (ret)
2600 		return ret;
2601 
2602 	memset(hashkey, 0, AES_BLOCK_SIZE);
2603 	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2604 
2605 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2606 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2607 			if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
2608 				ctx->base.needs_inv = true;
2609 				break;
2610 			}
2611 		}
2612 	}
2613 
2614 	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2615 		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
2616 
2617 	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2618 	memzero_explicit(&aes, sizeof(aes));
2619 	return 0;
2620 }
2621 
2622 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2623 {
2624 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2625 
2626 	safexcel_aead_cra_init(tfm);
2627 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2628 	ctx->state_sz = GHASH_BLOCK_SIZE;
2629 	ctx->xcm = EIP197_XCM_MODE_GCM;
2630 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2631 
2632 	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2633 	return PTR_ERR_OR_ZERO(ctx->hkaes);
2634 }
2635 
2636 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2637 {
2638 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2639 
2640 	crypto_free_cipher(ctx->hkaes);
2641 	safexcel_aead_cra_exit(tfm);
2642 }
2643 
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Defer GCM tag-length validation to the generic helper */
	return crypto_gcm_check_authsize(authsize);
}
2649 
/* AEAD: AES in Galois/Counter Mode (GCM) */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream-cipher-like: no block alignment requirement */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2675 
2676 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2677 				    unsigned int len)
2678 {
2679 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2680 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2681 	struct safexcel_crypto_priv *priv = ctx->priv;
2682 	struct crypto_aes_ctx aes;
2683 	int ret, i;
2684 
2685 	ret = aes_expandkey(&aes, key, len);
2686 	if (ret) {
2687 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2688 		memzero_explicit(&aes, sizeof(aes));
2689 		return ret;
2690 	}
2691 
2692 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2693 		for (i = 0; i < len / sizeof(u32); i++) {
2694 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2695 				ctx->base.needs_inv = true;
2696 				break;
2697 			}
2698 		}
2699 	}
2700 
2701 	for (i = 0; i < len / sizeof(u32); i++) {
2702 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2703 		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2704 			cpu_to_be32(aes.key_enc[i]);
2705 	}
2706 
2707 	ctx->key_len = len;
2708 	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2709 
2710 	if (len == AES_KEYSIZE_192)
2711 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2712 	else if (len == AES_KEYSIZE_256)
2713 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2714 	else
2715 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2716 
2717 	memzero_explicit(&aes, sizeof(aes));
2718 	return 0;
2719 }
2720 
2721 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2722 {
2723 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2724 
2725 	safexcel_aead_cra_init(tfm);
2726 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2727 	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2728 	ctx->xcm = EIP197_XCM_MODE_CCM;
2729 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2730 	ctx->ctrinit = 0;
2731 	return 0;
2732 }
2733 
static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/*
	 * CCM allows any even tag length from 4 to 16 bytes, i.e. the
	 * set {4, 6, 8, 10, 12, 14, 16} (same rule as crypto/ccm.c).
	 */
	if (authsize < 4 || authsize > 16 || (authsize & 1))
		return -EINVAL;

	return 0;
}
2753 
2754 static int safexcel_ccm_encrypt(struct aead_request *req)
2755 {
2756 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2757 
2758 	if (req->iv[0] < 1 || req->iv[0] > 7)
2759 		return -EINVAL;
2760 
2761 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2762 }
2763 
2764 static int safexcel_ccm_decrypt(struct aead_request *req)
2765 {
2766 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2767 
2768 	if (req->iv[0] < 1 || req->iv[0] > 7)
2769 		return -EINVAL;
2770 
2771 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2772 }
2773 
/* AEAD: AES in Counter with CBC-MAC mode (CCM) */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream-cipher-like: no block alignment requirement */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2799 
2800 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2801 				     const u8 *key)
2802 {
2803 	struct safexcel_crypto_priv *priv = ctx->priv;
2804 
2805 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2806 		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2807 			ctx->base.needs_inv = true;
2808 
2809 	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2810 	ctx->key_len = CHACHA_KEY_SIZE;
2811 }
2812 
2813 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2814 					     const u8 *key, unsigned int len)
2815 {
2816 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2817 
2818 	if (len != CHACHA_KEY_SIZE) {
2819 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2820 		return -EINVAL;
2821 	}
2822 	safexcel_chacha20_setkey(ctx, key);
2823 
2824 	return 0;
2825 }
2826 
2827 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2828 {
2829 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2830 
2831 	safexcel_skcipher_cra_init(tfm);
2832 	ctx->alg  = SAFEXCEL_CHACHA20;
2833 	ctx->ctrinit = 0;
2834 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2835 	return 0;
2836 }
2837 
/* Skcipher: plain ChaCha20 stream cipher */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2863 
2864 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2865 				    const u8 *key, unsigned int len)
2866 {
2867 	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2868 
2869 	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2870 	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2871 		/* ESP variant has nonce appended to key */
2872 		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2873 		ctx->nonce = *(u32 *)(key + len);
2874 	}
2875 	if (len != CHACHA_KEY_SIZE) {
2876 		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2877 		return -EINVAL;
2878 	}
2879 	safexcel_chacha20_setkey(ctx, key);
2880 
2881 	return 0;
2882 }
2883 
2884 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2885 					 unsigned int authsize)
2886 {
2887 	if (authsize != POLY1305_DIGEST_SIZE)
2888 		return -EINVAL;
2889 	return 0;
2890 }
2891 
/*
 * Process a chacha20-poly1305 AEAD request, either on the HW engine or,
 * for degenerate "small" requests the engine cannot handle, via the
 * software fallback cipher allocated at cra_init time.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	/*
	 * NOTE: creq and subreq alias the very same request context; the
	 * reqsize was set to the max of both layouts by
	 * safexcel_aead_fallback_cra_init(), so only one of the two views
	 * is ever live on any given code path below.
	 */
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	/* one extra word to hold the appended ESP nonce, if any */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* propagate the fallback's request flags to the caller */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Mirror the original request onto the fallback cipher */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2944 
2945 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2946 {
2947 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2948 }
2949 
2950 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2951 {
2952 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2953 }
2954 
/*
 * Common cra_init for AEADs that need a software fallback cipher for
 * corner cases the HW cannot handle (e.g. zero length AAD+payload).
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/*
	 * The request context must be able to hold either our own request
	 * state or a full subrequest for the fallback cipher, whichever
	 * is larger - the two are overlaid on the same memory.
	 */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
2976 
2977 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2978 {
2979 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2980 
2981 	safexcel_aead_fallback_cra_init(tfm);
2982 	ctx->alg  = SAFEXCEL_CHACHA20;
2983 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2984 		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2985 	ctx->ctrinit = 0;
2986 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2987 	ctx->state_sz = 0; /* Precomputed by HW */
2988 	return 0;
2989 }
2990 
2991 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2992 {
2993 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2994 
2995 	crypto_free_aead(ctx->fback);
2996 	safexcel_aead_cra_exit(tfm);
2997 }
2998 
/* RFC7539 chacha20-poly1305 AEAD, with SW fallback for tiny requests */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3026 
3027 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3028 {
3029 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3030 	int ret;
3031 
3032 	ret = safexcel_aead_chachapoly_cra_init(tfm);
3033 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3034 	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3035 	return ret;
3036 }
3037 
/* RFC7539 ESP variant: nonce is part of the key, IV is in the AAD */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3065 
3066 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3067 					const u8 *key, unsigned int len)
3068 {
3069 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3070 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3071 	struct safexcel_crypto_priv *priv = ctx->priv;
3072 
3073 	if (len != SM4_KEY_SIZE) {
3074 		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
3075 		return -EINVAL;
3076 	}
3077 
3078 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3079 		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3080 			ctx->base.needs_inv = true;
3081 
3082 	memcpy(ctx->key, key, SM4_KEY_SIZE);
3083 	ctx->key_len = SM4_KEY_SIZE;
3084 
3085 	return 0;
3086 }
3087 
3088 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3089 {
3090 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3091 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3092 		return -EINVAL;
3093 	else
3094 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3095 					  SAFEXCEL_ENCRYPT);
3096 }
3097 
3098 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3099 {
3100 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3101 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3102 		return -EINVAL;
3103 	else
3104 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3105 					  SAFEXCEL_DECRYPT);
3106 }
3107 
3108 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3109 {
3110 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3111 
3112 	safexcel_skcipher_cra_init(tfm);
3113 	ctx->alg  = SAFEXCEL_SM4;
3114 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3115 	ctx->blocksz = 0;
3116 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3117 	return 0;
3118 }
3119 
/* SM4-ECB skcipher; block-multiple lengths enforced in SW (HW bug WA) */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3144 
3145 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3146 {
3147 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3148 
3149 	safexcel_skcipher_cra_init(tfm);
3150 	ctx->alg  = SAFEXCEL_SM4;
3151 	ctx->blocksz = SM4_BLOCK_SIZE;
3152 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3153 	return 0;
3154 }
3155 
/* SM4-CBC skcipher; block-multiple lengths enforced in SW (HW bug WA) */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3181 
3182 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3183 {
3184 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3185 
3186 	safexcel_skcipher_cra_init(tfm);
3187 	ctx->alg  = SAFEXCEL_SM4;
3188 	ctx->blocksz = SM4_BLOCK_SIZE;
3189 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3190 	return 0;
3191 }
3192 
/* SM4-OFB stream mode: no blocksize restriction, handled fully by HW */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3218 
3219 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3220 {
3221 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3222 
3223 	safexcel_skcipher_cra_init(tfm);
3224 	ctx->alg  = SAFEXCEL_SM4;
3225 	ctx->blocksz = SM4_BLOCK_SIZE;
3226 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3227 	return 0;
3228 }
3229 
/* SM4-CFB stream mode: no blocksize restriction, handled fully by HW */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3255 
3256 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3257 					   const u8 *key, unsigned int len)
3258 {
3259 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3260 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3261 
3262 	/* last 4 bytes of key are the nonce! */
3263 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3264 	/* exclude the nonce here */
3265 	len -= CTR_RFC3686_NONCE_SIZE;
3266 
3267 	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3268 }
3269 
3270 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3271 {
3272 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3273 
3274 	safexcel_skcipher_cra_init(tfm);
3275 	ctx->alg  = SAFEXCEL_SM4;
3276 	ctx->blocksz = SM4_BLOCK_SIZE;
3277 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3278 	return 0;
3279 }
3280 
/* RFC3686 SM4-CTR: 4 byte nonce carried in the key, 8 byte IV */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3307 
3308 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3309 {
3310 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3311 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3312 		return -EINVAL;
3313 
3314 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3315 				  SAFEXCEL_ENCRYPT);
3316 }
3317 
3318 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3319 {
3320 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3321 
3322 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3323 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3324 		return -EINVAL;
3325 
3326 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3327 				  SAFEXCEL_DECRYPT);
3328 }
3329 
3330 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3331 {
3332 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3333 
3334 	safexcel_aead_cra_init(tfm);
3335 	ctx->alg = SAFEXCEL_SM4;
3336 	ctx->blocksz = SM4_BLOCK_SIZE;
3337 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3338 	ctx->state_sz = SHA1_DIGEST_SIZE;
3339 	return 0;
3340 }
3341 
/* authenc HMAC-SHA1 over SM4-CBC; block-multiple lengths enforced in SW */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3366 
3367 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3368 					 const u8 *key, unsigned int len)
3369 {
3370 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3371 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3372 
3373 	/* Keep fallback cipher synchronized */
3374 	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3375 	       safexcel_aead_setkey(ctfm, key, len);
3376 }
3377 
3378 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3379 					      unsigned int authsize)
3380 {
3381 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3382 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3383 
3384 	/* Keep fallback cipher synchronized */
3385 	return crypto_aead_setauthsize(ctx->fback, authsize);
3386 }
3387 
3388 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3389 					enum safexcel_cipher_direction dir)
3390 {
3391 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
3392 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3393 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3394 	struct aead_request *subreq = aead_request_ctx(req);
3395 
3396 	aead_request_set_tfm(subreq, ctx->fback);
3397 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3398 				  req->base.data);
3399 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3400 			       req->iv);
3401 	aead_request_set_ad(subreq, req->assoclen);
3402 
3403 	return (dir ==  SAFEXCEL_ENCRYPT) ?
3404 		crypto_aead_encrypt(subreq) :
3405 		crypto_aead_decrypt(subreq);
3406 }
3407 
3408 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3409 {
3410 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3411 
3412 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3413 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3414 		return -EINVAL;
3415 	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3416 		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3417 
3418 	/* HW cannot do full (AAD+payload) zero length, use fallback */
3419 	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3420 }
3421 
3422 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3423 {
3424 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3425 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3426 
3427 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3428 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3429 		return -EINVAL;
3430 	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3431 		/* If input length > 0 only */
3432 		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3433 
3434 	/* HW cannot do full (AAD+payload) zero length, use fallback */
3435 	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3436 }
3437 
3438 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3439 {
3440 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3441 
3442 	safexcel_aead_fallback_cra_init(tfm);
3443 	ctx->alg = SAFEXCEL_SM4;
3444 	ctx->blocksz = SM4_BLOCK_SIZE;
3445 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3446 	ctx->state_sz = SM3_DIGEST_SIZE;
3447 	return 0;
3448 }
3449 
/* authenc HMAC-SM3 over SM4-CBC, with SW fallback for zero length input */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3476 
3477 static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3478 {
3479 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3480 
3481 	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3482 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3483 	return 0;
3484 }
3485 
/* authenc HMAC-SHA1 over RFC3686 SM4-CTR */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3510 
3511 static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3512 {
3513 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3514 
3515 	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3516 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3517 	return 0;
3518 }
3519 
/* authenc HMAC-SM3 over RFC3686 SM4-CTR */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3544 
3545 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3546 				       unsigned int len)
3547 {
3548 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3549 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3550 
3551 	/* last 4 bytes of key are the nonce! */
3552 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3553 
3554 	len -= CTR_RFC3686_NONCE_SIZE;
3555 	return safexcel_aead_gcm_setkey(ctfm, key, len);
3556 }
3557 
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Defer to the generic RFC4106 tag size validation */
	return crypto_rfc4106_check_authsize(authsize);
}
3563 
3564 static int safexcel_rfc4106_encrypt(struct aead_request *req)
3565 {
3566 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3567 	       safexcel_aead_encrypt(req);
3568 }
3569 
3570 static int safexcel_rfc4106_decrypt(struct aead_request *req)
3571 {
3572 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3573 	       safexcel_aead_decrypt(req);
3574 }
3575 
3576 static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3577 {
3578 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3579 	int ret;
3580 
3581 	ret = safexcel_aead_gcm_cra_init(tfm);
3582 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3583 	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3584 	return ret;
3585 }
3586 
3587 struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3588 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3589 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3590 	.alg.aead = {
3591 		.setkey = safexcel_rfc4106_gcm_setkey,
3592 		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3593 		.encrypt = safexcel_rfc4106_encrypt,
3594 		.decrypt = safexcel_rfc4106_decrypt,
3595 		.ivsize = GCM_RFC4106_IV_SIZE,
3596 		.maxauthsize = GHASH_DIGEST_SIZE,
3597 		.base = {
3598 			.cra_name = "rfc4106(gcm(aes))",
3599 			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3600 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3601 			.cra_flags = CRYPTO_ALG_ASYNC |
3602 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3603 			.cra_blocksize = 1,
3604 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3605 			.cra_alignmask = 0,
3606 			.cra_init = safexcel_rfc4106_gcm_cra_init,
3607 			.cra_exit = safexcel_aead_gcm_cra_exit,
3608 		},
3609 	},
3610 };
3611 
3612 static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3613 					    unsigned int authsize)
3614 {
3615 	if (authsize != GHASH_DIGEST_SIZE)
3616 		return -EINVAL;
3617 
3618 	return 0;
3619 }
3620 
3621 static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3622 {
3623 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3624 	int ret;
3625 
3626 	ret = safexcel_aead_gcm_cra_init(tfm);
3627 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3628 	return ret;
3629 }
3630 
3631 struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3632 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3633 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3634 	.alg.aead = {
3635 		.setkey = safexcel_rfc4106_gcm_setkey,
3636 		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
3637 		.encrypt = safexcel_rfc4106_encrypt,
3638 		.decrypt = safexcel_rfc4106_decrypt,
3639 		.ivsize = GCM_RFC4543_IV_SIZE,
3640 		.maxauthsize = GHASH_DIGEST_SIZE,
3641 		.base = {
3642 			.cra_name = "rfc4543(gcm(aes))",
3643 			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
3644 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3645 			.cra_flags = CRYPTO_ALG_ASYNC |
3646 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3647 			.cra_blocksize = 1,
3648 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3649 			.cra_alignmask = 0,
3650 			.cra_init = safexcel_rfc4543_gcm_cra_init,
3651 			.cra_exit = safexcel_aead_gcm_cra_exit,
3652 		},
3653 	},
3654 };
3655 
/*
 * RFC4309 AES-CCM setkey: builds the CCM B0 flags/nonce word from the
 * 3 byte salt appended to the key, then defers to the common CCM setkey.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	/* exclude the nonce from the actual cipher key */
	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3672 
3673 static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3674 					    unsigned int authsize)
3675 {
3676 	/* Borrowed from crypto/ccm.c */
3677 	switch (authsize) {
3678 	case 8:
3679 	case 12:
3680 	case 16:
3681 		break;
3682 	default:
3683 		return -EINVAL;
3684 	}
3685 
3686 	return 0;
3687 }
3688 
3689 static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3690 {
3691 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3692 
3693 	/* Borrowed from crypto/ccm.c */
3694 	if (req->assoclen != 16 && req->assoclen != 20)
3695 		return -EINVAL;
3696 
3697 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3698 }
3699 
3700 static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3701 {
3702 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3703 
3704 	/* Borrowed from crypto/ccm.c */
3705 	if (req->assoclen != 16 && req->assoclen != 20)
3706 		return -EINVAL;
3707 
3708 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3709 }
3710 
3711 static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3712 {
3713 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3714 	int ret;
3715 
3716 	ret = safexcel_aead_ccm_cra_init(tfm);
3717 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3718 	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3719 	return ret;
3720 }
3721 
/* RFC4309 AES-CCM for IPsec ESP: 3 byte salt in the key, 8 byte IV */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3747