1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
6 */
7
8 #include <linux/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha1.h>
22 #include <crypto/sha2.h>
23 #include <crypto/sm3.h>
24 #include <crypto/sm4.h>
25 #include <crypto/xts.h>
26 #include <crypto/skcipher.h>
27 #include <crypto/internal/aead.h>
28 #include <crypto/internal/skcipher.h>
29
30 #include "safexcel.h"
31
32 enum safexcel_cipher_direction {
33 SAFEXCEL_ENCRYPT,
34 SAFEXCEL_DECRYPT,
35 };
36
37 enum safexcel_cipher_alg {
38 SAFEXCEL_DES,
39 SAFEXCEL_3DES,
40 SAFEXCEL_AES,
41 SAFEXCEL_CHACHA20,
42 SAFEXCEL_SM4,
43 };
44
45 struct safexcel_cipher_ctx {
46 struct safexcel_context base;
47 struct safexcel_crypto_priv *priv;
48
49 u32 mode;
50 enum safexcel_cipher_alg alg;
51 	u8 aead; /* !=0=AEAD, 2=IPsec ESP AEAD, 3=IPsec ESP GMAC */
52 u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
53 u8 aadskip;
54 u8 blocksz;
55 u32 ivmask;
56 u32 ctrinit;
57
58 __le32 key[16];
59 u32 nonce;
60 unsigned int key_len, xts;
61
62 /* All the below is AEAD specific */
63 u32 hash_alg;
64 u32 state_sz;
65
66 struct crypto_aead *fback;
67 };
68
69 struct safexcel_cipher_req {
70 enum safexcel_cipher_direction direction;
71 /* Number of result descriptors associated to the request */
72 unsigned int rdescs;
73 bool needs_inv;
74 int nr_src, nr_dst;
75 };
76
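/*
 * Write the IV (plus nonce and counter for CTR/ChaCha modes) into the
 * command descriptor token area. Returns the number of 32-bit token
 * words consumed.
 */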
77 static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
78 struct safexcel_command_desc *cdesc)
79 {
80 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
81 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
82 /* 32 bit nonce */
83 cdesc->control_data.token[0] = ctx->nonce;
84 /* 64 bit IV part */
85 memcpy(&cdesc->control_data.token[1], iv, 8);
86 /* 32 bit counter, start at 0 or 1 (big endian!) */
87 cdesc->control_data.token[3] =
88 (__force u32)cpu_to_be32(ctx->ctrinit);
89 return 4;
90 }
91 if (ctx->alg == SAFEXCEL_CHACHA20) {
92 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
93 /* 96 bit nonce part */
94 memcpy(&cdesc->control_data.token[0], &iv[4], 12);
95 /* 32 bit counter */
96 cdesc->control_data.token[3] = *(u32 *)iv;
97 return 4;
98 }
99
100 cdesc->control_data.options |= ctx->ivmask;
101 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
102 return ctx->blocksz / sizeof(u32);
103 }
104
105 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
106 struct safexcel_command_desc *cdesc,
107 struct safexcel_token *atoken,
108 u32 length)
109 {
110 struct safexcel_token *token;
111 int ivlen;
112
113 ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
114 if (ivlen == 4) {
115 /* No space in cdesc, instruction moves to atoken */
116 cdesc->additional_cdata_size = 1;
117 token = atoken;
118 } else {
119 /* Everything fits in cdesc */
120 token = (struct safexcel_token *)(cdesc->control_data.token + 2);
121 /* Need to pad with NOP */
122 eip197_noop_token(&token[1]);
123 }
124
125 token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
126 token->packet_length = length;
127 token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
128 EIP197_TOKEN_STAT_LAST_HASH;
129 token->instructions = EIP197_TOKEN_INS_LAST |
130 EIP197_TOKEN_INS_TYPE_CRYPTO |
131 EIP197_TOKEN_INS_TYPE_OUTPUT;
132 }
133
134 static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
135 struct safexcel_command_desc *cdesc)
136 {
137 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
138 ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
139 /* 32 bit nonce */
140 cdesc->control_data.token[0] = ctx->nonce;
141 /* 64 bit IV part */
142 memcpy(&cdesc->control_data.token[1], iv, 8);
143 /* 32 bit counter, start at 0 or 1 (big endian!) */
144 cdesc->control_data.token[3] =
145 (__force u32)cpu_to_be32(ctx->ctrinit);
146 return;
147 }
148 if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
149 /* 96 bit IV part */
150 memcpy(&cdesc->control_data.token[0], iv, 12);
151 /* 32 bit counter, start at 0 or 1 (big endian!) */
152 cdesc->control_data.token[3] =
153 (__force u32)cpu_to_be32(ctx->ctrinit);
154 return;
155 }
156 /* CBC */
157 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
158 }
159
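/*
 * Build the token instruction stream for an AEAD request: IV setup
 * (including the CBC-MAC B0 block for CCM), AAD hashing, the cipher/hash
 * directive for the payload, and ICV insertion (encrypt) or retrieval plus
 * verification (decrypt). Stores the final token length in
 * additional_cdata_size.
 */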
160 static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
161 struct safexcel_command_desc *cdesc,
162 struct safexcel_token *atoken,
163 enum safexcel_cipher_direction direction,
164 u32 cryptlen, u32 assoclen, u32 digestsize)
165 {
166 struct safexcel_token *aadref;
167 int atoksize = 2; /* Start with minimum size */
168 int assocadj = assoclen - ctx->aadskip, aadalign;
169
170 /* Always 4 dwords of embedded IV for AEAD modes */
171 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
172
173 if (direction == SAFEXCEL_DECRYPT)
174 cryptlen -= digestsize;
175
176 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
177 /* Construct IV block B0 for the CBC-MAC */
178 u8 *final_iv = (u8 *)cdesc->control_data.token;
179 u8 *cbcmaciv = (u8 *)&atoken[1];
180 __le32 *aadlen = (__le32 *)&atoken[5];
181
182 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
183 /* Length + nonce */
184 cdesc->control_data.token[0] = ctx->nonce;
185 /* Fixup flags byte */
186 *(__le32 *)cbcmaciv =
187 cpu_to_le32(ctx->nonce |
188 ((assocadj > 0) << 6) |
189 ((digestsize - 2) << 2));
190 /* 64 bit IV part */
191 memcpy(&cdesc->control_data.token[1], iv, 8);
192 memcpy(cbcmaciv + 4, iv, 8);
193 /* Start counter at 0 */
194 cdesc->control_data.token[3] = 0;
195 /* Message length */
196 *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
197 } else {
198 /* Variable length IV part */
199 memcpy(final_iv, iv, 15 - iv[0]);
200 memcpy(cbcmaciv, iv, 15 - iv[0]);
201 /* Start variable length counter at 0 */
202 memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
203 memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
204 /* fixup flags byte */
205 cbcmaciv[0] |= ((assocadj > 0) << 6) |
206 ((digestsize - 2) << 2);
207 /* insert lower 2 bytes of message length */
208 cbcmaciv[14] = cryptlen >> 8;
209 cbcmaciv[15] = cryptlen & 255;
210 }
211
212 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
213 atoken->packet_length = AES_BLOCK_SIZE +
214 ((assocadj > 0) << 1);
215 atoken->stat = 0;
216 atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
217 EIP197_TOKEN_INS_TYPE_HASH;
218
219 if (likely(assocadj)) {
220 *aadlen = cpu_to_le32((assocadj >> 8) |
221 (assocadj & 255) << 8);
222 atoken += 6;
223 atoksize += 7;
224 } else {
225 atoken += 5;
226 atoksize += 6;
227 }
228
229 /* Process AAD data */
230 aadref = atoken;
231 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
232 atoken->packet_length = assocadj;
233 atoken->stat = 0;
234 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
235 atoken++;
236
237 /* For CCM only, align AAD data towards hash engine */
238 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
239 aadalign = (assocadj + 2) & 15;
240 atoken->packet_length = assocadj && aadalign ?
241 16 - aadalign :
242 0;
243 if (likely(cryptlen)) {
244 atoken->stat = 0;
245 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
246 } else {
247 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
248 atoken->instructions = EIP197_TOKEN_INS_LAST |
249 EIP197_TOKEN_INS_TYPE_HASH;
250 }
251 } else {
252 safexcel_aead_iv(ctx, iv, cdesc);
253
254 /* Process AAD data */
255 aadref = atoken;
256 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
257 atoken->packet_length = assocadj;
258 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
259 atoken->instructions = EIP197_TOKEN_INS_LAST |
260 EIP197_TOKEN_INS_TYPE_HASH;
261 }
262 atoken++;
263
264 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
265 /* For ESP mode (and not GMAC), skip over the IV */
266 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
267 atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
268 atoken->stat = 0;
269 atoken->instructions = 0;
270 atoken++;
271 atoksize++;
272 } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
273 direction == SAFEXCEL_DECRYPT)) {
274 /* Poly-chacha decryption needs a dummy NOP here ... */
275 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
276 atoken->packet_length = 16; /* According to Op Manual */
277 atoken->stat = 0;
278 atoken->instructions = 0;
279 atoken++;
280 atoksize++;
281 }
282
283 if (ctx->xcm) {
284 /* For GCM and CCM, obtain enc(Y0) */
285 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
286 atoken->packet_length = 0;
287 atoken->stat = 0;
288 atoken->instructions = AES_BLOCK_SIZE;
289 atoken++;
290
291 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
292 atoken->packet_length = AES_BLOCK_SIZE;
293 atoken->stat = 0;
294 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
295 EIP197_TOKEN_INS_TYPE_CRYPTO;
296 atoken++;
297 atoksize += 2;
298 }
299
300 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
301 /* Fixup stat field for AAD direction instruction */
302 aadref->stat = 0;
303
304 /* Process crypto data */
305 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
306 atoken->packet_length = cryptlen;
307
308 if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
309 /* Fixup instruction field for AAD dir instruction */
310 aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
311
312 /* Do not send to crypt engine in case of GMAC */
313 atoken->instructions = EIP197_TOKEN_INS_LAST |
314 EIP197_TOKEN_INS_TYPE_HASH |
315 EIP197_TOKEN_INS_TYPE_OUTPUT;
316 } else {
317 atoken->instructions = EIP197_TOKEN_INS_LAST |
318 EIP197_TOKEN_INS_TYPE_CRYPTO |
319 EIP197_TOKEN_INS_TYPE_HASH |
320 EIP197_TOKEN_INS_TYPE_OUTPUT;
321 }
322
323 cryptlen &= 15;
324 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
325 atoken->stat = 0;
326 /* For CCM only, pad crypto data to the hash engine */
327 atoken++;
328 atoksize++;
329 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
330 atoken->packet_length = 16 - cryptlen;
331 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
332 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
333 } else {
334 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
335 }
336 atoken++;
337 atoksize++;
338 }
339
340 if (direction == SAFEXCEL_ENCRYPT) {
341 /* Append ICV */
342 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
343 atoken->packet_length = digestsize;
344 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
345 EIP197_TOKEN_STAT_LAST_PACKET;
346 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
347 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
348 } else {
349 /* Extract ICV */
350 atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
351 atoken->packet_length = digestsize;
352 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
353 EIP197_TOKEN_STAT_LAST_PACKET;
354 atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
355 atoken++;
356 atoksize++;
357
358 /* Verify ICV */
359 atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
360 atoken->packet_length = digestsize |
361 EIP197_TOKEN_HASH_RESULT_VERIFY;
362 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
363 EIP197_TOKEN_STAT_LAST_PACKET;
364 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
365 }
366
367 /* Fixup length of the token in the command descriptor */
368 cdesc->additional_cdata_size = atoksize;
369 }
370
371 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
372 const u8 *key, unsigned int len)
373 {
374 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
375 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
376 struct safexcel_crypto_priv *priv = ctx->base.priv;
377 struct crypto_aes_ctx aes;
378 int ret, i;
379
380 ret = aes_expandkey(&aes, key, len);
381 if (ret)
382 return ret;
383
384 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
385 for (i = 0; i < len / sizeof(u32); i++) {
386 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
387 ctx->base.needs_inv = true;
388 break;
389 }
390 }
391 }
392
393 for (i = 0; i < len / sizeof(u32); i++)
394 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
395
396 ctx->key_len = len;
397
398 memzero_explicit(&aes, sizeof(aes));
399 return 0;
400 }
401
402 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
403 unsigned int len)
404 {
405 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
406 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
407 struct safexcel_crypto_priv *priv = ctx->base.priv;
408 struct crypto_authenc_keys keys;
409 struct crypto_aes_ctx aes;
410 int err = -EINVAL, i;
411 const char *alg;
412
413 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
414 goto badkey;
415
416 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
417 /* Must have at least space for the nonce here */
418 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
419 goto badkey;
420 /* last 4 bytes of key are the nonce! */
421 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
422 CTR_RFC3686_NONCE_SIZE);
423 /* exclude the nonce here */
424 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
425 }
426
427 /* Encryption key */
428 switch (ctx->alg) {
429 case SAFEXCEL_DES:
430 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
431 if (unlikely(err))
432 goto badkey;
433 break;
434 case SAFEXCEL_3DES:
435 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
436 if (unlikely(err))
437 goto badkey;
438 break;
439 case SAFEXCEL_AES:
440 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
441 if (unlikely(err))
442 goto badkey;
443 break;
444 case SAFEXCEL_SM4:
445 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
446 goto badkey;
447 break;
448 default:
449 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
450 goto badkey;
451 }
452
453 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
454 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
455 if (le32_to_cpu(ctx->key[i]) !=
456 ((u32 *)keys.enckey)[i]) {
457 ctx->base.needs_inv = true;
458 break;
459 }
460 }
461 }
462
463 /* Auth key */
464 switch (ctx->hash_alg) {
465 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
466 alg = "safexcel-sha1";
467 break;
468 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
469 alg = "safexcel-sha224";
470 break;
471 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
472 alg = "safexcel-sha256";
473 break;
474 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
475 alg = "safexcel-sha384";
476 break;
477 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
478 alg = "safexcel-sha512";
479 break;
480 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
481 alg = "safexcel-sm3";
482 break;
483 default:
484 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
485 goto badkey;
486 }
487
488 if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
489 alg, ctx->state_sz))
490 goto badkey;
491
492 /* Now copy the keys into the context */
493 for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
494 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
495 ctx->key_len = keys.enckeylen;
496
497 memzero_explicit(&keys, sizeof(keys));
498 return 0;
499
500 badkey:
501 memzero_explicit(&keys, sizeof(keys));
502 return err;
503 }
504
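/*
 * Fill in the control words of the first command descriptor: key and
 * digest configuration, operation type for the requested direction, and
 * the cipher algorithm/key size. Returns -EINVAL for unsupported AES key
 * sizes.
 */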
505 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
506 struct crypto_async_request *async,
507 struct safexcel_cipher_req *sreq,
508 struct safexcel_command_desc *cdesc)
509 {
510 struct safexcel_crypto_priv *priv = ctx->base.priv;
511 int ctrl_size = ctx->key_len / sizeof(u32);
512
513 cdesc->control_data.control1 = ctx->mode;
514
515 if (ctx->aead) {
516 /* Take in account the ipad+opad digests */
517 if (ctx->xcm) {
518 ctrl_size += ctx->state_sz / sizeof(u32);
519 cdesc->control_data.control0 =
520 CONTEXT_CONTROL_KEY_EN |
521 CONTEXT_CONTROL_DIGEST_XCM |
522 ctx->hash_alg |
523 CONTEXT_CONTROL_SIZE(ctrl_size);
524 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
525 /* Chacha20-Poly1305 */
526 cdesc->control_data.control0 =
527 CONTEXT_CONTROL_KEY_EN |
528 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
529 (sreq->direction == SAFEXCEL_ENCRYPT ?
530 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
531 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
532 ctx->hash_alg |
533 CONTEXT_CONTROL_SIZE(ctrl_size);
534 return 0;
535 } else {
536 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
537 cdesc->control_data.control0 =
538 CONTEXT_CONTROL_KEY_EN |
539 CONTEXT_CONTROL_DIGEST_HMAC |
540 ctx->hash_alg |
541 CONTEXT_CONTROL_SIZE(ctrl_size);
542 }
543
544 if (sreq->direction == SAFEXCEL_ENCRYPT &&
545 (ctx->xcm == EIP197_XCM_MODE_CCM ||
546 ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
547 cdesc->control_data.control0 |=
548 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
549 else if (sreq->direction == SAFEXCEL_ENCRYPT)
550 cdesc->control_data.control0 |=
551 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
552 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
553 cdesc->control_data.control0 |=
554 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
555 else
556 cdesc->control_data.control0 |=
557 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
558 } else {
559 if (sreq->direction == SAFEXCEL_ENCRYPT)
560 cdesc->control_data.control0 =
561 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
562 CONTEXT_CONTROL_KEY_EN |
563 CONTEXT_CONTROL_SIZE(ctrl_size);
564 else
565 cdesc->control_data.control0 =
566 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
567 CONTEXT_CONTROL_KEY_EN |
568 CONTEXT_CONTROL_SIZE(ctrl_size);
569 }
570
571 if (ctx->alg == SAFEXCEL_DES) {
572 cdesc->control_data.control0 |=
573 CONTEXT_CONTROL_CRYPTO_ALG_DES;
574 } else if (ctx->alg == SAFEXCEL_3DES) {
575 cdesc->control_data.control0 |=
576 CONTEXT_CONTROL_CRYPTO_ALG_3DES;
577 } else if (ctx->alg == SAFEXCEL_AES) {
578 switch (ctx->key_len >> ctx->xts) {
579 case AES_KEYSIZE_128:
580 cdesc->control_data.control0 |=
581 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
582 break;
583 case AES_KEYSIZE_192:
584 cdesc->control_data.control0 |=
585 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
586 break;
587 case AES_KEYSIZE_256:
588 cdesc->control_data.control0 |=
589 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
590 break;
591 default:
592 dev_err(priv->dev, "aes keysize not supported: %u\n",
593 ctx->key_len >> ctx->xts);
594 return -EINVAL;
595 }
596 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
597 cdesc->control_data.control0 |=
598 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
599 } else if (ctx->alg == SAFEXCEL_SM4) {
600 cdesc->control_data.control0 |=
601 CONTEXT_CONTROL_CRYPTO_ALG_SM4;
602 }
603
604 return 0;
605 }
606
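/*
 * Collect the result descriptors of a completed cipher request, unmap the
 * DMA scatterlists and, for CBC encryption, copy the last output block
 * back into the request IV. Returns the number of descriptors consumed.
 */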
607 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
608 struct crypto_async_request *async,
609 struct scatterlist *src,
610 struct scatterlist *dst,
611 unsigned int cryptlen,
612 struct safexcel_cipher_req *sreq,
613 bool *should_complete, int *ret)
614 {
615 struct skcipher_request *areq = skcipher_request_cast(async);
616 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
617 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
618 struct safexcel_result_desc *rdesc;
619 int ndesc = 0;
620
621 *ret = 0;
622
623 if (unlikely(!sreq->rdescs))
624 return 0;
625
626 while (sreq->rdescs--) {
627 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
628 if (IS_ERR(rdesc)) {
629 dev_err(priv->dev,
630 "cipher: result: could not retrieve the result descriptor\n");
631 *ret = PTR_ERR(rdesc);
632 break;
633 }
634
635 if (likely(!*ret))
636 *ret = safexcel_rdesc_check_errors(priv, rdesc);
637
638 ndesc++;
639 }
640
641 safexcel_complete(priv, ring);
642
643 if (src == dst) {
644 if (sreq->nr_src > 0)
645 dma_unmap_sg(priv->dev, src, sreq->nr_src,
646 DMA_BIDIRECTIONAL);
647 } else {
648 if (sreq->nr_src > 0)
649 dma_unmap_sg(priv->dev, src, sreq->nr_src,
650 DMA_TO_DEVICE);
651 if (sreq->nr_dst > 0)
652 dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
653 DMA_FROM_DEVICE);
654 }
655
656 /*
657 * Update IV in req from last crypto output word for CBC modes
658 */
659 if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
660 (sreq->direction == SAFEXCEL_ENCRYPT)) {
661 /* For encrypt take the last output word */
662 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
663 crypto_skcipher_ivsize(skcipher),
664 (cryptlen -
665 crypto_skcipher_ivsize(skcipher)));
666 }
667
668 *should_complete = true;
669
670 return ndesc;
671 }
672
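/*
 * Map the source/destination scatterlists and build the command and result
 * descriptor chains for a cipher or AEAD request, including the context
 * control words and token. Rolls back descriptors and unmaps on error.
 */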
673 static int safexcel_send_req(struct crypto_async_request *base, int ring,
674 struct safexcel_cipher_req *sreq,
675 struct scatterlist *src, struct scatterlist *dst,
676 unsigned int cryptlen, unsigned int assoclen,
677 unsigned int digestsize, u8 *iv, int *commands,
678 int *results)
679 {
680 struct skcipher_request *areq = skcipher_request_cast(base);
681 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
682 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
683 struct safexcel_crypto_priv *priv = ctx->base.priv;
684 struct safexcel_command_desc *cdesc;
685 struct safexcel_command_desc *first_cdesc = NULL;
686 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
687 struct scatterlist *sg;
688 unsigned int totlen;
689 unsigned int totlen_src = cryptlen + assoclen;
690 unsigned int totlen_dst = totlen_src;
691 struct safexcel_token *atoken;
692 int n_cdesc = 0, n_rdesc = 0;
693 int queued, i, ret = 0;
694 bool first = true;
695
696 sreq->nr_src = sg_nents_for_len(src, totlen_src);
697
698 if (ctx->aead) {
699 /*
700 * AEAD has auth tag appended to output for encrypt and
701 * removed from the output for decrypt!
702 */
703 if (sreq->direction == SAFEXCEL_DECRYPT)
704 totlen_dst -= digestsize;
705 else
706 totlen_dst += digestsize;
707
708 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
709 &ctx->base.ipad, ctx->state_sz);
710 if (!ctx->xcm)
711 memcpy(ctx->base.ctxr->data + (ctx->key_len +
712 ctx->state_sz) / sizeof(u32), &ctx->base.opad,
713 ctx->state_sz);
714 } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
715 (sreq->direction == SAFEXCEL_DECRYPT)) {
716 /*
717 * Save IV from last crypto input word for CBC modes in decrypt
718 * direction. Need to do this first in case of inplace operation
719 * as it will be overwritten.
720 */
721 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
722 crypto_skcipher_ivsize(skcipher),
723 (totlen_src -
724 crypto_skcipher_ivsize(skcipher)));
725 }
726
727 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
728
729 /*
730 	 * Remember the actual input length; the source buffer length may be
731 	 * updated below in case of an in-place operation.
732 */
733 totlen = totlen_src;
734 queued = totlen_src;
735
736 if (src == dst) {
737 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
738 sreq->nr_dst = sreq->nr_src;
739 if (unlikely((totlen_src || totlen_dst) &&
740 (sreq->nr_src <= 0))) {
741 dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
742 max(totlen_src, totlen_dst));
743 return -EINVAL;
744 }
745 if (sreq->nr_src > 0 &&
746 !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
747 return -EIO;
748 } else {
749 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
750 dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
751 totlen_src);
752 return -EINVAL;
753 }
754
755 if (sreq->nr_src > 0 &&
756 !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
757 return -EIO;
758
759 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
760 dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
761 totlen_dst);
762 ret = -EINVAL;
763 goto unmap;
764 }
765
766 if (sreq->nr_dst > 0 &&
767 !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
768 ret = -EIO;
769 goto unmap;
770 }
771 }
772
773 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
774
775 if (!totlen) {
776 /*
777 * The EIP97 cannot deal with zero length input packets!
778 * So stuff a dummy command descriptor indicating a 1 byte
779 * (dummy) input packet, using the context record as source.
780 */
781 first_cdesc = safexcel_add_cdesc(priv, ring,
782 1, 1, ctx->base.ctxr_dma,
783 1, 1, ctx->base.ctxr_dma,
784 &atoken);
785 if (IS_ERR(first_cdesc)) {
786 /* No space left in the command descriptor ring */
787 ret = PTR_ERR(first_cdesc);
788 goto cdesc_rollback;
789 }
790 n_cdesc = 1;
791 goto skip_cdesc;
792 }
793
794 /* command descriptors */
795 for_each_sg(src, sg, sreq->nr_src, i) {
796 int len = sg_dma_len(sg);
797
798 /* Do not overflow the request */
799 if (queued < len)
800 len = queued;
801
802 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
803 !(queued - len),
804 sg_dma_address(sg), len, totlen,
805 ctx->base.ctxr_dma, &atoken);
806 if (IS_ERR(cdesc)) {
807 /* No space left in the command descriptor ring */
808 ret = PTR_ERR(cdesc);
809 goto cdesc_rollback;
810 }
811
812 if (!n_cdesc)
813 first_cdesc = cdesc;
814
815 n_cdesc++;
816 queued -= len;
817 if (!queued)
818 break;
819 }
820 skip_cdesc:
821 /* Add context control words and token to first command descriptor */
822 safexcel_context_control(ctx, base, sreq, first_cdesc);
823 if (ctx->aead)
824 safexcel_aead_token(ctx, iv, first_cdesc, atoken,
825 sreq->direction, cryptlen,
826 assoclen, digestsize);
827 else
828 safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
829 cryptlen);
830
831 /* result descriptors */
832 for_each_sg(dst, sg, sreq->nr_dst, i) {
833 bool last = (i == sreq->nr_dst - 1);
834 u32 len = sg_dma_len(sg);
835
836 /* only allow the part of the buffer we know we need */
837 if (len > totlen_dst)
838 len = totlen_dst;
839 if (unlikely(!len))
840 break;
841 totlen_dst -= len;
842
843 /* skip over AAD space in buffer - not written */
844 if (assoclen) {
845 if (assoclen >= len) {
846 assoclen -= len;
847 continue;
848 }
849 rdesc = safexcel_add_rdesc(priv, ring, first, last,
850 sg_dma_address(sg) +
851 assoclen,
852 len - assoclen);
853 assoclen = 0;
854 } else {
855 rdesc = safexcel_add_rdesc(priv, ring, first, last,
856 sg_dma_address(sg),
857 len);
858 }
859 if (IS_ERR(rdesc)) {
860 /* No space left in the result descriptor ring */
861 ret = PTR_ERR(rdesc);
862 goto rdesc_rollback;
863 }
864 if (first) {
865 first_rdesc = rdesc;
866 first = false;
867 }
868 n_rdesc++;
869 }
870
871 if (unlikely(first)) {
872 /*
873 * Special case: AEAD decrypt with only AAD data.
874 * In this case there is NO output data from the engine,
875 * but the engine still needs a result descriptor!
876 * Create a dummy one just for catching the result token.
877 */
878 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
879 if (IS_ERR(rdesc)) {
880 /* No space left in the result descriptor ring */
881 ret = PTR_ERR(rdesc);
882 goto rdesc_rollback;
883 }
884 first_rdesc = rdesc;
885 n_rdesc = 1;
886 }
887
888 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
889
890 *commands = n_cdesc;
891 *results = n_rdesc;
892 return 0;
893
894 rdesc_rollback:
895 for (i = 0; i < n_rdesc; i++)
896 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
897 cdesc_rollback:
898 for (i = 0; i < n_cdesc; i++)
899 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
900 unmap:
901 if (src == dst) {
902 if (sreq->nr_src > 0)
903 dma_unmap_sg(priv->dev, src, sreq->nr_src,
904 DMA_BIDIRECTIONAL);
905 } else {
906 if (sreq->nr_src > 0)
907 dma_unmap_sg(priv->dev, src, sreq->nr_src,
908 DMA_TO_DEVICE);
909 if (sreq->nr_dst > 0)
910 dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
911 DMA_FROM_DEVICE);
912 }
913
914 return ret;
915 }
916
917 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
918 int ring,
919 struct crypto_async_request *base,
920 struct safexcel_cipher_req *sreq,
921 bool *should_complete, int *ret)
922 {
923 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
924 struct safexcel_result_desc *rdesc;
925 int ndesc = 0, enq_ret;
926
927 *ret = 0;
928
929 if (unlikely(!sreq->rdescs))
930 return 0;
931
932 while (sreq->rdescs--) {
933 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
934 if (IS_ERR(rdesc)) {
935 dev_err(priv->dev,
936 "cipher: invalidate: could not retrieve the result descriptor\n");
937 *ret = PTR_ERR(rdesc);
938 break;
939 }
940
941 if (likely(!*ret))
942 *ret = safexcel_rdesc_check_errors(priv, rdesc);
943
944 ndesc++;
945 }
946
947 safexcel_complete(priv, ring);
948
949 if (ctx->base.exit_inv) {
950 dma_pool_free(priv->context_pool, ctx->base.ctxr,
951 ctx->base.ctxr_dma);
952
953 *should_complete = true;
954
955 return ndesc;
956 }
957
958 ring = safexcel_select_ring(priv);
959 ctx->base.ring = ring;
960
961 spin_lock_bh(&priv->ring[ring].queue_lock);
962 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
963 spin_unlock_bh(&priv->ring[ring].queue_lock);
964
965 if (enq_ret != -EINPROGRESS)
966 *ret = enq_ret;
967
968 queue_work(priv->ring[ring].workqueue,
969 &priv->ring[ring].work_data.work);
970
971 *should_complete = false;
972
973 return ndesc;
974 }
975
976 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
977 int ring,
978 struct crypto_async_request *async,
979 bool *should_complete, int *ret)
980 {
981 struct skcipher_request *req = skcipher_request_cast(async);
982 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
983 int err;
984
985 if (sreq->needs_inv) {
986 sreq->needs_inv = false;
987 err = safexcel_handle_inv_result(priv, ring, async, sreq,
988 should_complete, ret);
989 } else {
990 err = safexcel_handle_req_result(priv, ring, async, req->src,
991 req->dst, req->cryptlen, sreq,
992 should_complete, ret);
993 }
994
995 return err;
996 }
997
998 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
999 int ring,
1000 struct crypto_async_request *async,
1001 bool *should_complete, int *ret)
1002 {
1003 struct aead_request *req = aead_request_cast(async);
1004 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1005 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1006 int err;
1007
1008 if (sreq->needs_inv) {
1009 sreq->needs_inv = false;
1010 err = safexcel_handle_inv_result(priv, ring, async, sreq,
1011 should_complete, ret);
1012 } else {
1013 err = safexcel_handle_req_result(priv, ring, async, req->src,
1014 req->dst,
1015 req->cryptlen + crypto_aead_authsize(tfm),
1016 sreq, should_complete, ret);
1017 }
1018
1019 return err;
1020 }
1021
1022 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1023 int ring, int *commands, int *results)
1024 {
1025 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1026 struct safexcel_crypto_priv *priv = ctx->base.priv;
1027 int ret;
1028
1029 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1030 if (unlikely(ret))
1031 return ret;
1032
1033 *commands = 1;
1034 *results = 1;
1035
1036 return 0;
1037 }
1038
1039 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1040 int *commands, int *results)
1041 {
1042 struct skcipher_request *req = skcipher_request_cast(async);
1043 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1044 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1045 struct safexcel_crypto_priv *priv = ctx->base.priv;
1046 int ret;
1047
1048 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1049
1050 if (sreq->needs_inv) {
1051 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1052 } else {
1053 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1054 u8 input_iv[AES_BLOCK_SIZE];
1055
1056 /*
1057 * Save input IV in case of CBC decrypt mode
1058 * Will be overwritten with output IV prior to use!
1059 */
1060 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1061
1062 ret = safexcel_send_req(async, ring, sreq, req->src,
1063 req->dst, req->cryptlen, 0, 0, input_iv,
1064 commands, results);
1065 }
1066
1067 sreq->rdescs = *results;
1068 return ret;
1069 }
1070
1071 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1072 int *commands, int *results)
1073 {
1074 struct aead_request *req = aead_request_cast(async);
1075 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1076 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1077 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1078 struct safexcel_crypto_priv *priv = ctx->base.priv;
1079 int ret;
1080
1081 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1082
1083 if (sreq->needs_inv)
1084 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1085 else
1086 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1087 req->cryptlen, req->assoclen,
1088 crypto_aead_authsize(tfm), req->iv,
1089 commands, results);
1090 sreq->rdescs = *results;
1091 return ret;
1092 }
1093
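/*
 * Queue a context invalidation request and wait synchronously for the
 * engine to complete it. Used when tearing down a transform whose context
 * may still be held in the EIP197 transform record cache.
 */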
1094 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1095 struct crypto_async_request *base,
1096 struct safexcel_cipher_req *sreq,
1097 struct crypto_wait *result)
1098 {
1099 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1100 struct safexcel_crypto_priv *priv = ctx->base.priv;
1101 int ring = ctx->base.ring;
1102 int err;
1103
1104 ctx = crypto_tfm_ctx(base->tfm);
1105 ctx->base.exit_inv = true;
1106 sreq->needs_inv = true;
1107
1108 spin_lock_bh(&priv->ring[ring].queue_lock);
1109 crypto_enqueue_request(&priv->ring[ring].queue, base);
1110 spin_unlock_bh(&priv->ring[ring].queue_lock);
1111
1112 queue_work(priv->ring[ring].workqueue,
1113 &priv->ring[ring].work_data.work);
1114
1115 err = crypto_wait_req(-EINPROGRESS, result);
1116
1117 if (err) {
1118 dev_warn(priv->dev,
1119 "cipher: sync: invalidate: completion error %d\n",
1120 err);
1121 return err;
1122 }
1123
1124 return 0;
1125 }
1126
1127 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1128 {
1129 EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1130 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1131 DECLARE_CRYPTO_WAIT(result);
1132
1133 memset(req, 0, sizeof(struct skcipher_request));
1134
1135 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1136 crypto_req_done, &result);
1137 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1138
1139 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1140 }
1141
1142 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1143 {
1144 EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1145 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1146 DECLARE_CRYPTO_WAIT(result);
1147
1148 memset(req, 0, sizeof(struct aead_request));
1149
1150 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1151 crypto_req_done, &result);
1152 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1153
1154 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1155 }
1156
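/*
 * Queue a cipher/AEAD request on a ring. Allocates the DMA context record
 * on first use and flags a cache invalidation when the key has changed on
 * an engine with a transform record cache.
 */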
1157 static int safexcel_queue_req(struct crypto_async_request *base,
1158 struct safexcel_cipher_req *sreq,
1159 enum safexcel_cipher_direction dir)
1160 {
1161 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1162 struct safexcel_crypto_priv *priv = ctx->base.priv;
1163 int ret, ring;
1164
1165 sreq->needs_inv = false;
1166 sreq->direction = dir;
1167
1168 if (ctx->base.ctxr) {
1169 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1170 sreq->needs_inv = true;
1171 ctx->base.needs_inv = false;
1172 }
1173 } else {
1174 ctx->base.ring = safexcel_select_ring(priv);
1175 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1176 EIP197_GFP_FLAGS(*base),
1177 &ctx->base.ctxr_dma);
1178 if (!ctx->base.ctxr)
1179 return -ENOMEM;
1180 }
1181
1182 ring = ctx->base.ring;
1183
1184 spin_lock_bh(&priv->ring[ring].queue_lock);
1185 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1186 spin_unlock_bh(&priv->ring[ring].queue_lock);
1187
1188 queue_work(priv->ring[ring].workqueue,
1189 &priv->ring[ring].work_data.work);
1190
1191 return ret;
1192 }
1193
1194 static int safexcel_encrypt(struct skcipher_request *req)
1195 {
1196 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1197 SAFEXCEL_ENCRYPT);
1198 }
1199
1200 static int safexcel_decrypt(struct skcipher_request *req)
1201 {
1202 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1203 SAFEXCEL_DECRYPT);
1204 }
1205
1206 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1207 {
1208 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1209 struct safexcel_alg_template *tmpl =
1210 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1211 alg.skcipher.base);
1212
1213 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1214 sizeof(struct safexcel_cipher_req));
1215
1216 ctx->base.priv = tmpl->priv;
1217
1218 ctx->base.send = safexcel_skcipher_send;
1219 ctx->base.handle_result = safexcel_skcipher_handle_result;
1220 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1221 ctx->ctrinit = 1;
1222 return 0;
1223 }
1224
1225 static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1226 {
1227 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1228
1229 memzero_explicit(ctx->key, sizeof(ctx->key));
1230
1231 /* context not allocated, skip invalidation */
1232 if (!ctx->base.ctxr)
1233 return -ENOMEM;
1234
1235 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1236 return 0;
1237 }
1238
1239 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1240 {
1241 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1242 struct safexcel_crypto_priv *priv = ctx->base.priv;
1243 int ret;
1244
1245 if (safexcel_cipher_cra_exit(tfm))
1246 return;
1247
1248 if (priv->flags & EIP197_TRC_CACHE) {
1249 ret = safexcel_skcipher_exit_inv(tfm);
1250 if (ret)
1251 dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1252 ret);
1253 } else {
1254 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255 ctx->base.ctxr_dma);
1256 }
1257 }
1258
1259 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1260 {
1261 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1262 struct safexcel_crypto_priv *priv = ctx->base.priv;
1263 int ret;
1264
1265 if (safexcel_cipher_cra_exit(tfm))
1266 return;
1267
1268 if (priv->flags & EIP197_TRC_CACHE) {
1269 ret = safexcel_aead_exit_inv(tfm);
1270 if (ret)
1271 dev_warn(priv->dev, "aead: invalidation error %d\n",
1272 ret);
1273 } else {
1274 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1275 ctx->base.ctxr_dma);
1276 }
1277 }
1278
1279 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1280 {
1281 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1282
1283 safexcel_skcipher_cra_init(tfm);
1284 ctx->alg = SAFEXCEL_AES;
1285 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1286 ctx->blocksz = 0;
1287 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1288 return 0;
1289 }
1290
1291 struct safexcel_alg_template safexcel_alg_ecb_aes = {
1292 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1293 .algo_mask = SAFEXCEL_ALG_AES,
1294 .alg.skcipher = {
1295 .setkey = safexcel_skcipher_aes_setkey,
1296 .encrypt = safexcel_encrypt,
1297 .decrypt = safexcel_decrypt,
1298 .min_keysize = AES_MIN_KEY_SIZE,
1299 .max_keysize = AES_MAX_KEY_SIZE,
1300 .base = {
1301 .cra_name = "ecb(aes)",
1302 .cra_driver_name = "safexcel-ecb-aes",
1303 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1304 .cra_flags = CRYPTO_ALG_ASYNC |
1305 CRYPTO_ALG_ALLOCATES_MEMORY |
1306 CRYPTO_ALG_KERN_DRIVER_ONLY,
1307 .cra_blocksize = AES_BLOCK_SIZE,
1308 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1309 .cra_alignmask = 0,
1310 .cra_init = safexcel_skcipher_aes_ecb_cra_init,
1311 .cra_exit = safexcel_skcipher_cra_exit,
1312 .cra_module = THIS_MODULE,
1313 },
1314 },
1315 };
1316
1317 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1318 {
1319 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1320
1321 safexcel_skcipher_cra_init(tfm);
1322 ctx->alg = SAFEXCEL_AES;
1323 ctx->blocksz = AES_BLOCK_SIZE;
1324 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1325 return 0;
1326 }
1327
1328 struct safexcel_alg_template safexcel_alg_cbc_aes = {
1329 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1330 .algo_mask = SAFEXCEL_ALG_AES,
1331 .alg.skcipher = {
1332 .setkey = safexcel_skcipher_aes_setkey,
1333 .encrypt = safexcel_encrypt,
1334 .decrypt = safexcel_decrypt,
1335 .min_keysize = AES_MIN_KEY_SIZE,
1336 .max_keysize = AES_MAX_KEY_SIZE,
1337 .ivsize = AES_BLOCK_SIZE,
1338 .base = {
1339 .cra_name = "cbc(aes)",
1340 .cra_driver_name = "safexcel-cbc-aes",
1341 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1342 .cra_flags = CRYPTO_ALG_ASYNC |
1343 CRYPTO_ALG_ALLOCATES_MEMORY |
1344 CRYPTO_ALG_KERN_DRIVER_ONLY,
1345 .cra_blocksize = AES_BLOCK_SIZE,
1346 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1347 .cra_alignmask = 0,
1348 .cra_init = safexcel_skcipher_aes_cbc_cra_init,
1349 .cra_exit = safexcel_skcipher_cra_exit,
1350 .cra_module = THIS_MODULE,
1351 },
1352 },
1353 };
1354
1355 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1356 const u8 *key, unsigned int len)
1357 {
1358 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1359 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1360 struct safexcel_crypto_priv *priv = ctx->base.priv;
1361 struct crypto_aes_ctx aes;
1362 int ret, i;
1363 unsigned int keylen;
1364
1365 /* last 4 bytes of key are the nonce! */
1366 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1367 /* exclude the nonce here */
1368 keylen = len - CTR_RFC3686_NONCE_SIZE;
1369 ret = aes_expandkey(&aes, key, keylen);
1370 if (ret)
1371 return ret;
1372
1373 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1374 for (i = 0; i < keylen / sizeof(u32); i++) {
1375 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1376 ctx->base.needs_inv = true;
1377 break;
1378 }
1379 }
1380 }
1381
1382 for (i = 0; i < keylen / sizeof(u32); i++)
1383 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1384
1385 ctx->key_len = keylen;
1386
1387 memzero_explicit(&aes, sizeof(aes));
1388 return 0;
1389 }
1390
1391 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1392 {
1393 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395 safexcel_skcipher_cra_init(tfm);
1396 ctx->alg = SAFEXCEL_AES;
1397 ctx->blocksz = AES_BLOCK_SIZE;
1398 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1399 return 0;
1400 }
1401
1402 struct safexcel_alg_template safexcel_alg_ctr_aes = {
1403 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1404 .algo_mask = SAFEXCEL_ALG_AES,
1405 .alg.skcipher = {
1406 .setkey = safexcel_skcipher_aesctr_setkey,
1407 .encrypt = safexcel_encrypt,
1408 .decrypt = safexcel_decrypt,
1409 /* Add nonce size */
1410 .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1411 .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1412 .ivsize = CTR_RFC3686_IV_SIZE,
1413 .base = {
1414 .cra_name = "rfc3686(ctr(aes))",
1415 .cra_driver_name = "safexcel-ctr-aes",
1416 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1417 .cra_flags = CRYPTO_ALG_ASYNC |
1418 CRYPTO_ALG_ALLOCATES_MEMORY |
1419 CRYPTO_ALG_KERN_DRIVER_ONLY,
1420 .cra_blocksize = 1,
1421 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1422 .cra_alignmask = 0,
1423 .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1424 .cra_exit = safexcel_skcipher_cra_exit,
1425 .cra_module = THIS_MODULE,
1426 },
1427 },
1428 };
1429
1430 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1431 unsigned int len)
1432 {
1433 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1434 struct safexcel_crypto_priv *priv = ctx->base.priv;
1435 int ret;
1436
1437 ret = verify_skcipher_des_key(ctfm, key);
1438 if (ret)
1439 return ret;
1440
1441 	/* if context exists and key changed, need to invalidate it */
1442 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1443 if (memcmp(ctx->key, key, len))
1444 ctx->base.needs_inv = true;
1445
1446 memcpy(ctx->key, key, len);
1447 ctx->key_len = len;
1448
1449 return 0;
1450 }
1451
1452 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1453 {
1454 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1455
1456 safexcel_skcipher_cra_init(tfm);
1457 ctx->alg = SAFEXCEL_DES;
1458 ctx->blocksz = DES_BLOCK_SIZE;
1459 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1460 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1461 return 0;
1462 }
1463
1464 struct safexcel_alg_template safexcel_alg_cbc_des = {
1465 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1466 .algo_mask = SAFEXCEL_ALG_DES,
1467 .alg.skcipher = {
1468 .setkey = safexcel_des_setkey,
1469 .encrypt = safexcel_encrypt,
1470 .decrypt = safexcel_decrypt,
1471 .min_keysize = DES_KEY_SIZE,
1472 .max_keysize = DES_KEY_SIZE,
1473 .ivsize = DES_BLOCK_SIZE,
1474 .base = {
1475 .cra_name = "cbc(des)",
1476 .cra_driver_name = "safexcel-cbc-des",
1477 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1478 .cra_flags = CRYPTO_ALG_ASYNC |
1479 CRYPTO_ALG_ALLOCATES_MEMORY |
1480 CRYPTO_ALG_KERN_DRIVER_ONLY,
1481 .cra_blocksize = DES_BLOCK_SIZE,
1482 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1483 .cra_alignmask = 0,
1484 .cra_init = safexcel_skcipher_des_cbc_cra_init,
1485 .cra_exit = safexcel_skcipher_cra_exit,
1486 .cra_module = THIS_MODULE,
1487 },
1488 },
1489 };
1490
1491 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1492 {
1493 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1494
1495 safexcel_skcipher_cra_init(tfm);
1496 ctx->alg = SAFEXCEL_DES;
1497 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1498 ctx->blocksz = 0;
1499 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1500 return 0;
1501 }
1502
1503 struct safexcel_alg_template safexcel_alg_ecb_des = {
1504 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1505 .algo_mask = SAFEXCEL_ALG_DES,
1506 .alg.skcipher = {
1507 .setkey = safexcel_des_setkey,
1508 .encrypt = safexcel_encrypt,
1509 .decrypt = safexcel_decrypt,
1510 .min_keysize = DES_KEY_SIZE,
1511 .max_keysize = DES_KEY_SIZE,
1512 .base = {
1513 .cra_name = "ecb(des)",
1514 .cra_driver_name = "safexcel-ecb-des",
1515 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1516 .cra_flags = CRYPTO_ALG_ASYNC |
1517 CRYPTO_ALG_ALLOCATES_MEMORY |
1518 CRYPTO_ALG_KERN_DRIVER_ONLY,
1519 .cra_blocksize = DES_BLOCK_SIZE,
1520 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1521 .cra_alignmask = 0,
1522 .cra_init = safexcel_skcipher_des_ecb_cra_init,
1523 .cra_exit = safexcel_skcipher_cra_exit,
1524 .cra_module = THIS_MODULE,
1525 },
1526 },
1527 };
1528
1529 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1530 const u8 *key, unsigned int len)
1531 {
1532 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1533 struct safexcel_crypto_priv *priv = ctx->base.priv;
1534 int err;
1535
1536 err = verify_skcipher_des3_key(ctfm, key);
1537 if (err)
1538 return err;
1539
1540 	/* if context exists and key changed, need to invalidate it */
1541 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1542 if (memcmp(ctx->key, key, len))
1543 ctx->base.needs_inv = true;
1544
1545 memcpy(ctx->key, key, len);
1546 ctx->key_len = len;
1547
1548 return 0;
1549 }
1550
1551 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1552 {
1553 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1554
1555 safexcel_skcipher_cra_init(tfm);
1556 ctx->alg = SAFEXCEL_3DES;
1557 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1558 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1559 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1560 return 0;
1561 }
1562
1563 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1564 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1565 .algo_mask = SAFEXCEL_ALG_DES,
1566 .alg.skcipher = {
1567 .setkey = safexcel_des3_ede_setkey,
1568 .encrypt = safexcel_encrypt,
1569 .decrypt = safexcel_decrypt,
1570 .min_keysize = DES3_EDE_KEY_SIZE,
1571 .max_keysize = DES3_EDE_KEY_SIZE,
1572 .ivsize = DES3_EDE_BLOCK_SIZE,
1573 .base = {
1574 .cra_name = "cbc(des3_ede)",
1575 .cra_driver_name = "safexcel-cbc-des3_ede",
1576 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1577 .cra_flags = CRYPTO_ALG_ASYNC |
1578 CRYPTO_ALG_ALLOCATES_MEMORY |
1579 CRYPTO_ALG_KERN_DRIVER_ONLY,
1580 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1581 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1582 .cra_alignmask = 0,
1583 .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1584 .cra_exit = safexcel_skcipher_cra_exit,
1585 .cra_module = THIS_MODULE,
1586 },
1587 },
1588 };
1589
1590 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1591 {
1592 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1593
1594 safexcel_skcipher_cra_init(tfm);
1595 ctx->alg = SAFEXCEL_3DES;
1596 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1597 ctx->blocksz = 0;
1598 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1599 return 0;
1600 }
1601
1602 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1603 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1604 .algo_mask = SAFEXCEL_ALG_DES,
1605 .alg.skcipher = {
1606 .setkey = safexcel_des3_ede_setkey,
1607 .encrypt = safexcel_encrypt,
1608 .decrypt = safexcel_decrypt,
1609 .min_keysize = DES3_EDE_KEY_SIZE,
1610 .max_keysize = DES3_EDE_KEY_SIZE,
1611 .base = {
1612 .cra_name = "ecb(des3_ede)",
1613 .cra_driver_name = "safexcel-ecb-des3_ede",
1614 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1615 .cra_flags = CRYPTO_ALG_ASYNC |
1616 CRYPTO_ALG_ALLOCATES_MEMORY |
1617 CRYPTO_ALG_KERN_DRIVER_ONLY,
1618 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1619 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1620 .cra_alignmask = 0,
1621 .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1622 .cra_exit = safexcel_skcipher_cra_exit,
1623 .cra_module = THIS_MODULE,
1624 },
1625 },
1626 };
1627
1628 static int safexcel_aead_encrypt(struct aead_request *req)
1629 {
1630 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1631
1632 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1633 }
1634
1635 static int safexcel_aead_decrypt(struct aead_request *req)
1636 {
1637 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1638
1639 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1640 }
1641
1642 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1643 {
1644 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1645 struct safexcel_alg_template *tmpl =
1646 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1647 alg.aead.base);
1648
1649 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1650 sizeof(struct safexcel_cipher_req));
1651
1652 ctx->base.priv = tmpl->priv;
1653
1654 ctx->alg = SAFEXCEL_AES; /* default */
1655 ctx->blocksz = AES_BLOCK_SIZE;
1656 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1657 ctx->ctrinit = 1;
1658 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1659 ctx->aead = true;
1660 ctx->base.send = safexcel_aead_send;
1661 ctx->base.handle_result = safexcel_aead_handle_result;
1662 return 0;
1663 }
1664
1665 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1666 {
1667 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1668
1669 safexcel_aead_cra_init(tfm);
1670 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1671 ctx->state_sz = SHA1_DIGEST_SIZE;
1672 return 0;
1673 }
1674
1675 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1676 .type = SAFEXCEL_ALG_TYPE_AEAD,
1677 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1678 .alg.aead = {
1679 .setkey = safexcel_aead_setkey,
1680 .encrypt = safexcel_aead_encrypt,
1681 .decrypt = safexcel_aead_decrypt,
1682 .ivsize = AES_BLOCK_SIZE,
1683 .maxauthsize = SHA1_DIGEST_SIZE,
1684 .base = {
1685 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1686 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1687 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1688 .cra_flags = CRYPTO_ALG_ASYNC |
1689 CRYPTO_ALG_ALLOCATES_MEMORY |
1690 CRYPTO_ALG_KERN_DRIVER_ONLY,
1691 .cra_blocksize = AES_BLOCK_SIZE,
1692 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1693 .cra_alignmask = 0,
1694 .cra_init = safexcel_aead_sha1_cra_init,
1695 .cra_exit = safexcel_aead_cra_exit,
1696 .cra_module = THIS_MODULE,
1697 },
1698 },
1699 };
1700
1701 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1702 {
1703 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1704
1705 safexcel_aead_cra_init(tfm);
1706 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1707 ctx->state_sz = SHA256_DIGEST_SIZE;
1708 return 0;
1709 }
1710
1711 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1712 .type = SAFEXCEL_ALG_TYPE_AEAD,
1713 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1714 .alg.aead = {
1715 .setkey = safexcel_aead_setkey,
1716 .encrypt = safexcel_aead_encrypt,
1717 .decrypt = safexcel_aead_decrypt,
1718 .ivsize = AES_BLOCK_SIZE,
1719 .maxauthsize = SHA256_DIGEST_SIZE,
1720 .base = {
1721 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1722 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1723 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1724 .cra_flags = CRYPTO_ALG_ASYNC |
1725 CRYPTO_ALG_ALLOCATES_MEMORY |
1726 CRYPTO_ALG_KERN_DRIVER_ONLY,
1727 .cra_blocksize = AES_BLOCK_SIZE,
1728 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1729 .cra_alignmask = 0,
1730 .cra_init = safexcel_aead_sha256_cra_init,
1731 .cra_exit = safexcel_aead_cra_exit,
1732 .cra_module = THIS_MODULE,
1733 },
1734 },
1735 };
1736
1737 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1738 {
1739 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1740
1741 safexcel_aead_cra_init(tfm);
1742 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1743 ctx->state_sz = SHA256_DIGEST_SIZE;
1744 return 0;
1745 }
1746
1747 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1748 .type = SAFEXCEL_ALG_TYPE_AEAD,
1749 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1750 .alg.aead = {
1751 .setkey = safexcel_aead_setkey,
1752 .encrypt = safexcel_aead_encrypt,
1753 .decrypt = safexcel_aead_decrypt,
1754 .ivsize = AES_BLOCK_SIZE,
1755 .maxauthsize = SHA224_DIGEST_SIZE,
1756 .base = {
1757 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1758 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1759 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1760 .cra_flags = CRYPTO_ALG_ASYNC |
1761 CRYPTO_ALG_ALLOCATES_MEMORY |
1762 CRYPTO_ALG_KERN_DRIVER_ONLY,
1763 .cra_blocksize = AES_BLOCK_SIZE,
1764 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1765 .cra_alignmask = 0,
1766 .cra_init = safexcel_aead_sha224_cra_init,
1767 .cra_exit = safexcel_aead_cra_exit,
1768 .cra_module = THIS_MODULE,
1769 },
1770 },
1771 };
1772
1773 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1774 {
1775 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1776
1777 safexcel_aead_cra_init(tfm);
1778 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1779 ctx->state_sz = SHA512_DIGEST_SIZE;
1780 return 0;
1781 }
1782
1783 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1784 .type = SAFEXCEL_ALG_TYPE_AEAD,
1785 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1786 .alg.aead = {
1787 .setkey = safexcel_aead_setkey,
1788 .encrypt = safexcel_aead_encrypt,
1789 .decrypt = safexcel_aead_decrypt,
1790 .ivsize = AES_BLOCK_SIZE,
1791 .maxauthsize = SHA512_DIGEST_SIZE,
1792 .base = {
1793 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1794 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1795 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1796 .cra_flags = CRYPTO_ALG_ASYNC |
1797 CRYPTO_ALG_ALLOCATES_MEMORY |
1798 CRYPTO_ALG_KERN_DRIVER_ONLY,
1799 .cra_blocksize = AES_BLOCK_SIZE,
1800 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1801 .cra_alignmask = 0,
1802 .cra_init = safexcel_aead_sha512_cra_init,
1803 .cra_exit = safexcel_aead_cra_exit,
1804 .cra_module = THIS_MODULE,
1805 },
1806 },
1807 };
1808
1809 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1810 {
1811 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1812
1813 safexcel_aead_cra_init(tfm);
1814 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1815 ctx->state_sz = SHA512_DIGEST_SIZE;
1816 return 0;
1817 }
1818
1819 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1820 .type = SAFEXCEL_ALG_TYPE_AEAD,
1821 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1822 .alg.aead = {
1823 .setkey = safexcel_aead_setkey,
1824 .encrypt = safexcel_aead_encrypt,
1825 .decrypt = safexcel_aead_decrypt,
1826 .ivsize = AES_BLOCK_SIZE,
1827 .maxauthsize = SHA384_DIGEST_SIZE,
1828 .base = {
1829 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1830 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1831 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1832 .cra_flags = CRYPTO_ALG_ASYNC |
1833 CRYPTO_ALG_ALLOCATES_MEMORY |
1834 CRYPTO_ALG_KERN_DRIVER_ONLY,
1835 .cra_blocksize = AES_BLOCK_SIZE,
1836 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1837 .cra_alignmask = 0,
1838 .cra_init = safexcel_aead_sha384_cra_init,
1839 .cra_exit = safexcel_aead_cra_exit,
1840 .cra_module = THIS_MODULE,
1841 },
1842 },
1843 };
1844
1845 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1846 {
1847 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1848
1849 safexcel_aead_sha1_cra_init(tfm);
1850 ctx->alg = SAFEXCEL_3DES; /* override default */
1851 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1852 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1853 return 0;
1854 }
1855
1856 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1857 .type = SAFEXCEL_ALG_TYPE_AEAD,
1858 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1859 .alg.aead = {
1860 .setkey = safexcel_aead_setkey,
1861 .encrypt = safexcel_aead_encrypt,
1862 .decrypt = safexcel_aead_decrypt,
1863 .ivsize = DES3_EDE_BLOCK_SIZE,
1864 .maxauthsize = SHA1_DIGEST_SIZE,
1865 .base = {
1866 .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1867 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1868 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1869 .cra_flags = CRYPTO_ALG_ASYNC |
1870 CRYPTO_ALG_ALLOCATES_MEMORY |
1871 CRYPTO_ALG_KERN_DRIVER_ONLY,
1872 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1873 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1874 .cra_alignmask = 0,
1875 .cra_init = safexcel_aead_sha1_des3_cra_init,
1876 .cra_exit = safexcel_aead_cra_exit,
1877 .cra_module = THIS_MODULE,
1878 },
1879 },
1880 };
1881
1882 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1883 {
1884 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1885
1886 safexcel_aead_sha256_cra_init(tfm);
1887 ctx->alg = SAFEXCEL_3DES; /* override default */
1888 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1889 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1890 return 0;
1891 }
1892
1893 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1894 .type = SAFEXCEL_ALG_TYPE_AEAD,
1895 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1896 .alg.aead = {
1897 .setkey = safexcel_aead_setkey,
1898 .encrypt = safexcel_aead_encrypt,
1899 .decrypt = safexcel_aead_decrypt,
1900 .ivsize = DES3_EDE_BLOCK_SIZE,
1901 .maxauthsize = SHA256_DIGEST_SIZE,
1902 .base = {
1903 .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1904 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1905 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1906 .cra_flags = CRYPTO_ALG_ASYNC |
1907 CRYPTO_ALG_ALLOCATES_MEMORY |
1908 CRYPTO_ALG_KERN_DRIVER_ONLY,
1909 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1910 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1911 .cra_alignmask = 0,
1912 .cra_init = safexcel_aead_sha256_des3_cra_init,
1913 .cra_exit = safexcel_aead_cra_exit,
1914 .cra_module = THIS_MODULE,
1915 },
1916 },
1917 };
1918
1919 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1920 {
1921 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1922
1923 safexcel_aead_sha224_cra_init(tfm);
1924 ctx->alg = SAFEXCEL_3DES; /* override default */
1925 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1926 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1927 return 0;
1928 }
1929
1930 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1931 .type = SAFEXCEL_ALG_TYPE_AEAD,
1932 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1933 .alg.aead = {
1934 .setkey = safexcel_aead_setkey,
1935 .encrypt = safexcel_aead_encrypt,
1936 .decrypt = safexcel_aead_decrypt,
1937 .ivsize = DES3_EDE_BLOCK_SIZE,
1938 .maxauthsize = SHA224_DIGEST_SIZE,
1939 .base = {
1940 .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1941 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1942 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1943 .cra_flags = CRYPTO_ALG_ASYNC |
1944 CRYPTO_ALG_ALLOCATES_MEMORY |
1945 CRYPTO_ALG_KERN_DRIVER_ONLY,
1946 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1947 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1948 .cra_alignmask = 0,
1949 .cra_init = safexcel_aead_sha224_des3_cra_init,
1950 .cra_exit = safexcel_aead_cra_exit,
1951 .cra_module = THIS_MODULE,
1952 },
1953 },
1954 };
1955
1956 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
1957 {
1958 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1959
1960 safexcel_aead_sha512_cra_init(tfm);
1961 ctx->alg = SAFEXCEL_3DES; /* override default */
1962 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1963 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1964 return 0;
1965 }
1966
1967 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
1968 .type = SAFEXCEL_ALG_TYPE_AEAD,
1969 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1970 .alg.aead = {
1971 .setkey = safexcel_aead_setkey,
1972 .encrypt = safexcel_aead_encrypt,
1973 .decrypt = safexcel_aead_decrypt,
1974 .ivsize = DES3_EDE_BLOCK_SIZE,
1975 .maxauthsize = SHA512_DIGEST_SIZE,
1976 .base = {
1977 .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
1978 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
1979 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1980 .cra_flags = CRYPTO_ALG_ASYNC |
1981 CRYPTO_ALG_ALLOCATES_MEMORY |
1982 CRYPTO_ALG_KERN_DRIVER_ONLY,
1983 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1984 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1985 .cra_alignmask = 0,
1986 .cra_init = safexcel_aead_sha512_des3_cra_init,
1987 .cra_exit = safexcel_aead_cra_exit,
1988 .cra_module = THIS_MODULE,
1989 },
1990 },
1991 };
1992
1993 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
1994 {
1995 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1996
1997 safexcel_aead_sha384_cra_init(tfm);
1998 ctx->alg = SAFEXCEL_3DES; /* override default */
1999 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2000 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2001 return 0;
2002 }
2003
2004 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2005 .type = SAFEXCEL_ALG_TYPE_AEAD,
2006 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2007 .alg.aead = {
2008 .setkey = safexcel_aead_setkey,
2009 .encrypt = safexcel_aead_encrypt,
2010 .decrypt = safexcel_aead_decrypt,
2011 .ivsize = DES3_EDE_BLOCK_SIZE,
2012 .maxauthsize = SHA384_DIGEST_SIZE,
2013 .base = {
2014 .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2015 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2016 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2017 .cra_flags = CRYPTO_ALG_ASYNC |
2018 CRYPTO_ALG_ALLOCATES_MEMORY |
2019 CRYPTO_ALG_KERN_DRIVER_ONLY,
2020 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2021 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2022 .cra_alignmask = 0,
2023 .cra_init = safexcel_aead_sha384_des3_cra_init,
2024 .cra_exit = safexcel_aead_cra_exit,
2025 .cra_module = THIS_MODULE,
2026 },
2027 },
2028 };
2029
2030 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2031 {
2032 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2033
2034 safexcel_aead_sha1_cra_init(tfm);
2035 ctx->alg = SAFEXCEL_DES; /* override default */
2036 ctx->blocksz = DES_BLOCK_SIZE;
2037 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2038 return 0;
2039 }
2040
2041 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2042 .type = SAFEXCEL_ALG_TYPE_AEAD,
2043 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2044 .alg.aead = {
2045 .setkey = safexcel_aead_setkey,
2046 .encrypt = safexcel_aead_encrypt,
2047 .decrypt = safexcel_aead_decrypt,
2048 .ivsize = DES_BLOCK_SIZE,
2049 .maxauthsize = SHA1_DIGEST_SIZE,
2050 .base = {
2051 .cra_name = "authenc(hmac(sha1),cbc(des))",
2052 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2053 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2054 .cra_flags = CRYPTO_ALG_ASYNC |
2055 CRYPTO_ALG_ALLOCATES_MEMORY |
2056 CRYPTO_ALG_KERN_DRIVER_ONLY,
2057 .cra_blocksize = DES_BLOCK_SIZE,
2058 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2059 .cra_alignmask = 0,
2060 .cra_init = safexcel_aead_sha1_des_cra_init,
2061 .cra_exit = safexcel_aead_cra_exit,
2062 .cra_module = THIS_MODULE,
2063 },
2064 },
2065 };
2066
2067 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2068 {
2069 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071 safexcel_aead_sha256_cra_init(tfm);
2072 ctx->alg = SAFEXCEL_DES; /* override default */
2073 ctx->blocksz = DES_BLOCK_SIZE;
2074 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2075 return 0;
2076 }
2077
2078 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2079 .type = SAFEXCEL_ALG_TYPE_AEAD,
2080 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2081 .alg.aead = {
2082 .setkey = safexcel_aead_setkey,
2083 .encrypt = safexcel_aead_encrypt,
2084 .decrypt = safexcel_aead_decrypt,
2085 .ivsize = DES_BLOCK_SIZE,
2086 .maxauthsize = SHA256_DIGEST_SIZE,
2087 .base = {
2088 .cra_name = "authenc(hmac(sha256),cbc(des))",
2089 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2090 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2091 .cra_flags = CRYPTO_ALG_ASYNC |
2092 CRYPTO_ALG_ALLOCATES_MEMORY |
2093 CRYPTO_ALG_KERN_DRIVER_ONLY,
2094 .cra_blocksize = DES_BLOCK_SIZE,
2095 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2096 .cra_alignmask = 0,
2097 .cra_init = safexcel_aead_sha256_des_cra_init,
2098 .cra_exit = safexcel_aead_cra_exit,
2099 .cra_module = THIS_MODULE,
2100 },
2101 },
2102 };
2103
2104 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2105 {
2106 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2107
2108 safexcel_aead_sha224_cra_init(tfm);
2109 ctx->alg = SAFEXCEL_DES; /* override default */
2110 ctx->blocksz = DES_BLOCK_SIZE;
2111 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2112 return 0;
2113 }
2114
2115 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2116 .type = SAFEXCEL_ALG_TYPE_AEAD,
2117 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2118 .alg.aead = {
2119 .setkey = safexcel_aead_setkey,
2120 .encrypt = safexcel_aead_encrypt,
2121 .decrypt = safexcel_aead_decrypt,
2122 .ivsize = DES_BLOCK_SIZE,
2123 .maxauthsize = SHA224_DIGEST_SIZE,
2124 .base = {
2125 .cra_name = "authenc(hmac(sha224),cbc(des))",
2126 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2127 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2128 .cra_flags = CRYPTO_ALG_ASYNC |
2129 CRYPTO_ALG_ALLOCATES_MEMORY |
2130 CRYPTO_ALG_KERN_DRIVER_ONLY,
2131 .cra_blocksize = DES_BLOCK_SIZE,
2132 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2133 .cra_alignmask = 0,
2134 .cra_init = safexcel_aead_sha224_des_cra_init,
2135 .cra_exit = safexcel_aead_cra_exit,
2136 .cra_module = THIS_MODULE,
2137 },
2138 },
2139 };
2140
2141 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2142 {
2143 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2144
2145 safexcel_aead_sha512_cra_init(tfm);
2146 ctx->alg = SAFEXCEL_DES; /* override default */
2147 ctx->blocksz = DES_BLOCK_SIZE;
2148 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2149 return 0;
2150 }
2151
2152 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2153 .type = SAFEXCEL_ALG_TYPE_AEAD,
2154 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2155 .alg.aead = {
2156 .setkey = safexcel_aead_setkey,
2157 .encrypt = safexcel_aead_encrypt,
2158 .decrypt = safexcel_aead_decrypt,
2159 .ivsize = DES_BLOCK_SIZE,
2160 .maxauthsize = SHA512_DIGEST_SIZE,
2161 .base = {
2162 .cra_name = "authenc(hmac(sha512),cbc(des))",
2163 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2164 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2165 .cra_flags = CRYPTO_ALG_ASYNC |
2166 CRYPTO_ALG_ALLOCATES_MEMORY |
2167 CRYPTO_ALG_KERN_DRIVER_ONLY,
2168 .cra_blocksize = DES_BLOCK_SIZE,
2169 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2170 .cra_alignmask = 0,
2171 .cra_init = safexcel_aead_sha512_des_cra_init,
2172 .cra_exit = safexcel_aead_cra_exit,
2173 .cra_module = THIS_MODULE,
2174 },
2175 },
2176 };
2177
2178 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2179 {
2180 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2181
2182 safexcel_aead_sha384_cra_init(tfm);
2183 ctx->alg = SAFEXCEL_DES; /* override default */
2184 ctx->blocksz = DES_BLOCK_SIZE;
2185 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2186 return 0;
2187 }
2188
2189 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2190 .type = SAFEXCEL_ALG_TYPE_AEAD,
2191 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2192 .alg.aead = {
2193 .setkey = safexcel_aead_setkey,
2194 .encrypt = safexcel_aead_encrypt,
2195 .decrypt = safexcel_aead_decrypt,
2196 .ivsize = DES_BLOCK_SIZE,
2197 .maxauthsize = SHA384_DIGEST_SIZE,
2198 .base = {
2199 .cra_name = "authenc(hmac(sha384),cbc(des))",
2200 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2201 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2202 .cra_flags = CRYPTO_ALG_ASYNC |
2203 CRYPTO_ALG_ALLOCATES_MEMORY |
2204 CRYPTO_ALG_KERN_DRIVER_ONLY,
2205 .cra_blocksize = DES_BLOCK_SIZE,
2206 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2207 .cra_alignmask = 0,
2208 .cra_init = safexcel_aead_sha384_des_cra_init,
2209 .cra_exit = safexcel_aead_cra_exit,
2210 .cra_module = THIS_MODULE,
2211 },
2212 },
2213 };
2214
2215 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2216 {
2217 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2218
2219 safexcel_aead_sha1_cra_init(tfm);
2220 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2221 return 0;
2222 }
2223
2224 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2225 .type = SAFEXCEL_ALG_TYPE_AEAD,
2226 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2227 .alg.aead = {
2228 .setkey = safexcel_aead_setkey,
2229 .encrypt = safexcel_aead_encrypt,
2230 .decrypt = safexcel_aead_decrypt,
2231 .ivsize = CTR_RFC3686_IV_SIZE,
2232 .maxauthsize = SHA1_DIGEST_SIZE,
2233 .base = {
2234 .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2235 .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2236 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2237 .cra_flags = CRYPTO_ALG_ASYNC |
2238 CRYPTO_ALG_ALLOCATES_MEMORY |
2239 CRYPTO_ALG_KERN_DRIVER_ONLY,
2240 .cra_blocksize = 1,
2241 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2242 .cra_alignmask = 0,
2243 .cra_init = safexcel_aead_sha1_ctr_cra_init,
2244 .cra_exit = safexcel_aead_cra_exit,
2245 .cra_module = THIS_MODULE,
2246 },
2247 },
2248 };
2249
2250 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2251 {
2252 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2253
2254 safexcel_aead_sha256_cra_init(tfm);
2255 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2256 return 0;
2257 }
2258
2259 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2260 .type = SAFEXCEL_ALG_TYPE_AEAD,
2261 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2262 .alg.aead = {
2263 .setkey = safexcel_aead_setkey,
2264 .encrypt = safexcel_aead_encrypt,
2265 .decrypt = safexcel_aead_decrypt,
2266 .ivsize = CTR_RFC3686_IV_SIZE,
2267 .maxauthsize = SHA256_DIGEST_SIZE,
2268 .base = {
2269 .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2270 .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2271 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2272 .cra_flags = CRYPTO_ALG_ASYNC |
2273 CRYPTO_ALG_ALLOCATES_MEMORY |
2274 CRYPTO_ALG_KERN_DRIVER_ONLY,
2275 .cra_blocksize = 1,
2276 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2277 .cra_alignmask = 0,
2278 .cra_init = safexcel_aead_sha256_ctr_cra_init,
2279 .cra_exit = safexcel_aead_cra_exit,
2280 .cra_module = THIS_MODULE,
2281 },
2282 },
2283 };
2284
2285 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2286 {
2287 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2288
2289 safexcel_aead_sha224_cra_init(tfm);
2290 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2291 return 0;
2292 }
2293
2294 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2295 .type = SAFEXCEL_ALG_TYPE_AEAD,
2296 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2297 .alg.aead = {
2298 .setkey = safexcel_aead_setkey,
2299 .encrypt = safexcel_aead_encrypt,
2300 .decrypt = safexcel_aead_decrypt,
2301 .ivsize = CTR_RFC3686_IV_SIZE,
2302 .maxauthsize = SHA224_DIGEST_SIZE,
2303 .base = {
2304 .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2305 .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2306 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2307 .cra_flags = CRYPTO_ALG_ASYNC |
2308 CRYPTO_ALG_ALLOCATES_MEMORY |
2309 CRYPTO_ALG_KERN_DRIVER_ONLY,
2310 .cra_blocksize = 1,
2311 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2312 .cra_alignmask = 0,
2313 .cra_init = safexcel_aead_sha224_ctr_cra_init,
2314 .cra_exit = safexcel_aead_cra_exit,
2315 .cra_module = THIS_MODULE,
2316 },
2317 },
2318 };
2319
2320 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2321 {
2322 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2323
2324 safexcel_aead_sha512_cra_init(tfm);
2325 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2326 return 0;
2327 }
2328
2329 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2330 .type = SAFEXCEL_ALG_TYPE_AEAD,
2331 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2332 .alg.aead = {
2333 .setkey = safexcel_aead_setkey,
2334 .encrypt = safexcel_aead_encrypt,
2335 .decrypt = safexcel_aead_decrypt,
2336 .ivsize = CTR_RFC3686_IV_SIZE,
2337 .maxauthsize = SHA512_DIGEST_SIZE,
2338 .base = {
2339 .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2340 .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2341 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2342 .cra_flags = CRYPTO_ALG_ASYNC |
2343 CRYPTO_ALG_ALLOCATES_MEMORY |
2344 CRYPTO_ALG_KERN_DRIVER_ONLY,
2345 .cra_blocksize = 1,
2346 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2347 .cra_alignmask = 0,
2348 .cra_init = safexcel_aead_sha512_ctr_cra_init,
2349 .cra_exit = safexcel_aead_cra_exit,
2350 .cra_module = THIS_MODULE,
2351 },
2352 },
2353 };
2354
2355 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2356 {
2357 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2358
2359 safexcel_aead_sha384_cra_init(tfm);
2360 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2361 return 0;
2362 }
2363
2364 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2365 .type = SAFEXCEL_ALG_TYPE_AEAD,
2366 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2367 .alg.aead = {
2368 .setkey = safexcel_aead_setkey,
2369 .encrypt = safexcel_aead_encrypt,
2370 .decrypt = safexcel_aead_decrypt,
2371 .ivsize = CTR_RFC3686_IV_SIZE,
2372 .maxauthsize = SHA384_DIGEST_SIZE,
2373 .base = {
2374 .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2375 .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2376 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2377 .cra_flags = CRYPTO_ALG_ASYNC |
2378 CRYPTO_ALG_ALLOCATES_MEMORY |
2379 CRYPTO_ALG_KERN_DRIVER_ONLY,
2380 .cra_blocksize = 1,
2381 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2382 .cra_alignmask = 0,
2383 .cra_init = safexcel_aead_sha384_ctr_cra_init,
2384 .cra_exit = safexcel_aead_cra_exit,
2385 .cra_module = THIS_MODULE,
2386 },
2387 },
2388 };
2389
2390 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2391 const u8 *key, unsigned int len)
2392 {
2393 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2394 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2395 struct safexcel_crypto_priv *priv = ctx->base.priv;
2396 struct crypto_aes_ctx aes;
2397 int ret, i;
2398 unsigned int keylen;
2399
2400 /* Check for illegal XTS keys */
2401 ret = xts_verify_key(ctfm, key, len);
2402 if (ret)
2403 return ret;
2404
2405 /* Only half of the key data is cipher key */
2406 keylen = (len >> 1);
2407 ret = aes_expandkey(&aes, key, keylen);
2408 if (ret)
2409 return ret;
2410
2411 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2412 for (i = 0; i < keylen / sizeof(u32); i++) {
2413 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2414 ctx->base.needs_inv = true;
2415 break;
2416 }
2417 }
2418 }
2419
2420 for (i = 0; i < keylen / sizeof(u32); i++)
2421 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2422
2423 /* The other half is the tweak key */
2424 ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2425 if (ret)
2426 return ret;
2427
2428 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2429 for (i = 0; i < keylen / sizeof(u32); i++) {
2430 if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2431 aes.key_enc[i]) {
2432 ctx->base.needs_inv = true;
2433 break;
2434 }
2435 }
2436 }
2437
2438 for (i = 0; i < keylen / sizeof(u32); i++)
2439 ctx->key[i + keylen / sizeof(u32)] =
2440 cpu_to_le32(aes.key_enc[i]);
2441
2442 ctx->key_len = keylen << 1;
2443
2444 memzero_explicit(&aes, sizeof(aes));
2445 return 0;
2446 }
2447
2448 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2449 {
2450 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2451
2452 safexcel_skcipher_cra_init(tfm);
2453 ctx->alg = SAFEXCEL_AES;
2454 ctx->blocksz = AES_BLOCK_SIZE;
2455 ctx->xts = 1;
2456 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2457 return 0;
2458 }
2459
2460 static int safexcel_encrypt_xts(struct skcipher_request *req)
2461 {
2462 if (req->cryptlen < XTS_BLOCK_SIZE)
2463 return -EINVAL;
2464 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2465 SAFEXCEL_ENCRYPT);
2466 }
2467
2468 static int safexcel_decrypt_xts(struct skcipher_request *req)
2469 {
2470 if (req->cryptlen < XTS_BLOCK_SIZE)
2471 return -EINVAL;
2472 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2473 SAFEXCEL_DECRYPT);
2474 }
2475
2476 struct safexcel_alg_template safexcel_alg_xts_aes = {
2477 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2478 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2479 .alg.skcipher = {
2480 .setkey = safexcel_skcipher_aesxts_setkey,
2481 .encrypt = safexcel_encrypt_xts,
2482 .decrypt = safexcel_decrypt_xts,
2483 /* XTS actually uses 2 AES keys glued together */
2484 .min_keysize = AES_MIN_KEY_SIZE * 2,
2485 .max_keysize = AES_MAX_KEY_SIZE * 2,
2486 .ivsize = XTS_BLOCK_SIZE,
2487 .base = {
2488 .cra_name = "xts(aes)",
2489 .cra_driver_name = "safexcel-xts-aes",
2490 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2491 .cra_flags = CRYPTO_ALG_ASYNC |
2492 CRYPTO_ALG_ALLOCATES_MEMORY |
2493 CRYPTO_ALG_KERN_DRIVER_ONLY,
2494 .cra_blocksize = XTS_BLOCK_SIZE,
2495 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2496 .cra_alignmask = 0,
2497 .cra_init = safexcel_skcipher_aes_xts_cra_init,
2498 .cra_exit = safexcel_skcipher_cra_exit,
2499 .cra_module = THIS_MODULE,
2500 },
2501 },
2502 };
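
/*
 * Usage sketch (not part of the driver): how a kernel-side user could drive
 * the "xts(aes)" skcipher registered above through the generic crypto API.
 * Compiled out since it is illustrative only; the function and variable
 * names below are hypothetical.
 */
#if 0
static int example_xts_aes_encrypt(struct scatterlist *src,
				   struct scatterlist *dst, unsigned int len,
				   const u8 *key, unsigned int keylen, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* XTS keys are two AES keys back to back, e.g. 64 bytes for AES-256 */
	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, src, dst, len, iv);
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}
#endif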
2503
2504 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2505 unsigned int len)
2506 {
2507 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2508 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2509 struct safexcel_crypto_priv *priv = ctx->base.priv;
2510 struct crypto_aes_ctx aes;
2511 u32 hashkey[AES_BLOCK_SIZE >> 2];
2512 int ret, i;
2513
2514 ret = aes_expandkey(&aes, key, len);
2515 if (ret) {
2516 memzero_explicit(&aes, sizeof(aes));
2517 return ret;
2518 }
2519
2520 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2521 for (i = 0; i < len / sizeof(u32); i++) {
2522 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2523 ctx->base.needs_inv = true;
2524 break;
2525 }
2526 }
2527 }
2528
2529 for (i = 0; i < len / sizeof(u32); i++)
2530 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2531
2532 ctx->key_len = len;
2533
2534 /* Compute hash key by encrypting zeroes with cipher key */
2535 memset(hashkey, 0, AES_BLOCK_SIZE);
2536 aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
2537
2538 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2539 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2540 if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2541 ctx->base.needs_inv = true;
2542 break;
2543 }
2544 }
2545 }
2546
2547 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2548 ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2549
2550 memzero_explicit(hashkey, AES_BLOCK_SIZE);
2551 memzero_explicit(&aes, sizeof(aes));
2552 return 0;
2553 }
2554
2555 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2556 {
2557 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2558
2559 safexcel_aead_cra_init(tfm);
2560 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2561 ctx->state_sz = GHASH_BLOCK_SIZE;
2562 ctx->xcm = EIP197_XCM_MODE_GCM;
2563 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2564
2565 return 0;
2566 }
2567
2568 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2569 {
2570 safexcel_aead_cra_exit(tfm);
2571 }
2572
2573 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2574 unsigned int authsize)
2575 {
2576 return crypto_gcm_check_authsize(authsize);
2577 }
2578
2579 struct safexcel_alg_template safexcel_alg_gcm = {
2580 .type = SAFEXCEL_ALG_TYPE_AEAD,
2581 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2582 .alg.aead = {
2583 .setkey = safexcel_aead_gcm_setkey,
2584 .setauthsize = safexcel_aead_gcm_setauthsize,
2585 .encrypt = safexcel_aead_encrypt,
2586 .decrypt = safexcel_aead_decrypt,
2587 .ivsize = GCM_AES_IV_SIZE,
2588 .maxauthsize = GHASH_DIGEST_SIZE,
2589 .base = {
2590 .cra_name = "gcm(aes)",
2591 .cra_driver_name = "safexcel-gcm-aes",
2592 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2593 .cra_flags = CRYPTO_ALG_ASYNC |
2594 CRYPTO_ALG_ALLOCATES_MEMORY |
2595 CRYPTO_ALG_KERN_DRIVER_ONLY,
2596 .cra_blocksize = 1,
2597 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2598 .cra_alignmask = 0,
2599 .cra_init = safexcel_aead_gcm_cra_init,
2600 .cra_exit = safexcel_aead_gcm_cra_exit,
2601 .cra_module = THIS_MODULE,
2602 },
2603 },
2604 };
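
/*
 * Usage sketch (not part of the driver): requesting the "gcm(aes)" AEAD
 * registered above through the generic AEAD API. The scatterlist carries the
 * AAD followed by the payload; for encryption it must also have room for the
 * authentication tag that is appended. Compiled out, names are hypothetical.
 */
#if 0
static int example_gcm_aes_encrypt(struct scatterlist *sg,
				   unsigned int assoclen, unsigned int cryptlen,
				   const u8 *key, unsigned int keylen,
				   u8 iv[GCM_AES_IV_SIZE])
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen) ?:
	      crypto_aead_setauthsize(tfm, GHASH_DIGEST_SIZE);
	if (ret)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	/* in-place: sg holds AAD, payload and space for the 16 byte tag */
	aead_request_set_crypt(req, sg, sg, cryptlen, iv);
	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return ret;
}
#endif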
2605
2606 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2607 unsigned int len)
2608 {
2609 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2610 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2611 struct safexcel_crypto_priv *priv = ctx->base.priv;
2612 struct crypto_aes_ctx aes;
2613 int ret, i;
2614
2615 ret = aes_expandkey(&aes, key, len);
2616 if (ret) {
2617 memzero_explicit(&aes, sizeof(aes));
2618 return ret;
2619 }
2620
2621 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2622 for (i = 0; i < len / sizeof(u32); i++) {
2623 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2624 ctx->base.needs_inv = true;
2625 break;
2626 }
2627 }
2628 }
2629
2630 for (i = 0; i < len / sizeof(u32); i++) {
2631 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2632 ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2633 cpu_to_be32(aes.key_enc[i]);
2634 }
2635
2636 ctx->key_len = len;
2637 ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2638
2639 if (len == AES_KEYSIZE_192)
2640 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2641 else if (len == AES_KEYSIZE_256)
2642 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2643 else
2644 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2645
2646 memzero_explicit(&aes, sizeof(aes));
2647 return 0;
2648 }
2649
2650 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2651 {
2652 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2653
2654 safexcel_aead_cra_init(tfm);
2655 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2656 ctx->state_sz = 3 * AES_BLOCK_SIZE;
2657 ctx->xcm = EIP197_XCM_MODE_CCM;
2658 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2659 ctx->ctrinit = 0;
2660 return 0;
2661 }
2662
2663 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2664 unsigned int authsize)
2665 {
2666 /* Borrowed from crypto/ccm.c */
2667 switch (authsize) {
2668 case 4:
2669 case 6:
2670 case 8:
2671 case 10:
2672 case 12:
2673 case 14:
2674 case 16:
2675 break;
2676 default:
2677 return -EINVAL;
2678 }
2679
2680 return 0;
2681 }
2682
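/*
 * The iv[0] sanity check in the encrypt/decrypt handlers below follows the
 * CCM counter block format: the first IV byte carries L - 1, where L is the
 * size in bytes of the CCM length field (RFC 3610 allows L = 2..8), so only
 * values 1..7 are acceptable.
 */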
2683 static int safexcel_ccm_encrypt(struct aead_request *req)
2684 {
2685 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2686
2687 if (req->iv[0] < 1 || req->iv[0] > 7)
2688 return -EINVAL;
2689
2690 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2691 }
2692
2693 static int safexcel_ccm_decrypt(struct aead_request *req)
2694 {
2695 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2696
2697 if (req->iv[0] < 1 || req->iv[0] > 7)
2698 return -EINVAL;
2699
2700 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2701 }
2702
2703 struct safexcel_alg_template safexcel_alg_ccm = {
2704 .type = SAFEXCEL_ALG_TYPE_AEAD,
2705 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2706 .alg.aead = {
2707 .setkey = safexcel_aead_ccm_setkey,
2708 .setauthsize = safexcel_aead_ccm_setauthsize,
2709 .encrypt = safexcel_ccm_encrypt,
2710 .decrypt = safexcel_ccm_decrypt,
2711 .ivsize = AES_BLOCK_SIZE,
2712 .maxauthsize = AES_BLOCK_SIZE,
2713 .base = {
2714 .cra_name = "ccm(aes)",
2715 .cra_driver_name = "safexcel-ccm-aes",
2716 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2717 .cra_flags = CRYPTO_ALG_ASYNC |
2718 CRYPTO_ALG_ALLOCATES_MEMORY |
2719 CRYPTO_ALG_KERN_DRIVER_ONLY,
2720 .cra_blocksize = 1,
2721 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2722 .cra_alignmask = 0,
2723 .cra_init = safexcel_aead_ccm_cra_init,
2724 .cra_exit = safexcel_aead_cra_exit,
2725 .cra_module = THIS_MODULE,
2726 },
2727 },
2728 };
2729
2730 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2731 const u8 *key)
2732 {
2733 struct safexcel_crypto_priv *priv = ctx->base.priv;
2734
2735 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2736 if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2737 ctx->base.needs_inv = true;
2738
2739 memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2740 ctx->key_len = CHACHA_KEY_SIZE;
2741 }
2742
2743 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2744 const u8 *key, unsigned int len)
2745 {
2746 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2747
2748 if (len != CHACHA_KEY_SIZE)
2749 return -EINVAL;
2750
2751 safexcel_chacha20_setkey(ctx, key);
2752
2753 return 0;
2754 }
2755
2756 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2757 {
2758 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2759
2760 safexcel_skcipher_cra_init(tfm);
2761 ctx->alg = SAFEXCEL_CHACHA20;
2762 ctx->ctrinit = 0;
2763 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2764 return 0;
2765 }
2766
2767 struct safexcel_alg_template safexcel_alg_chacha20 = {
2768 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2769 .algo_mask = SAFEXCEL_ALG_CHACHA20,
2770 .alg.skcipher = {
2771 .setkey = safexcel_skcipher_chacha20_setkey,
2772 .encrypt = safexcel_encrypt,
2773 .decrypt = safexcel_decrypt,
2774 .min_keysize = CHACHA_KEY_SIZE,
2775 .max_keysize = CHACHA_KEY_SIZE,
2776 .ivsize = CHACHA_IV_SIZE,
2777 .base = {
2778 .cra_name = "chacha20",
2779 .cra_driver_name = "safexcel-chacha20",
2780 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2781 .cra_flags = CRYPTO_ALG_ASYNC |
2782 CRYPTO_ALG_ALLOCATES_MEMORY |
2783 CRYPTO_ALG_KERN_DRIVER_ONLY,
2784 .cra_blocksize = 1,
2785 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2786 .cra_alignmask = 0,
2787 .cra_init = safexcel_skcipher_chacha20_cra_init,
2788 .cra_exit = safexcel_skcipher_cra_exit,
2789 .cra_module = THIS_MODULE,
2790 },
2791 },
2792 };
2793
2794 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2795 const u8 *key, unsigned int len)
2796 {
2797 struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2798
2799 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2800 len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2801 /* ESP variant has nonce appended to key */
2802 len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2803 ctx->nonce = *(u32 *)(key + len);
2804 }
2805 if (len != CHACHA_KEY_SIZE)
2806 return -EINVAL;
2807
2808 safexcel_chacha20_setkey(ctx, key);
2809
2810 return 0;
2811 }
2812
2813 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2814 unsigned int authsize)
2815 {
2816 if (authsize != POLY1305_DIGEST_SIZE)
2817 return -EINVAL;
2818 return 0;
2819 }
2820
2821 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2822 enum safexcel_cipher_direction dir)
2823 {
2824 struct safexcel_cipher_req *creq = aead_request_ctx(req);
2825 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2826 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2827 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2828 struct aead_request *subreq = aead_request_ctx(req);
2829 u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2830 int ret = 0;
2831
2832 /*
2833 * Instead of wasting time detecting umpteen silly corner cases,
2834 * just dump all "small" requests to the fallback implementation.
2835 * HW would not be faster on such small requests anyway.
2836 */
2837 if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2838 req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2839 req->cryptlen > POLY1305_DIGEST_SIZE)) {
2840 return safexcel_queue_req(&req->base, creq, dir);
2841 }
2842
2843 /* HW cannot do full (AAD+payload) zero length, use fallback */
2844 memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2845 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2846 /* ESP variant has nonce appended to the key */
2847 key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2848 ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2849 CHACHA_KEY_SIZE +
2850 EIP197_AEAD_IPSEC_NONCE_SIZE);
2851 } else {
2852 ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2853 CHACHA_KEY_SIZE);
2854 }
2855 if (ret) {
2856 crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2857 crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2858 CRYPTO_TFM_REQ_MASK);
2859 return ret;
2860 }
2861
2862 aead_request_set_tfm(subreq, ctx->fback);
2863 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2864 req->base.data);
2865 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2866 req->iv);
2867 aead_request_set_ad(subreq, req->assoclen);
2868
2869 return (dir == SAFEXCEL_ENCRYPT) ?
2870 crypto_aead_encrypt(subreq) :
2871 crypto_aead_decrypt(subreq);
2872 }
2873
2874 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2875 {
2876 return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2877 }
2878
2879 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2880 {
2881 return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2882 }
2883
2884 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2885 {
2886 struct crypto_aead *aead = __crypto_aead_cast(tfm);
2887 struct aead_alg *alg = crypto_aead_alg(aead);
2888 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2889
2890 safexcel_aead_cra_init(tfm);
2891
2892 /* Allocate fallback implementation */
2893 ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2894 CRYPTO_ALG_ASYNC |
2895 CRYPTO_ALG_NEED_FALLBACK);
2896 if (IS_ERR(ctx->fback))
2897 return PTR_ERR(ctx->fback);
2898
2899 crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2900 sizeof(struct aead_request) +
2901 crypto_aead_reqsize(ctx->fback)));
2902
2903 return 0;
2904 }
2905
2906 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2907 {
2908 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2909
2910 safexcel_aead_fallback_cra_init(tfm);
2911 ctx->alg = SAFEXCEL_CHACHA20;
2912 ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2913 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2914 ctx->ctrinit = 0;
2915 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2916 ctx->state_sz = 0; /* Precomputed by HW */
2917 return 0;
2918 }
2919
2920 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2921 {
2922 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2923
2924 crypto_free_aead(ctx->fback);
2925 safexcel_aead_cra_exit(tfm);
2926 }
2927
2928 struct safexcel_alg_template safexcel_alg_chachapoly = {
2929 .type = SAFEXCEL_ALG_TYPE_AEAD,
2930 .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2931 .alg.aead = {
2932 .setkey = safexcel_aead_chachapoly_setkey,
2933 .setauthsize = safexcel_aead_chachapoly_setauthsize,
2934 .encrypt = safexcel_aead_chachapoly_encrypt,
2935 .decrypt = safexcel_aead_chachapoly_decrypt,
2936 .ivsize = CHACHAPOLY_IV_SIZE,
2937 .maxauthsize = POLY1305_DIGEST_SIZE,
2938 .base = {
2939 .cra_name = "rfc7539(chacha20,poly1305)",
2940 .cra_driver_name = "safexcel-chacha20-poly1305",
2941 /* +1 to put it above HW chacha + SW poly */
2942 .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2943 .cra_flags = CRYPTO_ALG_ASYNC |
2944 CRYPTO_ALG_ALLOCATES_MEMORY |
2945 CRYPTO_ALG_KERN_DRIVER_ONLY |
2946 CRYPTO_ALG_NEED_FALLBACK,
2947 .cra_blocksize = 1,
2948 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2949 .cra_alignmask = 0,
2950 .cra_init = safexcel_aead_chachapoly_cra_init,
2951 .cra_exit = safexcel_aead_fallback_cra_exit,
2952 .cra_module = THIS_MODULE,
2953 },
2954 },
2955 };
2956
2957 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
2958 {
2959 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2960 int ret;
2961
2962 ret = safexcel_aead_chachapoly_cra_init(tfm);
2963 ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
2964 ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
2965 return ret;
2966 }
2967
2968 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
2969 .type = SAFEXCEL_ALG_TYPE_AEAD,
2970 .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2971 .alg.aead = {
2972 .setkey = safexcel_aead_chachapoly_setkey,
2973 .setauthsize = safexcel_aead_chachapoly_setauthsize,
2974 .encrypt = safexcel_aead_chachapoly_encrypt,
2975 .decrypt = safexcel_aead_chachapoly_decrypt,
2976 .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
2977 .maxauthsize = POLY1305_DIGEST_SIZE,
2978 .base = {
2979 .cra_name = "rfc7539esp(chacha20,poly1305)",
2980 .cra_driver_name = "safexcel-chacha20-poly1305-esp",
2981 /* +1 to put it above HW chacha + SW poly */
2982 .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2983 .cra_flags = CRYPTO_ALG_ASYNC |
2984 CRYPTO_ALG_ALLOCATES_MEMORY |
2985 CRYPTO_ALG_KERN_DRIVER_ONLY |
2986 CRYPTO_ALG_NEED_FALLBACK,
2987 .cra_blocksize = 1,
2988 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2989 .cra_alignmask = 0,
2990 .cra_init = safexcel_aead_chachapolyesp_cra_init,
2991 .cra_exit = safexcel_aead_fallback_cra_exit,
2992 .cra_module = THIS_MODULE,
2993 },
2994 },
2995 };
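
/*
 * Note on the rfc7539esp variant above: the 4 byte salt/nonce is passed as
 * the tail of the key (see safexcel_aead_chachapoly_setkey()), which is why
 * the advertised ivsize drops to 8 bytes - only the per-packet ESP IV is
 * supplied with each request, the nonce half comes from the key material.
 */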
2996
2997 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2998 const u8 *key, unsigned int len)
2999 {
3000 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3001 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3002 struct safexcel_crypto_priv *priv = ctx->base.priv;
3003
3004 if (len != SM4_KEY_SIZE)
3005 return -EINVAL;
3006
3007 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3008 if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3009 ctx->base.needs_inv = true;
3010
3011 memcpy(ctx->key, key, SM4_KEY_SIZE);
3012 ctx->key_len = SM4_KEY_SIZE;
3013
3014 return 0;
3015 }
3016
3017 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3018 {
3019 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3020 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3021 return -EINVAL;
3022 else
3023 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3024 SAFEXCEL_ENCRYPT);
3025 }
3026
3027 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3028 {
3029 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3030 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3031 return -EINVAL;
3032 else
3033 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3034 SAFEXCEL_DECRYPT);
3035 }
3036
3037 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3038 {
3039 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3040
3041 safexcel_skcipher_cra_init(tfm);
3042 ctx->alg = SAFEXCEL_SM4;
3043 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3044 ctx->blocksz = 0;
3045 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3046 return 0;
3047 }
3048
3049 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3050 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3051 .algo_mask = SAFEXCEL_ALG_SM4,
3052 .alg.skcipher = {
3053 .setkey = safexcel_skcipher_sm4_setkey,
3054 .encrypt = safexcel_sm4_blk_encrypt,
3055 .decrypt = safexcel_sm4_blk_decrypt,
3056 .min_keysize = SM4_KEY_SIZE,
3057 .max_keysize = SM4_KEY_SIZE,
3058 .base = {
3059 .cra_name = "ecb(sm4)",
3060 .cra_driver_name = "safexcel-ecb-sm4",
3061 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3062 .cra_flags = CRYPTO_ALG_ASYNC |
3063 CRYPTO_ALG_ALLOCATES_MEMORY |
3064 CRYPTO_ALG_KERN_DRIVER_ONLY,
3065 .cra_blocksize = SM4_BLOCK_SIZE,
3066 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3067 .cra_alignmask = 0,
3068 .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3069 .cra_exit = safexcel_skcipher_cra_exit,
3070 .cra_module = THIS_MODULE,
3071 },
3072 },
3073 };
3074
3075 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3076 {
3077 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3078
3079 safexcel_skcipher_cra_init(tfm);
3080 ctx->alg = SAFEXCEL_SM4;
3081 ctx->blocksz = SM4_BLOCK_SIZE;
3082 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3083 return 0;
3084 }
3085
3086 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3087 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3088 .algo_mask = SAFEXCEL_ALG_SM4,
3089 .alg.skcipher = {
3090 .setkey = safexcel_skcipher_sm4_setkey,
3091 .encrypt = safexcel_sm4_blk_encrypt,
3092 .decrypt = safexcel_sm4_blk_decrypt,
3093 .min_keysize = SM4_KEY_SIZE,
3094 .max_keysize = SM4_KEY_SIZE,
3095 .ivsize = SM4_BLOCK_SIZE,
3096 .base = {
3097 .cra_name = "cbc(sm4)",
3098 .cra_driver_name = "safexcel-cbc-sm4",
3099 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3100 .cra_flags = CRYPTO_ALG_ASYNC |
3101 CRYPTO_ALG_ALLOCATES_MEMORY |
3102 CRYPTO_ALG_KERN_DRIVER_ONLY,
3103 .cra_blocksize = SM4_BLOCK_SIZE,
3104 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3105 .cra_alignmask = 0,
3106 .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3107 .cra_exit = safexcel_skcipher_cra_exit,
3108 .cra_module = THIS_MODULE,
3109 },
3110 },
3111 };
3112
3113 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3114 const u8 *key, unsigned int len)
3115 {
3116 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3117 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3118
3119 /* last 4 bytes of key are the nonce! */
3120 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3121 /* exclude the nonce here */
3122 len -= CTR_RFC3686_NONCE_SIZE;
3123
3124 return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3125 }
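
/*
 * Note: for rfc3686(ctr(sm4)) the generic API hands in the SM4 key with a
 * 4 byte nonce appended (hence the +CTR_RFC3686_NONCE_SIZE key sizes in the
 * template below); the counter block is then built from that nonce, the
 * 8 byte per-request IV and a 32 bit block counter.
 */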
3126
3127 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3128 {
3129 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3130
3131 safexcel_skcipher_cra_init(tfm);
3132 ctx->alg = SAFEXCEL_SM4;
3133 ctx->blocksz = SM4_BLOCK_SIZE;
3134 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3135 return 0;
3136 }
3137
3138 struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3139 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3140 .algo_mask = SAFEXCEL_ALG_SM4,
3141 .alg.skcipher = {
3142 .setkey = safexcel_skcipher_sm4ctr_setkey,
3143 .encrypt = safexcel_encrypt,
3144 .decrypt = safexcel_decrypt,
3145 /* Add nonce size */
3146 .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3147 .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3148 .ivsize = CTR_RFC3686_IV_SIZE,
3149 .base = {
3150 .cra_name = "rfc3686(ctr(sm4))",
3151 .cra_driver_name = "safexcel-ctr-sm4",
3152 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3153 .cra_flags = CRYPTO_ALG_ASYNC |
3154 CRYPTO_ALG_ALLOCATES_MEMORY |
3155 CRYPTO_ALG_KERN_DRIVER_ONLY,
3156 .cra_blocksize = 1,
3157 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3158 .cra_alignmask = 0,
3159 .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3160 .cra_exit = safexcel_skcipher_cra_exit,
3161 .cra_module = THIS_MODULE,
3162 },
3163 },
3164 };
3165
3166 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3167 {
3168 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3169 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3170 return -EINVAL;
3171
3172 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3173 SAFEXCEL_ENCRYPT);
3174 }
3175
3176 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3177 {
3178 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3179
3180 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3181 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3182 return -EINVAL;
3183
3184 return safexcel_queue_req(&req->base, aead_request_ctx(req),
3185 SAFEXCEL_DECRYPT);
3186 }
3187
3188 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3189 {
3190 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3191
3192 safexcel_aead_cra_init(tfm);
3193 ctx->alg = SAFEXCEL_SM4;
3194 ctx->blocksz = SM4_BLOCK_SIZE;
3195 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3196 ctx->state_sz = SHA1_DIGEST_SIZE;
3197 return 0;
3198 }
3199
3200 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3201 .type = SAFEXCEL_ALG_TYPE_AEAD,
3202 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3203 .alg.aead = {
3204 .setkey = safexcel_aead_setkey,
3205 .encrypt = safexcel_aead_sm4_blk_encrypt,
3206 .decrypt = safexcel_aead_sm4_blk_decrypt,
3207 .ivsize = SM4_BLOCK_SIZE,
3208 .maxauthsize = SHA1_DIGEST_SIZE,
3209 .base = {
3210 .cra_name = "authenc(hmac(sha1),cbc(sm4))",
3211 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3212 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3213 .cra_flags = CRYPTO_ALG_ASYNC |
3214 CRYPTO_ALG_ALLOCATES_MEMORY |
3215 CRYPTO_ALG_KERN_DRIVER_ONLY,
3216 .cra_blocksize = SM4_BLOCK_SIZE,
3217 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3218 .cra_alignmask = 0,
3219 .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3220 .cra_exit = safexcel_aead_cra_exit,
3221 .cra_module = THIS_MODULE,
3222 },
3223 },
3224 };
3225
3226 static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3227 const u8 *key, unsigned int len)
3228 {
3229 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3230 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3231
3232 /* Keep fallback cipher synchronized */
3233 return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3234 safexcel_aead_setkey(ctfm, key, len);
3235 }
3236
3237 static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3238 unsigned int authsize)
3239 {
3240 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3241 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3242
3243 /* Keep fallback cipher synchronized */
3244 return crypto_aead_setauthsize(ctx->fback, authsize);
3245 }
3246
3247 static int safexcel_aead_fallback_crypt(struct aead_request *req,
3248 enum safexcel_cipher_direction dir)
3249 {
3250 struct crypto_aead *aead = crypto_aead_reqtfm(req);
3251 struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3252 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3253 struct aead_request *subreq = aead_request_ctx(req);
3254
3255 aead_request_set_tfm(subreq, ctx->fback);
3256 aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3257 req->base.data);
3258 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3259 req->iv);
3260 aead_request_set_ad(subreq, req->assoclen);
3261
3262 return (dir == SAFEXCEL_ENCRYPT) ?
3263 crypto_aead_encrypt(subreq) :
3264 crypto_aead_decrypt(subreq);
3265 }
3266
3267 static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3268 {
3269 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3270
3271 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3272 if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3273 return -EINVAL;
3274 else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3275 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3276
3277 /* HW cannot do full (AAD+payload) zero length, use fallback */
3278 return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3279 }
3280
3281 static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3282 {
3283 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3284 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3285
3286 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3287 if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3288 return -EINVAL;
3289 else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3290 /* If input length > 0 only */
3291 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3292
3293 /* HW cannot do full (AAD+payload) zero length, use fallback */
3294 return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3295 }
3296
3297 static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3298 {
3299 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3300
3301 safexcel_aead_fallback_cra_init(tfm);
3302 ctx->alg = SAFEXCEL_SM4;
3303 ctx->blocksz = SM4_BLOCK_SIZE;
3304 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3305 ctx->state_sz = SM3_DIGEST_SIZE;
3306 return 0;
3307 }
3308
3309 struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3310 .type = SAFEXCEL_ALG_TYPE_AEAD,
3311 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3312 .alg.aead = {
3313 .setkey = safexcel_aead_fallback_setkey,
3314 .setauthsize = safexcel_aead_fallback_setauthsize,
3315 .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3316 .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3317 .ivsize = SM4_BLOCK_SIZE,
3318 .maxauthsize = SM3_DIGEST_SIZE,
3319 .base = {
3320 .cra_name = "authenc(hmac(sm3),cbc(sm4))",
3321 .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3322 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3323 .cra_flags = CRYPTO_ALG_ASYNC |
3324 CRYPTO_ALG_ALLOCATES_MEMORY |
3325 CRYPTO_ALG_KERN_DRIVER_ONLY |
3326 CRYPTO_ALG_NEED_FALLBACK,
3327 .cra_blocksize = SM4_BLOCK_SIZE,
3328 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3329 .cra_alignmask = 0,
3330 .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3331 .cra_exit = safexcel_aead_fallback_cra_exit,
3332 .cra_module = THIS_MODULE,
3333 },
3334 },
3335 };
3336
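
/* Same transform as the CBC variant above, but with SM4 in RFC3686 counter mode */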
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
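
/* Counter-mode counterpart of the SM4-CBC + HMAC-SM3 transform */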
static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
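
/*
 * RFC4106 (GCM for IPsec ESP): the last four bytes of the key are the
 * implicit salt that is programmed as the counter-mode nonce; only the
 * remaining bytes form the actual AES key.
 */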
static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
					unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}
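
/* RFC4106 only permits ICV (tag) lengths of 8, 12 or 16 bytes */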
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
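
/*
 * Reuse the plain GCM context setup, then mark the transform as IPsec ESP
 * so the engine knows to skip the IV bytes that trail the associated data.
 */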
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
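
/* RFC4543 (GMAC) always uses the full 16 byte GHASH tag */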
static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}
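
/*
 * GMAC variant: same GCM setup, but flag the context as ESP GMAC so the
 * payload is only authenticated, never encrypted.
 */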
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
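
/*
 * RFC4309 (CCM for IPsec ESP): the counter field is 4 bytes wide, so the
 * CCM flags byte encodes L - 1 = 3, and the 3 byte salt is taken from the
 * tail of the key.
 */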
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
					unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L - 1 = 3 for RFC4309 (4 byte counter) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of the key are the nonce (salt)! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
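
/*
 * The ESP associated data is the SPI plus the (possibly extended) sequence
 * number, followed by the 8 byte IV: 16 or 20 bytes in total.
 */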
static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
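
/* Standard CCM context setup plus the ESP specific AAD/IV handling */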
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};