xref: /linux/drivers/crypto/caam/caamalg.c (revision 7ec462100ef9142344ddbf86f2c3008b97acddbe)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * caam - Freescale FSL CAAM support for crypto API
4  *
5  * Copyright 2008-2011 Freescale Semiconductor, Inc.
6  * Copyright 2016-2019, 2023 NXP
7  *
8  * Based on talitos crypto API driver.
9  *
10  * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11  *
12  * ---------------                     ---------------
13  * | JobDesc #1  |-------------------->|  ShareDesc  |
14  * | *(packet 1) |                     |   (PDB)     |
15  * ---------------      |------------->|  (hashKey)  |
16  *       .              |              | (cipherKey) |
17  *       .              |    |-------->| (operation) |
18  * ---------------      |    |         ---------------
19  * | JobDesc #2  |------|    |
20  * | *(packet 2) |           |
21  * ---------------           |
22  *       .                   |
23  *       .                   |
24  * ---------------           |
25  * | JobDesc #3  |------------
26  * | *(packet 3) |
27  * ---------------
28  *
29  * The SharedDesc never changes for a connection unless rekeyed, but
30  * each packet will likely be in a different place. So all we need
31  * to know to process the packet is where the input is, where the
32  * output goes, and what context we want to process with. Context is
33  * in the SharedDesc, packet references in the JobDesc.
34  *
35  * So, a job desc looks like:
36  *
37  * ---------------------
38  * | Header            |
39  * | ShareDesc Pointer |
40  * | SEQ_OUT_PTR       |
41  * | (output buffer)   |
42  * | (output length)   |
43  * | SEQ_IN_PTR        |
44  * | (input buffer)    |
45  * | (input length)    |
46  * ---------------------
47  */
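/*
 * A minimal sketch of the above layout, using the desc_constr.h helpers
 * that init_aead_job()/init_skcipher_job() call further down in this file:
 *
 *	u32 *desc = edesc->hw_desc;
 *
 *	init_job_desc_shared(desc, sh_desc_dma, desc_len(sh_desc),
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_in_ptr(desc, src_dma, in_len, in_options);
 *	append_seq_out_ptr(desc, dst_dma, out_len, out_options);
 *
 * sh_desc_dma, src_dma, dst_dma and the length/option values are
 * placeholders for the per-request values computed at job-construction time.
 */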
48 
49 #include "compat.h"
50 
51 #include "regs.h"
52 #include "intern.h"
53 #include "desc_constr.h"
54 #include "jr.h"
55 #include "error.h"
56 #include "sg_sw_sec4.h"
57 #include "key_gen.h"
58 #include "caamalg_desc.h"
59 #include <linux/unaligned.h>
60 #include <crypto/internal/aead.h>
61 #include <crypto/internal/engine.h>
62 #include <crypto/internal/skcipher.h>
63 #include <crypto/xts.h>
64 #include <linux/dma-mapping.h>
65 #include <linux/device.h>
66 #include <linux/err.h>
67 #include <linux/module.h>
68 #include <linux/kernel.h>
69 #include <linux/slab.h>
70 #include <linux/string.h>
71 
72 /*
73  * crypto alg
74  */
75 #define CAAM_CRA_PRIORITY		3000
76 /* max key is sum of AES_MAX_KEY_SIZE, RFC3686 nonce and max split key size */
77 #define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
78 					 CTR_RFC3686_NONCE_SIZE + \
79 					 SHA512_DIGEST_SIZE * 2)
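/*
 * Worked out with the standard sizes (AES_MAX_KEY_SIZE = 32,
 * CTR_RFC3686_NONCE_SIZE = 4, SHA512_DIGEST_SIZE = 64):
 * CAAM_MAX_KEY_SIZE = 32 + 4 + 2 * 64 = 164 bytes.
 */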
80 
81 #define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
82 #define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
83 					 CAAM_CMD_SZ * 4)
84 #define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
85 					 CAAM_CMD_SZ * 5)
86 
87 #define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
88 
89 #define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
90 #define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
91 
92 struct caam_alg_entry {
93 	int class1_alg_type;
94 	int class2_alg_type;
95 	bool rfc3686;
96 	bool geniv;
97 	bool nodkp;
98 };
99 
100 struct caam_aead_alg {
101 	struct aead_engine_alg aead;
102 	struct caam_alg_entry caam;
103 	bool registered;
104 };
105 
106 struct caam_skcipher_alg {
107 	struct skcipher_engine_alg skcipher;
108 	struct caam_alg_entry caam;
109 	bool registered;
110 };
111 
112 /*
113  * per-session context
114  */
115 struct caam_ctx {
116 	u32 sh_desc_enc[DESC_MAX_USED_LEN];
117 	u32 sh_desc_dec[DESC_MAX_USED_LEN];
118 	u8 key[CAAM_MAX_KEY_SIZE];
119 	dma_addr_t sh_desc_enc_dma;
120 	dma_addr_t sh_desc_dec_dma;
121 	dma_addr_t key_dma;
122 	enum dma_data_direction dir;
123 	struct device *jrdev;
124 	struct alginfo adata;
125 	struct alginfo cdata;
126 	unsigned int authsize;
127 	bool xts_key_fallback;
128 	struct crypto_skcipher *fallback;
129 };
130 
131 struct caam_skcipher_req_ctx {
132 	struct skcipher_edesc *edesc;
133 	struct skcipher_request fallback_req;
134 };
135 
136 struct caam_aead_req_ctx {
137 	struct aead_edesc *edesc;
138 };
139 
140 static int aead_null_set_sh_desc(struct crypto_aead *aead)
141 {
142 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
143 	struct device *jrdev = ctx->jrdev;
144 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
145 	u32 *desc;
146 	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
147 			ctx->adata.keylen_pad;
148 
149 	/*
150 	 * Job Descriptor and Shared Descriptors
151 	 * must all fit into the 64-word Descriptor h/w Buffer
152 	 */
153 	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
154 		ctx->adata.key_inline = true;
155 		ctx->adata.key_virt = ctx->key;
156 	} else {
157 		ctx->adata.key_inline = false;
158 		ctx->adata.key_dma = ctx->key_dma;
159 	}
160 
161 	/* aead_encrypt shared descriptor */
162 	desc = ctx->sh_desc_enc;
163 	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
164 				    ctrlpriv->era);
165 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
166 				   desc_bytes(desc), ctx->dir);
167 
168 	/*
169 	 * Job Descriptor and Shared Descriptors
170 	 * must all fit into the 64-word Descriptor h/w Buffer
171 	 */
172 	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
173 		ctx->adata.key_inline = true;
174 		ctx->adata.key_virt = ctx->key;
175 	} else {
176 		ctx->adata.key_inline = false;
177 		ctx->adata.key_dma = ctx->key_dma;
178 	}
179 
180 	/* aead_decrypt shared descriptor */
181 	desc = ctx->sh_desc_dec;
182 	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
183 				    ctrlpriv->era);
184 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
185 				   desc_bytes(desc), ctx->dir);
186 
187 	return 0;
188 }
189 
190 static int aead_set_sh_desc(struct crypto_aead *aead)
191 {
192 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
193 						 struct caam_aead_alg,
194 						 aead.base);
195 	unsigned int ivsize = crypto_aead_ivsize(aead);
196 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
197 	struct device *jrdev = ctx->jrdev;
198 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
199 	u32 ctx1_iv_off = 0;
200 	u32 *desc, *nonce = NULL;
201 	u32 inl_mask;
202 	unsigned int data_len[2];
203 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
204 			       OP_ALG_AAI_CTR_MOD128);
205 	const bool is_rfc3686 = alg->caam.rfc3686;
206 
207 	if (!ctx->authsize)
208 		return 0;
209 
210 	/* NULL encryption / decryption */
211 	if (!ctx->cdata.keylen)
212 		return aead_null_set_sh_desc(aead);
213 
214 	/*
215 	 * AES-CTR needs to load IV in CONTEXT1 reg
216 	 * at an offset of 128 bits (16 bytes)
217 	 * CONTEXT1[255:128] = IV
218 	 */
219 	if (ctr_mode)
220 		ctx1_iv_off = 16;
221 
222 	/*
223 	 * RFC3686 specific:
224 	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
225 	 */
226 	if (is_rfc3686) {
227 		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
228 		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
229 				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
230 	}
231 
232 	/*
233 	 * In case |user key| > |derived key|, using DKP<imm,imm>
234 	 * would result in invalid opcodes (last bytes of user key) in
235 	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
236 	 * virtual and dma key addresses are needed.
237 	 */
238 	ctx->adata.key_virt = ctx->key;
239 	ctx->adata.key_dma = ctx->key_dma;
240 
241 	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
242 	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
243 
244 	data_len[0] = ctx->adata.keylen_pad;
245 	data_len[1] = ctx->cdata.keylen;
246 
247 	if (alg->caam.geniv)
248 		goto skip_enc;
249 
250 	/*
251 	 * Job Descriptor and Shared Descriptors
252 	 * must all fit into the 64-word Descriptor h/w Buffer
253 	 */
254 	if (desc_inline_query(DESC_AEAD_ENC_LEN +
255 			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
256 			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
257 			      ARRAY_SIZE(data_len)) < 0)
258 		return -EINVAL;
259 
260 	ctx->adata.key_inline = !!(inl_mask & 1);
261 	ctx->cdata.key_inline = !!(inl_mask & 2);
262 
263 	/* aead_encrypt shared descriptor */
264 	desc = ctx->sh_desc_enc;
265 	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
266 			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
267 			       false, ctrlpriv->era);
268 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
269 				   desc_bytes(desc), ctx->dir);
270 
271 skip_enc:
272 	/*
273 	 * Job Descriptor and Shared Descriptors
274 	 * must all fit into the 64-word Descriptor h/w Buffer
275 	 */
276 	if (desc_inline_query(DESC_AEAD_DEC_LEN +
277 			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
278 			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
279 			      ARRAY_SIZE(data_len)) < 0)
280 		return -EINVAL;
281 
282 	ctx->adata.key_inline = !!(inl_mask & 1);
283 	ctx->cdata.key_inline = !!(inl_mask & 2);
284 
285 	/* aead_decrypt shared descriptor */
286 	desc = ctx->sh_desc_dec;
287 	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
288 			       ctx->authsize, alg->caam.geniv, is_rfc3686,
289 			       nonce, ctx1_iv_off, false, ctrlpriv->era);
290 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
291 				   desc_bytes(desc), ctx->dir);
292 
293 	if (!alg->caam.geniv)
294 		goto skip_givenc;
295 
296 	/*
297 	 * Job Descriptor and Shared Descriptors
298 	 * must all fit into the 64-word Descriptor h/w Buffer
299 	 */
300 	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
301 			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
302 			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
303 			      ARRAY_SIZE(data_len)) < 0)
304 		return -EINVAL;
305 
306 	ctx->adata.key_inline = !!(inl_mask & 1);
307 	ctx->cdata.key_inline = !!(inl_mask & 2);
308 
309 	/* aead_givencrypt shared descriptor */
310 	desc = ctx->sh_desc_enc;
311 	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
312 				  ctx->authsize, is_rfc3686, nonce,
313 				  ctx1_iv_off, false, ctrlpriv->era);
314 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
315 				   desc_bytes(desc), ctx->dir);
316 
317 skip_givenc:
318 	return 0;
319 }
320 
321 static int aead_setauthsize(struct crypto_aead *authenc,
322 				    unsigned int authsize)
323 {
324 	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
325 
326 	ctx->authsize = authsize;
327 	aead_set_sh_desc(authenc);
328 
329 	return 0;
330 }
331 
332 static int gcm_set_sh_desc(struct crypto_aead *aead)
333 {
334 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
335 	struct device *jrdev = ctx->jrdev;
336 	unsigned int ivsize = crypto_aead_ivsize(aead);
337 	u32 *desc;
338 	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
339 			ctx->cdata.keylen;
340 
341 	if (!ctx->cdata.keylen || !ctx->authsize)
342 		return 0;
343 
344 	/*
345 	 * AES GCM encrypt shared descriptor
346 	 * Job Descriptor and Shared Descriptor
347 	 * must fit into the 64-word Descriptor h/w Buffer
348 	 */
349 	if (rem_bytes >= DESC_GCM_ENC_LEN) {
350 		ctx->cdata.key_inline = true;
351 		ctx->cdata.key_virt = ctx->key;
352 	} else {
353 		ctx->cdata.key_inline = false;
354 		ctx->cdata.key_dma = ctx->key_dma;
355 	}
356 
357 	desc = ctx->sh_desc_enc;
358 	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
359 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
360 				   desc_bytes(desc), ctx->dir);
361 
362 	/*
363 	 * Job Descriptor and Shared Descriptors
364 	 * must all fit into the 64-word Descriptor h/w Buffer
365 	 */
366 	if (rem_bytes >= DESC_GCM_DEC_LEN) {
367 		ctx->cdata.key_inline = true;
368 		ctx->cdata.key_virt = ctx->key;
369 	} else {
370 		ctx->cdata.key_inline = false;
371 		ctx->cdata.key_dma = ctx->key_dma;
372 	}
373 
374 	desc = ctx->sh_desc_dec;
375 	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
376 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
377 				   desc_bytes(desc), ctx->dir);
378 
379 	return 0;
380 }
381 
382 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
383 {
384 	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
385 	int err;
386 
387 	err = crypto_gcm_check_authsize(authsize);
388 	if (err)
389 		return err;
390 
391 	ctx->authsize = authsize;
392 	gcm_set_sh_desc(authenc);
393 
394 	return 0;
395 }
396 
397 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
398 {
399 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
400 	struct device *jrdev = ctx->jrdev;
401 	unsigned int ivsize = crypto_aead_ivsize(aead);
402 	u32 *desc;
403 	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
404 			ctx->cdata.keylen;
405 
406 	if (!ctx->cdata.keylen || !ctx->authsize)
407 		return 0;
408 
409 	/*
410 	 * RFC4106 encrypt shared descriptor
411 	 * Job Descriptor and Shared Descriptor
412 	 * must fit into the 64-word Descriptor h/w Buffer
413 	 */
414 	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
415 		ctx->cdata.key_inline = true;
416 		ctx->cdata.key_virt = ctx->key;
417 	} else {
418 		ctx->cdata.key_inline = false;
419 		ctx->cdata.key_dma = ctx->key_dma;
420 	}
421 
422 	desc = ctx->sh_desc_enc;
423 	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
424 				  false);
425 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
426 				   desc_bytes(desc), ctx->dir);
427 
428 	/*
429 	 * Job Descriptor and Shared Descriptors
430 	 * must all fit into the 64-word Descriptor h/w Buffer
431 	 */
432 	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
433 		ctx->cdata.key_inline = true;
434 		ctx->cdata.key_virt = ctx->key;
435 	} else {
436 		ctx->cdata.key_inline = false;
437 		ctx->cdata.key_dma = ctx->key_dma;
438 	}
439 
440 	desc = ctx->sh_desc_dec;
441 	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
442 				  false);
443 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
444 				   desc_bytes(desc), ctx->dir);
445 
446 	return 0;
447 }
448 
449 static int rfc4106_setauthsize(struct crypto_aead *authenc,
450 			       unsigned int authsize)
451 {
452 	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
453 	int err;
454 
455 	err = crypto_rfc4106_check_authsize(authsize);
456 	if (err)
457 		return err;
458 
459 	ctx->authsize = authsize;
460 	rfc4106_set_sh_desc(authenc);
461 
462 	return 0;
463 }
464 
465 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
466 {
467 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
468 	struct device *jrdev = ctx->jrdev;
469 	unsigned int ivsize = crypto_aead_ivsize(aead);
470 	u32 *desc;
471 	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
472 			ctx->cdata.keylen;
473 
474 	if (!ctx->cdata.keylen || !ctx->authsize)
475 		return 0;
476 
477 	/*
478 	 * RFC4543 encrypt shared descriptor
479 	 * Job Descriptor and Shared Descriptor
480 	 * must fit into the 64-word Descriptor h/w Buffer
481 	 */
482 	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
483 		ctx->cdata.key_inline = true;
484 		ctx->cdata.key_virt = ctx->key;
485 	} else {
486 		ctx->cdata.key_inline = false;
487 		ctx->cdata.key_dma = ctx->key_dma;
488 	}
489 
490 	desc = ctx->sh_desc_enc;
491 	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
492 				  false);
493 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
494 				   desc_bytes(desc), ctx->dir);
495 
496 	/*
497 	 * Job Descriptor and Shared Descriptors
498 	 * must all fit into the 64-word Descriptor h/w Buffer
499 	 */
500 	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
501 		ctx->cdata.key_inline = true;
502 		ctx->cdata.key_virt = ctx->key;
503 	} else {
504 		ctx->cdata.key_inline = false;
505 		ctx->cdata.key_dma = ctx->key_dma;
506 	}
507 
508 	desc = ctx->sh_desc_dec;
509 	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
510 				  false);
511 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
512 				   desc_bytes(desc), ctx->dir);
513 
514 	return 0;
515 }
516 
517 static int rfc4543_setauthsize(struct crypto_aead *authenc,
518 			       unsigned int authsize)
519 {
520 	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
521 
522 	if (authsize != 16)
523 		return -EINVAL;
524 
525 	ctx->authsize = authsize;
526 	rfc4543_set_sh_desc(authenc);
527 
528 	return 0;
529 }
530 
531 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
532 {
533 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
534 	struct device *jrdev = ctx->jrdev;
535 	unsigned int ivsize = crypto_aead_ivsize(aead);
536 	u32 *desc;
537 
538 	if (!ctx->cdata.keylen || !ctx->authsize)
539 		return 0;
540 
541 	desc = ctx->sh_desc_enc;
542 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
543 			       ctx->authsize, true, false);
544 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
545 				   desc_bytes(desc), ctx->dir);
546 
547 	desc = ctx->sh_desc_dec;
548 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
549 			       ctx->authsize, false, false);
550 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
551 				   desc_bytes(desc), ctx->dir);
552 
553 	return 0;
554 }
555 
556 static int chachapoly_setauthsize(struct crypto_aead *aead,
557 				  unsigned int authsize)
558 {
559 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
560 
561 	if (authsize != POLY1305_DIGEST_SIZE)
562 		return -EINVAL;
563 
564 	ctx->authsize = authsize;
565 	return chachapoly_set_sh_desc(aead);
566 }
567 
568 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
569 			     unsigned int keylen)
570 {
571 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
572 	unsigned int ivsize = crypto_aead_ivsize(aead);
573 	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
574 
575 	if (keylen != CHACHA_KEY_SIZE + saltlen)
576 		return -EINVAL;
577 
578 	memcpy(ctx->key, key, keylen);
579 	ctx->cdata.key_virt = ctx->key;
580 	ctx->cdata.keylen = keylen - saltlen;
581 
582 	return chachapoly_set_sh_desc(aead);
583 }
584 
585 static int aead_setkey(struct crypto_aead *aead,
586 			       const u8 *key, unsigned int keylen)
587 {
588 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
589 	struct device *jrdev = ctx->jrdev;
590 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
591 	struct crypto_authenc_keys keys;
592 	int ret = 0;
593 
594 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
595 		goto badkey;
596 
597 	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
598 	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
599 	       keys.authkeylen);
600 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
601 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
602 
603 	/*
604 	 * If DKP is supported, use it in the shared descriptor to generate
605 	 * the split key.
606 	 */
607 	if (ctrlpriv->era >= 6) {
608 		ctx->adata.keylen = keys.authkeylen;
609 		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
610 						      OP_ALG_ALGSEL_MASK);
611 
612 		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
613 			goto badkey;
614 
615 		memcpy(ctx->key, keys.authkey, keys.authkeylen);
616 		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
617 		       keys.enckeylen);
618 		dma_sync_single_for_device(jrdev, ctx->key_dma,
619 					   ctx->adata.keylen_pad +
620 					   keys.enckeylen, ctx->dir);
621 		goto skip_split_key;
622 	}
623 
624 	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
625 			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
626 			    keys.enckeylen);
627 	if (ret)
628 		goto badkey;
630 
631 	/* append encryption key to auth split key */
632 	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
633 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
634 				   keys.enckeylen, ctx->dir);
635 
636 	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
637 			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
638 			     ctx->adata.keylen_pad + keys.enckeylen, 1);
639 
640 skip_split_key:
641 	ctx->cdata.keylen = keys.enckeylen;
642 	memzero_explicit(&keys, sizeof(keys));
643 	return aead_set_sh_desc(aead);
644 badkey:
645 	memzero_explicit(&keys, sizeof(keys));
646 	return -EINVAL;
647 }
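/*
 * Caller-side sketch, assuming the layout from <crypto/authenc.h> (this is
 * not driver code): the blob parsed by crypto_authenc_extractkeys() above is
 * an rtattr-prefixed buffer:
 *
 *	struct rtattr *rta = (void *)keyblob;
 *	struct crypto_authenc_key_param *param;
 *
 *	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
 *	rta->rta_len = RTA_LENGTH(sizeof(*param));
 *	param = RTA_DATA(rta);
 *	param->enckeylen = cpu_to_be32(enckeylen);
 *
 * with the authentication key bytes following, then the encryption key
 * bytes; keyblob/enckeylen are placeholder names.
 */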
648 
649 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
650 			    unsigned int keylen)
651 {
652 	struct crypto_authenc_keys keys;
653 	int err;
654 
655 	err = crypto_authenc_extractkeys(&keys, key, keylen);
656 	if (unlikely(err))
657 		return err;
658 
659 	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
660 	      aead_setkey(aead, key, keylen);
661 
662 	memzero_explicit(&keys, sizeof(keys));
663 	return err;
664 }
665 
666 static int gcm_setkey(struct crypto_aead *aead,
667 		      const u8 *key, unsigned int keylen)
668 {
669 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
670 	struct device *jrdev = ctx->jrdev;
671 	int err;
672 
673 	err = aes_check_keylen(keylen);
674 	if (err)
675 		return err;
676 
677 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
678 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
679 
680 	memcpy(ctx->key, key, keylen);
681 	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
682 	ctx->cdata.keylen = keylen;
683 
684 	return gcm_set_sh_desc(aead);
685 }
686 
687 static int rfc4106_setkey(struct crypto_aead *aead,
688 			  const u8 *key, unsigned int keylen)
689 {
690 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
691 	struct device *jrdev = ctx->jrdev;
692 	int err;
693 
694 	err = aes_check_keylen(keylen - 4);
695 	if (err)
696 		return err;
697 
698 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
699 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
700 
701 	memcpy(ctx->key, key, keylen);
702 
703 	/*
704 	 * The last four bytes of the key material are used as the salt value
705 	 * in the nonce. Update the AES key length.
706 	 */
707 	ctx->cdata.keylen = keylen - 4;
708 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
709 				   ctx->dir);
710 	return rfc4106_set_sh_desc(aead);
711 }
712 
713 static int rfc4543_setkey(struct crypto_aead *aead,
714 			  const u8 *key, unsigned int keylen)
715 {
716 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
717 	struct device *jrdev = ctx->jrdev;
718 	int err;
719 
720 	err = aes_check_keylen(keylen - 4);
721 	if (err)
722 		return err;
723 
724 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
725 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
726 
727 	memcpy(ctx->key, key, keylen);
728 
729 	/*
730 	 * The last four bytes of the key material are used as the salt value
731 	 * in the nonce. Update the AES key length.
732 	 */
733 	ctx->cdata.keylen = keylen - 4;
734 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
735 				   ctx->dir);
736 	return rfc4543_set_sh_desc(aead);
737 }
738 
739 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
740 			   unsigned int keylen, const u32 ctx1_iv_off)
741 {
742 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
743 	struct caam_skcipher_alg *alg =
744 		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
745 			     skcipher.base);
746 	struct device *jrdev = ctx->jrdev;
747 	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
748 	u32 *desc;
749 	const bool is_rfc3686 = alg->caam.rfc3686;
750 
751 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
752 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
753 
754 	ctx->cdata.keylen = keylen;
755 	ctx->cdata.key_virt = key;
756 	ctx->cdata.key_inline = true;
757 
758 	/* skcipher_encrypt shared descriptor */
759 	desc = ctx->sh_desc_enc;
760 	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
761 				   ctx1_iv_off);
762 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
763 				   desc_bytes(desc), ctx->dir);
764 
765 	/* skcipher_decrypt shared descriptor */
766 	desc = ctx->sh_desc_dec;
767 	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
768 				   ctx1_iv_off);
769 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
770 				   desc_bytes(desc), ctx->dir);
771 
772 	return 0;
773 }
774 
775 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
776 			       const u8 *key, unsigned int keylen)
777 {
778 	int err;
779 
780 	err = aes_check_keylen(keylen);
781 	if (err)
782 		return err;
783 
784 	return skcipher_setkey(skcipher, key, keylen, 0);
785 }
786 
787 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
788 				   const u8 *key, unsigned int keylen)
789 {
790 	u32 ctx1_iv_off;
791 	int err;
792 
793 	/*
794 	 * RFC3686 specific:
795 	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
796 	 *	| *key = {KEY, NONCE}
797 	 */
798 	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
799 	keylen -= CTR_RFC3686_NONCE_SIZE;
800 
801 	err = aes_check_keylen(keylen);
802 	if (err)
803 		return err;
804 
805 	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
806 }
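/*
 * e.g. rfc3686(ctr(aes)) with a 128-bit key: the caller passes a 20-byte
 * blob = 16-byte AES key followed by the 4-byte nonce, so the length seen
 * by aes_check_keylen() above is 16.
 */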
807 
808 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
809 			       const u8 *key, unsigned int keylen)
810 {
811 	u32 ctx1_iv_off;
812 	int err;
813 
814 	/*
815 	 * AES-CTR needs to load IV in CONTEXT1 reg
816 	 * at an offset of 128 bits (16 bytes)
817 	 * CONTEXT1[255:128] = IV
818 	 */
819 	ctx1_iv_off = 16;
820 
821 	err = aes_check_keylen(keylen);
822 	if (err)
823 		return err;
824 
825 	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
826 }
827 
828 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
829 			       const u8 *key, unsigned int keylen)
830 {
831 	return verify_skcipher_des_key(skcipher, key) ?:
832 	       skcipher_setkey(skcipher, key, keylen, 0);
833 }
834 
835 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
836 				const u8 *key, unsigned int keylen)
837 {
838 	return verify_skcipher_des3_key(skcipher, key) ?:
839 	       skcipher_setkey(skcipher, key, keylen, 0);
840 }
841 
842 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
843 			       unsigned int keylen)
844 {
845 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
846 	struct device *jrdev = ctx->jrdev;
847 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
848 	u32 *desc;
849 	int err;
850 
851 	err = xts_verify_key(skcipher, key, keylen);
852 	if (err) {
853 		dev_dbg(jrdev, "key size mismatch\n");
854 		return err;
855 	}
856 
857 	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
858 		ctx->xts_key_fallback = true;
859 
860 	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
861 		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
862 		if (err)
863 			return err;
864 	}
865 
866 	ctx->cdata.keylen = keylen;
867 	ctx->cdata.key_virt = key;
868 	ctx->cdata.key_inline = true;
869 
870 	/* xts_skcipher_encrypt shared descriptor */
871 	desc = ctx->sh_desc_enc;
872 	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
873 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
874 				   desc_bytes(desc), ctx->dir);
875 
876 	/* xts_skcipher_decrypt shared descriptor */
877 	desc = ctx->sh_desc_dec;
878 	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
879 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
880 				   desc_bytes(desc), ctx->dir);
881 
882 	return 0;
883 }
884 
885 /*
886  * aead_edesc - s/w-extended aead descriptor
887  * @src_nents: number of segments in input s/w scatterlist
888  * @dst_nents: number of segments in output s/w scatterlist
889  * @mapped_src_nents: number of segments in input h/w link table
890  * @mapped_dst_nents: number of segments in output h/w link table
891  * @sec4_sg_bytes: length of dma mapped sec4_sg space
892  * @bklog: stored to determine if the request needs backlog
893  * @sec4_sg_dma: bus physical mapped address of h/w link table
894  * @sec4_sg: pointer to h/w link table
895  * @hw_desc: the h/w job descriptor followed by any referenced link tables
896  */
897 struct aead_edesc {
898 	int src_nents;
899 	int dst_nents;
900 	int mapped_src_nents;
901 	int mapped_dst_nents;
902 	int sec4_sg_bytes;
903 	bool bklog;
904 	dma_addr_t sec4_sg_dma;
905 	struct sec4_sg_entry *sec4_sg;
906 	u32 hw_desc[];
907 };
908 
909 /*
910  * skcipher_edesc - s/w-extended skcipher descriptor
911  * @src_nents: number of segments in input s/w scatterlist
912  * @dst_nents: number of segments in output s/w scatterlist
913  * @mapped_src_nents: number of segments in input h/w link table
914  * @mapped_dst_nents: number of segments in output h/w link table
915  * @iv_dma: dma address of iv for checking continuity and link table
916  * @sec4_sg_bytes: length of dma mapped sec4_sg space
917  * @bklog: stored to determine if the request needs backlog
918  * @sec4_sg_dma: bus physical mapped address of h/w link table
919  * @sec4_sg: pointer to h/w link table
920  * @hw_desc: the h/w job descriptor followed by any referenced link tables
921  *	     and IV
922  */
923 struct skcipher_edesc {
924 	int src_nents;
925 	int dst_nents;
926 	int mapped_src_nents;
927 	int mapped_dst_nents;
928 	dma_addr_t iv_dma;
929 	int sec4_sg_bytes;
930 	bool bklog;
931 	dma_addr_t sec4_sg_dma;
932 	struct sec4_sg_entry *sec4_sg;
933 	u32 hw_desc[];
934 };
935 
936 static void caam_unmap(struct device *dev, struct scatterlist *src,
937 		       struct scatterlist *dst, int src_nents,
938 		       int dst_nents,
939 		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
940 		       int sec4_sg_bytes)
941 {
942 	if (dst != src) {
943 		if (src_nents)
944 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
945 		if (dst_nents)
946 			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
947 	} else {
948 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
949 	}
950 
951 	if (iv_dma)
952 		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
953 	if (sec4_sg_bytes)
954 		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
955 				 DMA_TO_DEVICE);
956 }
957 
958 static void aead_unmap(struct device *dev,
959 		       struct aead_edesc *edesc,
960 		       struct aead_request *req)
961 {
962 	caam_unmap(dev, req->src, req->dst,
963 		   edesc->src_nents, edesc->dst_nents, 0, 0,
964 		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
965 }
966 
967 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
968 			   struct skcipher_request *req)
969 {
970 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
971 	int ivsize = crypto_skcipher_ivsize(skcipher);
972 
973 	caam_unmap(dev, req->src, req->dst,
974 		   edesc->src_nents, edesc->dst_nents,
975 		   edesc->iv_dma, ivsize,
976 		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
977 }
978 
979 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
980 			    void *context)
981 {
982 	struct aead_request *req = context;
983 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
984 	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
985 	struct aead_edesc *edesc;
986 	int ecode = 0;
987 	bool has_bklog;
988 
989 	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
990 
991 	edesc = rctx->edesc;
992 	has_bklog = edesc->bklog;
993 
994 	if (err)
995 		ecode = caam_jr_strstatus(jrdev, err);
996 
997 	aead_unmap(jrdev, edesc, req);
998 
999 	kfree(edesc);
1000 
1001 	/*
1002 	 * If no backlog flag, the completion of the request is done
1003 	 * by CAAM, not by the crypto engine.
1004 	 */
1005 	if (!has_bklog)
1006 		aead_request_complete(req, ecode);
1007 	else
1008 		crypto_finalize_aead_request(jrp->engine, req, ecode);
1009 }
1010 
1011 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
1012 {
1014 	return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1015 			 dma_get_cache_alignment());
1016 }
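/*
 * This relies on the layout produced by skcipher_edesc_alloc() below:
 * [struct skcipher_edesc | hw_desc | sec4_sg | padding | IV], with the IV
 * placed on a DMA cache-alignment boundary, which the PTR_ALIGN() in
 * skcipher_edesc_iv() above recovers.
 */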
1017 
1018 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
1019 				void *context)
1020 {
1021 	struct skcipher_request *req = context;
1022 	struct skcipher_edesc *edesc;
1023 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1024 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1025 	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
1026 	int ivsize = crypto_skcipher_ivsize(skcipher);
1027 	int ecode = 0;
1028 	bool has_bklog;
1029 
1030 	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1031 
1032 	edesc = rctx->edesc;
1033 	has_bklog = edesc->bklog;
1034 	if (err)
1035 		ecode = caam_jr_strstatus(jrdev, err);
1036 
1037 	skcipher_unmap(jrdev, edesc, req);
1038 
1039 	/*
1040 	 * The crypto API expects us to set the IV (req->iv) to the last
1041 	 * ciphertext block (CBC mode) or last counter (CTR mode).
1042 	 * This is used e.g. by the CTS mode.
1043 	 */
1044 	if (ivsize && !ecode) {
1045 		memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);
1046 
1047 		print_hex_dump_debug("dstiv  @" __stringify(__LINE__)": ",
1048 				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1049 				     ivsize, 1);
1050 	}
1051 
1052 	caam_dump_sg("dst    @" __stringify(__LINE__)": ",
1053 		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1054 		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1055 
1056 	kfree(edesc);
1057 
1058 	/*
1059 	 * If no backlog flag, the completion of the request is done
1060 	 * by CAAM, not by the crypto engine.
1061 	 */
1062 	if (!has_bklog)
1063 		skcipher_request_complete(req, ecode);
1064 	else
1065 		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
1066 }
1067 
1068 /*
1069  * Fill in aead job descriptor
1070  */
1071 static void init_aead_job(struct aead_request *req,
1072 			  struct aead_edesc *edesc,
1073 			  bool all_contig, bool encrypt)
1074 {
1075 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1076 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1077 	int authsize = ctx->authsize;
1078 	u32 *desc = edesc->hw_desc;
1079 	u32 out_options, in_options;
1080 	dma_addr_t dst_dma, src_dma;
1081 	int len, sec4_sg_index = 0;
1082 	dma_addr_t ptr;
1083 	u32 *sh_desc;
1084 
1085 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1086 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1087 
1088 	len = desc_len(sh_desc);
1089 	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1090 
1091 	if (all_contig) {
1092 		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1093 						    0;
1094 		in_options = 0;
1095 	} else {
1096 		src_dma = edesc->sec4_sg_dma;
1097 		sec4_sg_index += edesc->mapped_src_nents;
1098 		in_options = LDST_SGF;
1099 	}
1100 
1101 	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1102 			  in_options);
1103 
1104 	dst_dma = src_dma;
1105 	out_options = in_options;
1106 
1107 	if (unlikely(req->src != req->dst)) {
1108 		if (!edesc->mapped_dst_nents) {
1109 			dst_dma = 0;
1110 			out_options = 0;
1111 		} else if (edesc->mapped_dst_nents == 1) {
1112 			dst_dma = sg_dma_address(req->dst);
1113 			out_options = 0;
1114 		} else {
1115 			dst_dma = edesc->sec4_sg_dma +
1116 				  sec4_sg_index *
1117 				  sizeof(struct sec4_sg_entry);
1118 			out_options = LDST_SGF;
1119 		}
1120 	}
1121 
1122 	if (encrypt)
1123 		append_seq_out_ptr(desc, dst_dma,
1124 				   req->assoclen + req->cryptlen + authsize,
1125 				   out_options);
1126 	else
1127 		append_seq_out_ptr(desc, dst_dma,
1128 				   req->assoclen + req->cryptlen - authsize,
1129 				   out_options);
1130 }
1131 
1132 static void init_gcm_job(struct aead_request *req,
1133 			 struct aead_edesc *edesc,
1134 			 bool all_contig, bool encrypt)
1135 {
1136 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1137 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1138 	unsigned int ivsize = crypto_aead_ivsize(aead);
1139 	u32 *desc = edesc->hw_desc;
1140 	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1141 	unsigned int last;
1142 
1143 	init_aead_job(req, edesc, all_contig, encrypt);
1144 	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1145 
1146 	/* BUG: This should not be specific to generic GCM. */
1147 	last = 0;
1148 	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1149 		last = FIFOLD_TYPE_LAST1;
1150 
1151 	/* Read GCM IV */
1152 	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1153 			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1154 	/* Append Salt */
1155 	if (!generic_gcm)
1156 		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1157 	/* Append IV */
1158 	append_data(desc, req->iv, ivsize);
1159 	/* End of blank commands */
1160 }
1161 
1162 static void init_chachapoly_job(struct aead_request *req,
1163 				struct aead_edesc *edesc, bool all_contig,
1164 				bool encrypt)
1165 {
1166 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1167 	unsigned int ivsize = crypto_aead_ivsize(aead);
1168 	unsigned int assoclen = req->assoclen;
1169 	u32 *desc = edesc->hw_desc;
1170 	u32 ctx_iv_off = 4;
1171 
1172 	init_aead_job(req, edesc, all_contig, encrypt);
1173 
1174 	if (ivsize != CHACHAPOLY_IV_SIZE) {
1175 		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1176 		ctx_iv_off += 4;
1177 
1178 		/*
1179 		 * The associated data comes already with the IV but we need
1180 		 * The associated data already comes with the IV, but we need
1181 		 */
1182 		assoclen -= ivsize;
1183 	}
1184 
1185 	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1186 
1187 	/*
1188 	 * For IPsec, load the IV at a higher offset in the same register.
1189 	 * For RFC7539, simply load the 12-byte nonce in a single operation.
1190 	 */
1191 	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1192 			   LDST_SRCDST_BYTE_CONTEXT |
1193 			   ctx_iv_off << LDST_OFFSET_SHIFT);
1194 }
1195 
1196 static void init_authenc_job(struct aead_request *req,
1197 			     struct aead_edesc *edesc,
1198 			     bool all_contig, bool encrypt)
1199 {
1200 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1201 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1202 						 struct caam_aead_alg,
1203 						 aead.base);
1204 	unsigned int ivsize = crypto_aead_ivsize(aead);
1205 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1206 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1207 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1208 			       OP_ALG_AAI_CTR_MOD128);
1209 	const bool is_rfc3686 = alg->caam.rfc3686;
1210 	u32 *desc = edesc->hw_desc;
1211 	u32 ivoffset = 0;
1212 
1213 	/*
1214 	 * AES-CTR needs to load IV in CONTEXT1 reg
1215 	 * at an offset of 128 bits (16 bytes)
1216 	 * CONTEXT1[255:128] = IV
1217 	 */
1218 	if (ctr_mode)
1219 		ivoffset = 16;
1220 
1221 	/*
1222 	 * RFC3686 specific:
1223 	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1224 	 */
1225 	if (is_rfc3686)
1226 		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1227 
1228 	init_aead_job(req, edesc, all_contig, encrypt);
1229 
1230 	/*
1231 	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1232 	 * having DPOVRD as destination.
1233 	 */
1234 	if (ctrlpriv->era < 3)
1235 		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1236 	else
1237 		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1238 
1239 	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1240 		append_load_as_imm(desc, req->iv, ivsize,
1241 				   LDST_CLASS_1_CCB |
1242 				   LDST_SRCDST_BYTE_CONTEXT |
1243 				   (ivoffset << LDST_OFFSET_SHIFT));
1244 }
1245 
1246 /*
1247  * Fill in skcipher job descriptor
1248  */
1249 static void init_skcipher_job(struct skcipher_request *req,
1250 			      struct skcipher_edesc *edesc,
1251 			      const bool encrypt)
1252 {
1253 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1254 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1255 	struct device *jrdev = ctx->jrdev;
1256 	int ivsize = crypto_skcipher_ivsize(skcipher);
1257 	u32 *desc = edesc->hw_desc;
1258 	u32 *sh_desc;
1259 	u32 in_options = 0, out_options = 0;
1260 	dma_addr_t src_dma, dst_dma, ptr;
1261 	int len, sec4_sg_index = 0;
1262 
1263 	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1264 			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1265 	dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
1266 	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1267 
1268 	caam_dump_sg("src    @" __stringify(__LINE__)": ",
1269 		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1270 		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1271 
1272 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1273 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1274 
1275 	len = desc_len(sh_desc);
1276 	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1277 
1278 	if (ivsize || edesc->mapped_src_nents > 1) {
1279 		src_dma = edesc->sec4_sg_dma;
1280 		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1281 		in_options = LDST_SGF;
1282 	} else {
1283 		src_dma = sg_dma_address(req->src);
1284 	}
1285 
1286 	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
1287 
1288 	if (likely(req->src == req->dst)) {
1289 		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1290 		out_options = in_options;
1291 	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
1292 		dst_dma = sg_dma_address(req->dst);
1293 	} else {
1294 		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1295 			  sizeof(struct sec4_sg_entry);
1296 		out_options = LDST_SGF;
1297 	}
1298 
1299 	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
1300 }
1301 
1302 /*
1303  * allocate and map the aead extended descriptor
1304  */
1305 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1306 					   int desc_bytes, bool *all_contig_ptr,
1307 					   bool encrypt)
1308 {
1309 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1310 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1311 	struct device *jrdev = ctx->jrdev;
1312 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1313 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1314 		       GFP_KERNEL : GFP_ATOMIC;
1315 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1316 	int src_len, dst_len = 0;
1317 	struct aead_edesc *edesc;
1318 	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1319 	unsigned int authsize = ctx->authsize;
1320 
1321 	if (unlikely(req->dst != req->src)) {
1322 		src_len = req->assoclen + req->cryptlen;
1323 		dst_len = src_len + (encrypt ? authsize : (-authsize));
1324 
1325 		src_nents = sg_nents_for_len(req->src, src_len);
1326 		if (unlikely(src_nents < 0)) {
1327 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1328 				src_len);
1329 			return ERR_PTR(src_nents);
1330 		}
1331 
1332 		dst_nents = sg_nents_for_len(req->dst, dst_len);
1333 		if (unlikely(dst_nents < 0)) {
1334 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1335 				dst_len);
1336 			return ERR_PTR(dst_nents);
1337 		}
1338 	} else {
1339 		src_len = req->assoclen + req->cryptlen +
1340 			  (encrypt ? authsize : 0);
1341 
1342 		src_nents = sg_nents_for_len(req->src, src_len);
1343 		if (unlikely(src_nents < 0)) {
1344 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1345 				src_len);
1346 			return ERR_PTR(src_nents);
1347 		}
1348 	}
1349 
1350 	if (likely(req->src == req->dst)) {
1351 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1352 					      DMA_BIDIRECTIONAL);
1353 		if (unlikely(!mapped_src_nents)) {
1354 			dev_err(jrdev, "unable to map source\n");
1355 			return ERR_PTR(-ENOMEM);
1356 		}
1357 	} else {
1358 		/* Also cover the case of null (zero-length) input data */
1359 		if (src_nents) {
1360 			mapped_src_nents = dma_map_sg(jrdev, req->src,
1361 						      src_nents, DMA_TO_DEVICE);
1362 			if (unlikely(!mapped_src_nents)) {
1363 				dev_err(jrdev, "unable to map source\n");
1364 				return ERR_PTR(-ENOMEM);
1365 			}
1366 		} else {
1367 			mapped_src_nents = 0;
1368 		}
1369 
1370 		/* Also cover the case of null (zero-length) output data */
1371 		if (dst_nents) {
1372 			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1373 						      dst_nents,
1374 						      DMA_FROM_DEVICE);
1375 			if (unlikely(!mapped_dst_nents)) {
1376 				dev_err(jrdev, "unable to map destination\n");
1377 				dma_unmap_sg(jrdev, req->src, src_nents,
1378 					     DMA_TO_DEVICE);
1379 				return ERR_PTR(-ENOMEM);
1380 			}
1381 		} else {
1382 			mapped_dst_nents = 0;
1383 		}
1384 	}
1385 
1386 	/*
1387 	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1388 	 * the end of the table by allocating more S/G entries.
1389 	 */
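	/*
	 * e.g., assuming pad_sg_nents() rounds up to a multiple of 4: five
	 * mapped dst entries get eight S/G slots, four entries keep four.
	 */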
1390 	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1391 	if (mapped_dst_nents > 1)
1392 		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1393 	else
1394 		sec4_sg_len = pad_sg_nents(sec4_sg_len);
1395 
1396 	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1397 
1398 	/* allocate space for base edesc and hw desc commands, link tables */
1399 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
1400 	if (!edesc) {
1401 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1402 			   0, 0, 0);
1403 		return ERR_PTR(-ENOMEM);
1404 	}
1405 
1406 	edesc->src_nents = src_nents;
1407 	edesc->dst_nents = dst_nents;
1408 	edesc->mapped_src_nents = mapped_src_nents;
1409 	edesc->mapped_dst_nents = mapped_dst_nents;
1410 	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1411 			 desc_bytes;
1412 
1413 	rctx->edesc = edesc;
1414 
1415 	*all_contig_ptr = !(mapped_src_nents > 1);
1416 
1417 	sec4_sg_index = 0;
1418 	if (mapped_src_nents > 1) {
1419 		sg_to_sec4_sg_last(req->src, src_len,
1420 				   edesc->sec4_sg + sec4_sg_index, 0);
1421 		sec4_sg_index += mapped_src_nents;
1422 	}
1423 	if (mapped_dst_nents > 1) {
1424 		sg_to_sec4_sg_last(req->dst, dst_len,
1425 				   edesc->sec4_sg + sec4_sg_index, 0);
1426 	}
1427 
1428 	if (!sec4_sg_bytes)
1429 		return edesc;
1430 
1431 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1432 					    sec4_sg_bytes, DMA_TO_DEVICE);
1433 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1434 		dev_err(jrdev, "unable to map S/G table\n");
1435 		aead_unmap(jrdev, edesc, req);
1436 		kfree(edesc);
1437 		return ERR_PTR(-ENOMEM);
1438 	}
1439 
1440 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1441 
1442 	return edesc;
1443 }
1444 
1445 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1446 {
1447 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1448 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1449 	struct aead_edesc *edesc = rctx->edesc;
1450 	u32 *desc = edesc->hw_desc;
1451 	int ret;
1452 
1453 	/*
1454 	 * Only backlog requests are sent to the crypto engine, since the others
1455 	 * can be handled by CAAM, if free, especially since JR has up to 1024
1456 	 * entries (more than the 10 entries from crypto-engine).
1457 	 */
1458 	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1459 		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1460 							     req);
1461 	else
1462 		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1463 
1464 	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1465 		aead_unmap(jrdev, edesc, req);
1466 		kfree(rctx->edesc);
1467 	}
1468 
1469 	return ret;
1470 }
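/*
 * Caller-side sketch (illustrative only): a request takes the crypto-engine
 * path above only when it was submitted with the backlog flag set, e.g.:
 *
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				       CRYPTO_TFM_REQ_MAY_SLEEP,
 *				       done_cb, cb_ctx);
 *
 * done_cb/cb_ctx are placeholder names for the completion callback and its
 * context.
 */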
1471 
1472 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1473 {
1474 	struct aead_edesc *edesc;
1475 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1476 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1477 	struct device *jrdev = ctx->jrdev;
1478 	bool all_contig;
1479 	u32 *desc;
1480 
1481 	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1482 				 encrypt);
1483 	if (IS_ERR(edesc))
1484 		return PTR_ERR(edesc);
1485 
1486 	desc = edesc->hw_desc;
1487 
1488 	init_chachapoly_job(req, edesc, all_contig, encrypt);
1489 	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1490 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1491 			     1);
1492 
1493 	return aead_enqueue_req(jrdev, req);
1494 }
1495 
1496 static int chachapoly_encrypt(struct aead_request *req)
1497 {
1498 	return chachapoly_crypt(req, true);
1499 }
1500 
1501 static int chachapoly_decrypt(struct aead_request *req)
1502 {
1503 	return chachapoly_crypt(req, false);
1504 }
1505 
1506 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1507 {
1508 	struct aead_edesc *edesc;
1509 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1510 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1511 	struct device *jrdev = ctx->jrdev;
1512 	bool all_contig;
1513 
1514 	/* allocate extended descriptor */
1515 	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1516 				 &all_contig, encrypt);
1517 	if (IS_ERR(edesc))
1518 		return PTR_ERR(edesc);
1519 
1520 	/* Create and submit job descriptor */
1521 	init_authenc_job(req, edesc, all_contig, encrypt);
1522 
1523 	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1524 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1525 			     desc_bytes(edesc->hw_desc), 1);
1526 
1527 	return aead_enqueue_req(jrdev, req);
1528 }
1529 
1530 static int aead_encrypt(struct aead_request *req)
1531 {
1532 	return aead_crypt(req, true);
1533 }
1534 
1535 static int aead_decrypt(struct aead_request *req)
1536 {
1537 	return aead_crypt(req, false);
1538 }
1539 
1540 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1541 {
1542 	struct aead_request *req = aead_request_cast(areq);
1543 	struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
1544 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1545 	u32 *desc = rctx->edesc->hw_desc;
1546 	int ret;
1547 
1548 	rctx->edesc->bklog = true;
1549 
1550 	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1551 
1552 	if (ret == -ENOSPC && engine->retry_support)
1553 		return ret;
1554 
1555 	if (ret != -EINPROGRESS) {
1556 		aead_unmap(ctx->jrdev, rctx->edesc, req);
1557 		kfree(rctx->edesc);
1558 	} else {
1559 		ret = 0;
1560 	}
1561 
1562 	return ret;
1563 }
1564 
1565 static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1566 {
1567 	struct aead_edesc *edesc;
1568 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1569 	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1570 	struct device *jrdev = ctx->jrdev;
1571 	bool all_contig;
1572 
1573 	/* allocate extended descriptor */
1574 	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
1575 				 encrypt);
1576 	if (IS_ERR(edesc))
1577 		return PTR_ERR(edesc);
1578 
1579 	/* Create and submit job descriptor */
1580 	init_gcm_job(req, edesc, all_contig, encrypt);
1581 
1582 	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1583 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1584 			     desc_bytes(edesc->hw_desc), 1);
1585 
1586 	return aead_enqueue_req(jrdev, req);
1587 }
1588 
1589 static int gcm_encrypt(struct aead_request *req)
1590 {
1591 	return gcm_crypt(req, true);
1592 }
1593 
1594 static int gcm_decrypt(struct aead_request *req)
1595 {
1596 	return gcm_crypt(req, false);
1597 }
1598 
1599 static int ipsec_gcm_encrypt(struct aead_request *req)
1600 {
1601 	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
1602 }
1603 
1604 static int ipsec_gcm_decrypt(struct aead_request *req)
1605 {
1606 	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
1607 }
1608 
1609 /*
1610  * allocate and map the skcipher extended descriptor
1611  */
1612 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1613 						   int desc_bytes)
1614 {
1615 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1616 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1617 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1618 	struct device *jrdev = ctx->jrdev;
1619 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1620 		       GFP_KERNEL : GFP_ATOMIC;
1621 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1622 	struct skcipher_edesc *edesc;
1623 	dma_addr_t iv_dma = 0;
1624 	u8 *iv;
1625 	int ivsize = crypto_skcipher_ivsize(skcipher);
1626 	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1627 	unsigned int aligned_size;
1628 
1629 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
1630 	if (unlikely(src_nents < 0)) {
1631 		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1632 			req->cryptlen);
1633 		return ERR_PTR(src_nents);
1634 	}
1635 
1636 	if (req->dst != req->src) {
1637 		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1638 		if (unlikely(dst_nents < 0)) {
1639 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1640 				req->cryptlen);
1641 			return ERR_PTR(dst_nents);
1642 		}
1643 	}
1644 
1645 	if (likely(req->src == req->dst)) {
1646 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1647 					      DMA_BIDIRECTIONAL);
1648 		if (unlikely(!mapped_src_nents)) {
1649 			dev_err(jrdev, "unable to map source\n");
1650 			return ERR_PTR(-ENOMEM);
1651 		}
1652 	} else {
1653 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1654 					      DMA_TO_DEVICE);
1655 		if (unlikely(!mapped_src_nents)) {
1656 			dev_err(jrdev, "unable to map source\n");
1657 			return ERR_PTR(-ENOMEM);
1658 		}
1659 		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1660 					      DMA_FROM_DEVICE);
1661 		if (unlikely(!mapped_dst_nents)) {
1662 			dev_err(jrdev, "unable to map destination\n");
1663 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1664 			return ERR_PTR(-ENOMEM);
1665 		}
1666 	}
1667 
1668 	if (!ivsize && mapped_src_nents == 1)
1669 		sec4_sg_ents = 0; // no need for an input hw s/g table
1670 	else
1671 		sec4_sg_ents = mapped_src_nents + !!ivsize;
1672 	dst_sg_idx = sec4_sg_ents;
1673 
1674 	/*
1675 	 * Input, output HW S/G tables: [IV, src][dst, IV]
1676 	 * IV entries point to the same buffer
1677 	 * If src == dst, S/G entries are reused (S/G tables overlap)
1678 	 *
1679 	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1680 	 * the end of the table by allocating more S/G entries. Logic:
1681 	 * if (output S/G)
1682 	 *      pad output S/G, if needed
1683 	 * else if (input S/G) ...
1684 	 *      pad input S/G, if needed
1685 	 */
1686 	if (ivsize || mapped_dst_nents > 1) {
1687 		if (req->src == req->dst)
1688 			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1689 		else
1690 			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1691 						     !!ivsize);
1692 	} else {
1693 		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1694 	}
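/*
 * A worked example of the padding above, assuming pad_sg_nents() rounds
 * up to the 4-entry read granularity mentioned in the comment: for an
 * in-place (src == dst) CBC request with ivsize = 16 and
 * mapped_src_nents = 2, sec4_sg_ents starts at 2 + 1 = 3 (two src
 * entries plus the IV), and the overlapping tables then get
 * 1 + pad_sg_nents(3) = 1 + 4 = 5 entries, so a trailing 4-entry burst
 * read cannot run past the allocation.
 */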
1695 
1696 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1697 
1698 	/*
1699 	 * allocate space for base edesc and hw desc commands, link tables, IV
1700 	 */
1701 	aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes;
1702 	aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());
1703 	aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) &
1704 			(dma_get_cache_alignment() - 1);
1705 	aligned_size += ALIGN(ivsize, dma_get_cache_alignment());
1706 	edesc = kzalloc(aligned_size, flags);
1707 	if (!edesc) {
1708 		dev_err(jrdev, "could not allocate extended descriptor\n");
1709 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1710 			   0, 0, 0);
1711 		return ERR_PTR(-ENOMEM);
1712 	}
1713 
1714 	edesc->src_nents = src_nents;
1715 	edesc->dst_nents = dst_nents;
1716 	edesc->mapped_src_nents = mapped_src_nents;
1717 	edesc->mapped_dst_nents = mapped_dst_nents;
1718 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1719 	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1720 						  desc_bytes);
1721 	rctx->edesc = edesc;
1722 
1723 	/* Make sure IV is located in a DMAable area */
1724 	if (ivsize) {
1725 		iv = skcipher_edesc_iv(edesc);
1726 		memcpy(iv, req->iv, ivsize);
1727 
1728 		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
1729 		if (dma_mapping_error(jrdev, iv_dma)) {
1730 			dev_err(jrdev, "unable to map IV\n");
1731 			caam_unmap(jrdev, req->src, req->dst, src_nents,
1732 				   dst_nents, 0, 0, 0, 0);
1733 			kfree(edesc);
1734 			return ERR_PTR(-ENOMEM);
1735 		}
1736 
1737 		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1738 	}
1739 	if (dst_sg_idx)
1740 		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1741 			      !!ivsize, 0);
1742 
1743 	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1744 		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1745 			      dst_sg_idx, 0);
1746 
1747 	if (ivsize)
1748 		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1749 				   mapped_dst_nents, iv_dma, ivsize, 0);
1750 
1751 	if (ivsize || mapped_dst_nents > 1)
1752 		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1753 				    mapped_dst_nents - 1 + !!ivsize);
1754 
1755 	if (sec4_sg_bytes) {
1756 		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1757 						    sec4_sg_bytes,
1758 						    DMA_TO_DEVICE);
1759 		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1760 			dev_err(jrdev, "unable to map S/G table\n");
1761 			caam_unmap(jrdev, req->src, req->dst, src_nents,
1762 				   dst_nents, iv_dma, ivsize, 0, 0);
1763 			kfree(edesc);
1764 			return ERR_PTR(-ENOMEM);
1765 		}
1766 	}
1767 
1768 	edesc->iv_dma = iv_dma;
1769 
1770 	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1771 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1772 			     sec4_sg_bytes, 1);
1773 
1774 	return edesc;
1775 }
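/*
 * Rough layout of the single allocation built above (a sketch, not a
 * normative map): everything sits in one kzalloc() buffer, so one
 * kfree() releases it, while the IV is pushed to its own cache-aligned
 * slice so that its DMA_BIDIRECTIONAL mapping does not share a cache
 * line with CPU-owned fields:
 *
 *	+------------------------+
 *	| struct skcipher_edesc  |
 *	| hw_desc (desc_bytes)   |
 *	| sec4_sg link tables    |
 *	+--- cache-line align ---+
 *	| IV (ivsize bytes)      |
 *	+------------------------+
 */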
1776 
1777 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1778 {
1779 	struct skcipher_request *req = skcipher_request_cast(areq);
1780 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
1781 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1782 	u32 *desc = rctx->edesc->hw_desc;
1783 	int ret;
1784 
1785 	rctx->edesc->bklog = true;
1786 
1787 	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1788 
1789 	if (ret == -ENOSPC && engine->retry_support)
1790 		return ret;
1791 
1792 	if (ret != -EINPROGRESS) {
1793 		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
1794 		kfree(rctx->edesc);
1795 	} else {
1796 		ret = 0;
1797 	}
1798 
1799 	return ret;
1800 }
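/*
 * The bklog flag set above is presumably what lets the completion
 * callback tell the two submission paths apart: requests that arrived
 * via the crypto-engine have to be finalized with
 * crypto_finalize_skcipher_request() rather than completed directly.
 * -ENOSPC is only propagated when the engine supports retries, so the
 * engine re-queues the request instead of failing it.
 */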
1801 
1802 static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
1803 {
1804 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1805 	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
1806 
1807 	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
1808 }
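/*
 * Despite its name, xts_skcipher_ivsize() does not return a size: it
 * reports whether the upper 64 bits of the XTS sector index (the second
 * half of the IV) are nonzero. skcipher_crypt() below uses this to
 * route requests that older hardware, with only a 64-bit sector index,
 * cannot process to the software fallback.
 */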
1809 
1810 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1811 {
1812 	struct skcipher_edesc *edesc;
1813 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1814 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1815 	struct device *jrdev = ctx->jrdev;
1816 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1817 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
1818 	u32 *desc;
1819 	int ret = 0;
1820 
1821 	/*
1822 	 * XTS is expected to return an error even for input length = 0.
1823 	 * Note that inputs shorter than the block size are caught during
1824 	 * HW offloading and result in an error.
1825 	 */
1826 	if (!req->cryptlen && !ctx->fallback)
1827 		return 0;
1828 
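	/*
	 * Two conditions apparently send the request to the software
	 * fallback: the upper half of the XTS sector index is nonzero,
	 * which era <= 8 hardware cannot represent (see
	 * xts_skcipher_ivsize() above), or setkey flagged a key length
	 * the accelerator does not support (ctx->xts_key_fallback,
	 * presumably e.g. AES-192-based XTS keys).
	 */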
1829 	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
1830 			      ctx->xts_key_fallback)) {
1831 		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1832 
1833 		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
1834 		skcipher_request_set_callback(&rctx->fallback_req,
1835 					      req->base.flags,
1836 					      req->base.complete,
1837 					      req->base.data);
1838 		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
1839 					   req->dst, req->cryptlen, req->iv);
1840 
1841 		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
1842 				 crypto_skcipher_decrypt(&rctx->fallback_req);
1843 	}
1844 
1845 	/* allocate extended descriptor */
1846 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1847 	if (IS_ERR(edesc))
1848 		return PTR_ERR(edesc);
1849 
1850 	/* Create and submit job descriptor*/
1851 	init_skcipher_job(req, edesc, encrypt);
1852 
1853 	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1854 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1855 			     desc_bytes(edesc->hw_desc), 1);
1856 
1857 	desc = edesc->hw_desc;
1858 	/*
1859 	 * Only backlog requests are sent to the crypto-engine, since the
1860 	 * others can be handled directly by CAAM, if free, especially since
1861 	 * the JR has up to 1024 entries (more than the 10 of crypto-engine).
1862 	 */
1863 	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1864 		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1865 								 req);
1866 	else
1867 		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1868 
1869 	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1870 		skcipher_unmap(jrdev, edesc, req);
1871 		kfree(edesc);
1872 	}
1873 
1874 	return ret;
1875 }
1876 
1877 static int skcipher_encrypt(struct skcipher_request *req)
1878 {
1879 	return skcipher_crypt(req, true);
1880 }
1881 
1882 static int skcipher_decrypt(struct skcipher_request *req)
1883 {
1884 	return skcipher_crypt(req, false);
1885 }
1886 
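/*
 * Template table for the skcipher algorithms registered by this driver;
 * each entry pairs the generic crypto API template with the CAAM
 * class 1 OPERATION settings used when constructing its shared
 * descriptor. A minimal usage sketch from a hypothetical caller (error
 * handling omitted), assuming the CAAM instantiation at priority
 * CAAM_CRA_PRIORITY wins algorithm resolution and "cbc(aes)" thus maps
 * to "cbc-aes-caam":
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_skcipher(tfm);
 */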
1887 static struct caam_skcipher_alg driver_algs[] = {
1888 	{
1889 		.skcipher.base = {
1890 			.base = {
1891 				.cra_name = "cbc(aes)",
1892 				.cra_driver_name = "cbc-aes-caam",
1893 				.cra_blocksize = AES_BLOCK_SIZE,
1894 			},
1895 			.setkey = aes_skcipher_setkey,
1896 			.encrypt = skcipher_encrypt,
1897 			.decrypt = skcipher_decrypt,
1898 			.min_keysize = AES_MIN_KEY_SIZE,
1899 			.max_keysize = AES_MAX_KEY_SIZE,
1900 			.ivsize = AES_BLOCK_SIZE,
1901 		},
1902 		.skcipher.op = {
1903 			.do_one_request = skcipher_do_one_req,
1904 		},
1905 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1906 	},
1907 	{
1908 		.skcipher.base = {
1909 			.base = {
1910 				.cra_name = "cbc(des3_ede)",
1911 				.cra_driver_name = "cbc-3des-caam",
1912 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1913 			},
1914 			.setkey = des3_skcipher_setkey,
1915 			.encrypt = skcipher_encrypt,
1916 			.decrypt = skcipher_decrypt,
1917 			.min_keysize = DES3_EDE_KEY_SIZE,
1918 			.max_keysize = DES3_EDE_KEY_SIZE,
1919 			.ivsize = DES3_EDE_BLOCK_SIZE,
1920 		},
1921 		.skcipher.op = {
1922 			.do_one_request = skcipher_do_one_req,
1923 		},
1924 		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1925 	},
1926 	{
1927 		.skcipher.base = {
1928 			.base = {
1929 				.cra_name = "cbc(des)",
1930 				.cra_driver_name = "cbc-des-caam",
1931 				.cra_blocksize = DES_BLOCK_SIZE,
1932 			},
1933 			.setkey = des_skcipher_setkey,
1934 			.encrypt = skcipher_encrypt,
1935 			.decrypt = skcipher_decrypt,
1936 			.min_keysize = DES_KEY_SIZE,
1937 			.max_keysize = DES_KEY_SIZE,
1938 			.ivsize = DES_BLOCK_SIZE,
1939 		},
1940 		.skcipher.op = {
1941 			.do_one_request = skcipher_do_one_req,
1942 		},
1943 		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1944 	},
1945 	{
1946 		.skcipher.base = {
1947 			.base = {
1948 				.cra_name = "ctr(aes)",
1949 				.cra_driver_name = "ctr-aes-caam",
1950 				.cra_blocksize = 1,
1951 			},
1952 			.setkey = ctr_skcipher_setkey,
1953 			.encrypt = skcipher_encrypt,
1954 			.decrypt = skcipher_decrypt,
1955 			.min_keysize = AES_MIN_KEY_SIZE,
1956 			.max_keysize = AES_MAX_KEY_SIZE,
1957 			.ivsize = AES_BLOCK_SIZE,
1958 			.chunksize = AES_BLOCK_SIZE,
1959 		},
1960 		.skcipher.op = {
1961 			.do_one_request = skcipher_do_one_req,
1962 		},
1963 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1964 					OP_ALG_AAI_CTR_MOD128,
1965 	},
1966 	{
1967 		.skcipher.base = {
1968 			.base = {
1969 				.cra_name = "rfc3686(ctr(aes))",
1970 				.cra_driver_name = "rfc3686-ctr-aes-caam",
1971 				.cra_blocksize = 1,
1972 			},
1973 			.setkey = rfc3686_skcipher_setkey,
1974 			.encrypt = skcipher_encrypt,
1975 			.decrypt = skcipher_decrypt,
1976 			.min_keysize = AES_MIN_KEY_SIZE +
1977 				       CTR_RFC3686_NONCE_SIZE,
1978 			.max_keysize = AES_MAX_KEY_SIZE +
1979 				       CTR_RFC3686_NONCE_SIZE,
1980 			.ivsize = CTR_RFC3686_IV_SIZE,
1981 			.chunksize = AES_BLOCK_SIZE,
1982 		},
1983 		.skcipher.op = {
1984 			.do_one_request = skcipher_do_one_req,
1985 		},
1986 		.caam = {
1987 			.class1_alg_type = OP_ALG_ALGSEL_AES |
1988 					   OP_ALG_AAI_CTR_MOD128,
1989 			.rfc3686 = true,
1990 		},
1991 	},
1992 	{
1993 		.skcipher.base = {
1994 			.base = {
1995 				.cra_name = "xts(aes)",
1996 				.cra_driver_name = "xts-aes-caam",
1997 				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
1998 				.cra_blocksize = AES_BLOCK_SIZE,
1999 			},
2000 			.setkey = xts_skcipher_setkey,
2001 			.encrypt = skcipher_encrypt,
2002 			.decrypt = skcipher_decrypt,
2003 			.min_keysize = 2 * AES_MIN_KEY_SIZE,
2004 			.max_keysize = 2 * AES_MAX_KEY_SIZE,
2005 			.ivsize = AES_BLOCK_SIZE,
2006 		},
2007 		.skcipher.op = {
2008 			.do_one_request = skcipher_do_one_req,
2009 		},
2010 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
2011 	},
2012 	{
2013 		.skcipher.base = {
2014 			.base = {
2015 				.cra_name = "ecb(des)",
2016 				.cra_driver_name = "ecb-des-caam",
2017 				.cra_blocksize = DES_BLOCK_SIZE,
2018 			},
2019 			.setkey = des_skcipher_setkey,
2020 			.encrypt = skcipher_encrypt,
2021 			.decrypt = skcipher_decrypt,
2022 			.min_keysize = DES_KEY_SIZE,
2023 			.max_keysize = DES_KEY_SIZE,
2024 		},
2025 		.skcipher.op = {
2026 			.do_one_request = skcipher_do_one_req,
2027 		},
2028 		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
2029 	},
2030 	{
2031 		.skcipher.base = {
2032 			.base = {
2033 				.cra_name = "ecb(aes)",
2034 				.cra_driver_name = "ecb-aes-caam",
2035 				.cra_blocksize = AES_BLOCK_SIZE,
2036 			},
2037 			.setkey = aes_skcipher_setkey,
2038 			.encrypt = skcipher_encrypt,
2039 			.decrypt = skcipher_decrypt,
2040 			.min_keysize = AES_MIN_KEY_SIZE,
2041 			.max_keysize = AES_MAX_KEY_SIZE,
2042 		},
2043 		.skcipher.op = {
2044 			.do_one_request = skcipher_do_one_req,
2045 		},
2046 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
2047 	},
2048 	{
2049 		.skcipher.base = {
2050 			.base = {
2051 				.cra_name = "ecb(des3_ede)",
2052 				.cra_driver_name = "ecb-des3-caam",
2053 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2054 			},
2055 			.setkey = des3_skcipher_setkey,
2056 			.encrypt = skcipher_encrypt,
2057 			.decrypt = skcipher_decrypt,
2058 			.min_keysize = DES3_EDE_KEY_SIZE,
2059 			.max_keysize = DES3_EDE_KEY_SIZE,
2060 		},
2061 		.skcipher.op = {
2062 			.do_one_request = skcipher_do_one_req,
2063 		},
2064 		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
2065 	},
2066 };
2067 
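/*
 * Template table for the AEAD algorithms. The caam_alg_entry flags
 * steer shared-descriptor construction: .rfc3686 selects CTR mode with
 * the RFC 3686 nonce kept alongside the key, .geniv marks the
 * seqiv/echainiv variants whose encrypt descriptor also generates the
 * IV, and .nodkp skips the split-key Derived Key Protocol for
 * algorithms (GCM, ChaCha20-Poly1305) that carry no separate
 * authentication key.
 */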
2068 static struct caam_aead_alg driver_aeads[] = {
2069 	{
2070 		.aead.base = {
2071 			.base = {
2072 				.cra_name = "rfc4106(gcm(aes))",
2073 				.cra_driver_name = "rfc4106-gcm-aes-caam",
2074 				.cra_blocksize = 1,
2075 			},
2076 			.setkey = rfc4106_setkey,
2077 			.setauthsize = rfc4106_setauthsize,
2078 			.encrypt = ipsec_gcm_encrypt,
2079 			.decrypt = ipsec_gcm_decrypt,
2080 			.ivsize = GCM_RFC4106_IV_SIZE,
2081 			.maxauthsize = AES_BLOCK_SIZE,
2082 		},
2083 		.aead.op = {
2084 			.do_one_request = aead_do_one_req,
2085 		},
2086 		.caam = {
2087 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2088 			.nodkp = true,
2089 		},
2090 	},
2091 	{
2092 		.aead.base = {
2093 			.base = {
2094 				.cra_name = "rfc4543(gcm(aes))",
2095 				.cra_driver_name = "rfc4543-gcm-aes-caam",
2096 				.cra_blocksize = 1,
2097 			},
2098 			.setkey = rfc4543_setkey,
2099 			.setauthsize = rfc4543_setauthsize,
2100 			.encrypt = ipsec_gcm_encrypt,
2101 			.decrypt = ipsec_gcm_decrypt,
2102 			.ivsize = GCM_RFC4543_IV_SIZE,
2103 			.maxauthsize = AES_BLOCK_SIZE,
2104 		},
2105 		.aead.op = {
2106 			.do_one_request = aead_do_one_req,
2107 		},
2108 		.caam = {
2109 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2110 			.nodkp = true,
2111 		},
2112 	},
2113 	/* Galois Counter Mode */
2114 	{
2115 		.aead.base = {
2116 			.base = {
2117 				.cra_name = "gcm(aes)",
2118 				.cra_driver_name = "gcm-aes-caam",
2119 				.cra_blocksize = 1,
2120 			},
2121 			.setkey = gcm_setkey,
2122 			.setauthsize = gcm_setauthsize,
2123 			.encrypt = gcm_encrypt,
2124 			.decrypt = gcm_decrypt,
2125 			.ivsize = GCM_AES_IV_SIZE,
2126 			.maxauthsize = AES_BLOCK_SIZE,
2127 		},
2128 		.aead.op = {
2129 			.do_one_request = aead_do_one_req,
2130 		},
2131 		.caam = {
2132 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2133 			.nodkp = true,
2134 		},
2135 	},
2136 	/* single-pass ipsec_esp descriptor */
2137 	{
2138 		.aead.base = {
2139 			.base = {
2140 				.cra_name = "authenc(hmac(md5),"
2141 					    "ecb(cipher_null))",
2142 				.cra_driver_name = "authenc-hmac-md5-"
2143 						   "ecb-cipher_null-caam",
2144 				.cra_blocksize = NULL_BLOCK_SIZE,
2145 			},
2146 			.setkey = aead_setkey,
2147 			.setauthsize = aead_setauthsize,
2148 			.encrypt = aead_encrypt,
2149 			.decrypt = aead_decrypt,
2150 			.ivsize = NULL_IV_SIZE,
2151 			.maxauthsize = MD5_DIGEST_SIZE,
2152 		},
2153 		.aead.op = {
2154 			.do_one_request = aead_do_one_req,
2155 		},
2156 		.caam = {
2157 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2158 					   OP_ALG_AAI_HMAC_PRECOMP,
2159 		},
2160 	},
2161 	{
2162 		.aead.base = {
2163 			.base = {
2164 				.cra_name = "authenc(hmac(sha1),"
2165 					    "ecb(cipher_null))",
2166 				.cra_driver_name = "authenc-hmac-sha1-"
2167 						   "ecb-cipher_null-caam",
2168 				.cra_blocksize = NULL_BLOCK_SIZE,
2169 			},
2170 			.setkey = aead_setkey,
2171 			.setauthsize = aead_setauthsize,
2172 			.encrypt = aead_encrypt,
2173 			.decrypt = aead_decrypt,
2174 			.ivsize = NULL_IV_SIZE,
2175 			.maxauthsize = SHA1_DIGEST_SIZE,
2176 		},
2177 		.aead.op = {
2178 			.do_one_request = aead_do_one_req,
2179 		},
2180 		.caam = {
2181 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2182 					   OP_ALG_AAI_HMAC_PRECOMP,
2183 		},
2184 	},
2185 	{
2186 		.aead.base = {
2187 			.base = {
2188 				.cra_name = "authenc(hmac(sha224),"
2189 					    "ecb(cipher_null))",
2190 				.cra_driver_name = "authenc-hmac-sha224-"
2191 						   "ecb-cipher_null-caam",
2192 				.cra_blocksize = NULL_BLOCK_SIZE,
2193 			},
2194 			.setkey = aead_setkey,
2195 			.setauthsize = aead_setauthsize,
2196 			.encrypt = aead_encrypt,
2197 			.decrypt = aead_decrypt,
2198 			.ivsize = NULL_IV_SIZE,
2199 			.maxauthsize = SHA224_DIGEST_SIZE,
2200 		},
2201 		.aead.op = {
2202 			.do_one_request = aead_do_one_req,
2203 		},
2204 		.caam = {
2205 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2206 					   OP_ALG_AAI_HMAC_PRECOMP,
2207 		},
2208 	},
2209 	{
2210 		.aead.base = {
2211 			.base = {
2212 				.cra_name = "authenc(hmac(sha256),"
2213 					    "ecb(cipher_null))",
2214 				.cra_driver_name = "authenc-hmac-sha256-"
2215 						   "ecb-cipher_null-caam",
2216 				.cra_blocksize = NULL_BLOCK_SIZE,
2217 			},
2218 			.setkey = aead_setkey,
2219 			.setauthsize = aead_setauthsize,
2220 			.encrypt = aead_encrypt,
2221 			.decrypt = aead_decrypt,
2222 			.ivsize = NULL_IV_SIZE,
2223 			.maxauthsize = SHA256_DIGEST_SIZE,
2224 		},
2225 		.aead.op = {
2226 			.do_one_request = aead_do_one_req,
2227 		},
2228 		.caam = {
2229 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2230 					   OP_ALG_AAI_HMAC_PRECOMP,
2231 		},
2232 	},
2233 	{
2234 		.aead.base = {
2235 			.base = {
2236 				.cra_name = "authenc(hmac(sha384),"
2237 					    "ecb(cipher_null))",
2238 				.cra_driver_name = "authenc-hmac-sha384-"
2239 						   "ecb-cipher_null-caam",
2240 				.cra_blocksize = NULL_BLOCK_SIZE,
2241 			},
2242 			.setkey = aead_setkey,
2243 			.setauthsize = aead_setauthsize,
2244 			.encrypt = aead_encrypt,
2245 			.decrypt = aead_decrypt,
2246 			.ivsize = NULL_IV_SIZE,
2247 			.maxauthsize = SHA384_DIGEST_SIZE,
2248 		},
2249 		.aead.op = {
2250 			.do_one_request = aead_do_one_req,
2251 		},
2252 		.caam = {
2253 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2254 					   OP_ALG_AAI_HMAC_PRECOMP,
2255 		},
2256 	},
2257 	{
2258 		.aead.base = {
2259 			.base = {
2260 				.cra_name = "authenc(hmac(sha512),"
2261 					    "ecb(cipher_null))",
2262 				.cra_driver_name = "authenc-hmac-sha512-"
2263 						   "ecb-cipher_null-caam",
2264 				.cra_blocksize = NULL_BLOCK_SIZE,
2265 			},
2266 			.setkey = aead_setkey,
2267 			.setauthsize = aead_setauthsize,
2268 			.encrypt = aead_encrypt,
2269 			.decrypt = aead_decrypt,
2270 			.ivsize = NULL_IV_SIZE,
2271 			.maxauthsize = SHA512_DIGEST_SIZE,
2272 		},
2273 		.aead.op = {
2274 			.do_one_request = aead_do_one_req,
2275 		},
2276 		.caam = {
2277 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2278 					   OP_ALG_AAI_HMAC_PRECOMP,
2279 		},
2280 	},
2281 	{
2282 		.aead.base = {
2283 			.base = {
2284 				.cra_name = "authenc(hmac(md5),cbc(aes))",
2285 				.cra_driver_name = "authenc-hmac-md5-"
2286 						   "cbc-aes-caam",
2287 				.cra_blocksize = AES_BLOCK_SIZE,
2288 			},
2289 			.setkey = aead_setkey,
2290 			.setauthsize = aead_setauthsize,
2291 			.encrypt = aead_encrypt,
2292 			.decrypt = aead_decrypt,
2293 			.ivsize = AES_BLOCK_SIZE,
2294 			.maxauthsize = MD5_DIGEST_SIZE,
2295 		},
2296 		.aead.op = {
2297 			.do_one_request = aead_do_one_req,
2298 		},
2299 		.caam = {
2300 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2301 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2302 					   OP_ALG_AAI_HMAC_PRECOMP,
2303 		},
2304 	},
2305 	{
2306 		.aead.base = {
2307 			.base = {
2308 				.cra_name = "echainiv(authenc(hmac(md5),"
2309 					    "cbc(aes)))",
2310 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2311 						   "cbc-aes-caam",
2312 				.cra_blocksize = AES_BLOCK_SIZE,
2313 			},
2314 			.setkey = aead_setkey,
2315 			.setauthsize = aead_setauthsize,
2316 			.encrypt = aead_encrypt,
2317 			.decrypt = aead_decrypt,
2318 			.ivsize = AES_BLOCK_SIZE,
2319 			.maxauthsize = MD5_DIGEST_SIZE,
2320 		},
2321 		.aead.op = {
2322 			.do_one_request = aead_do_one_req,
2323 		},
2324 		.caam = {
2325 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2326 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2327 					   OP_ALG_AAI_HMAC_PRECOMP,
2328 			.geniv = true,
2329 		},
2330 	},
2331 	{
2332 		.aead.base = {
2333 			.base = {
2334 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2335 				.cra_driver_name = "authenc-hmac-sha1-"
2336 						   "cbc-aes-caam",
2337 				.cra_blocksize = AES_BLOCK_SIZE,
2338 			},
2339 			.setkey = aead_setkey,
2340 			.setauthsize = aead_setauthsize,
2341 			.encrypt = aead_encrypt,
2342 			.decrypt = aead_decrypt,
2343 			.ivsize = AES_BLOCK_SIZE,
2344 			.maxauthsize = SHA1_DIGEST_SIZE,
2345 		},
2346 		.aead.op = {
2347 			.do_one_request = aead_do_one_req,
2348 		},
2349 		.caam = {
2350 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2351 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2352 					   OP_ALG_AAI_HMAC_PRECOMP,
2353 		},
2354 	},
2355 	{
2356 		.aead.base = {
2357 			.base = {
2358 				.cra_name = "echainiv(authenc(hmac(sha1),"
2359 					    "cbc(aes)))",
2360 				.cra_driver_name = "echainiv-authenc-"
2361 						   "hmac-sha1-cbc-aes-caam",
2362 				.cra_blocksize = AES_BLOCK_SIZE,
2363 			},
2364 			.setkey = aead_setkey,
2365 			.setauthsize = aead_setauthsize,
2366 			.encrypt = aead_encrypt,
2367 			.decrypt = aead_decrypt,
2368 			.ivsize = AES_BLOCK_SIZE,
2369 			.maxauthsize = SHA1_DIGEST_SIZE,
2370 		},
2371 		.aead.op = {
2372 			.do_one_request = aead_do_one_req,
2373 		},
2374 		.caam = {
2375 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2376 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2377 					   OP_ALG_AAI_HMAC_PRECOMP,
2378 			.geniv = true,
2379 		},
2380 	},
2381 	{
2382 		.aead.base = {
2383 			.base = {
2384 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2385 				.cra_driver_name = "authenc-hmac-sha224-"
2386 						   "cbc-aes-caam",
2387 				.cra_blocksize = AES_BLOCK_SIZE,
2388 			},
2389 			.setkey = aead_setkey,
2390 			.setauthsize = aead_setauthsize,
2391 			.encrypt = aead_encrypt,
2392 			.decrypt = aead_decrypt,
2393 			.ivsize = AES_BLOCK_SIZE,
2394 			.maxauthsize = SHA224_DIGEST_SIZE,
2395 		},
2396 		.aead.op = {
2397 			.do_one_request = aead_do_one_req,
2398 		},
2399 		.caam = {
2400 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2401 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2402 					   OP_ALG_AAI_HMAC_PRECOMP,
2403 		},
2404 	},
2405 	{
2406 		.aead.base = {
2407 			.base = {
2408 				.cra_name = "echainiv(authenc(hmac(sha224),"
2409 					    "cbc(aes)))",
2410 				.cra_driver_name = "echainiv-authenc-"
2411 						   "hmac-sha224-cbc-aes-caam",
2412 				.cra_blocksize = AES_BLOCK_SIZE,
2413 			},
2414 			.setkey = aead_setkey,
2415 			.setauthsize = aead_setauthsize,
2416 			.encrypt = aead_encrypt,
2417 			.decrypt = aead_decrypt,
2418 			.ivsize = AES_BLOCK_SIZE,
2419 			.maxauthsize = SHA224_DIGEST_SIZE,
2420 		},
2421 		.aead.op = {
2422 			.do_one_request = aead_do_one_req,
2423 		},
2424 		.caam = {
2425 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2426 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2427 					   OP_ALG_AAI_HMAC_PRECOMP,
2428 			.geniv = true,
2429 		},
2430 	},
2431 	{
2432 		.aead.base = {
2433 			.base = {
2434 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2435 				.cra_driver_name = "authenc-hmac-sha256-"
2436 						   "cbc-aes-caam",
2437 				.cra_blocksize = AES_BLOCK_SIZE,
2438 			},
2439 			.setkey = aead_setkey,
2440 			.setauthsize = aead_setauthsize,
2441 			.encrypt = aead_encrypt,
2442 			.decrypt = aead_decrypt,
2443 			.ivsize = AES_BLOCK_SIZE,
2444 			.maxauthsize = SHA256_DIGEST_SIZE,
2445 		},
2446 		.aead.op = {
2447 			.do_one_request = aead_do_one_req,
2448 		},
2449 		.caam = {
2450 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2451 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2452 					   OP_ALG_AAI_HMAC_PRECOMP,
2453 		},
2454 	},
2455 	{
2456 		.aead.base = {
2457 			.base = {
2458 				.cra_name = "echainiv(authenc(hmac(sha256),"
2459 					    "cbc(aes)))",
2460 				.cra_driver_name = "echainiv-authenc-"
2461 						   "hmac-sha256-cbc-aes-caam",
2462 				.cra_blocksize = AES_BLOCK_SIZE,
2463 			},
2464 			.setkey = aead_setkey,
2465 			.setauthsize = aead_setauthsize,
2466 			.encrypt = aead_encrypt,
2467 			.decrypt = aead_decrypt,
2468 			.ivsize = AES_BLOCK_SIZE,
2469 			.maxauthsize = SHA256_DIGEST_SIZE,
2470 		},
2471 		.aead.op = {
2472 			.do_one_request = aead_do_one_req,
2473 		},
2474 		.caam = {
2475 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2476 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2477 					   OP_ALG_AAI_HMAC_PRECOMP,
2478 			.geniv = true,
2479 		},
2480 	},
2481 	{
2482 		.aead.base = {
2483 			.base = {
2484 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2485 				.cra_driver_name = "authenc-hmac-sha384-"
2486 						   "cbc-aes-caam",
2487 				.cra_blocksize = AES_BLOCK_SIZE,
2488 			},
2489 			.setkey = aead_setkey,
2490 			.setauthsize = aead_setauthsize,
2491 			.encrypt = aead_encrypt,
2492 			.decrypt = aead_decrypt,
2493 			.ivsize = AES_BLOCK_SIZE,
2494 			.maxauthsize = SHA384_DIGEST_SIZE,
2495 		},
2496 		.aead.op = {
2497 			.do_one_request = aead_do_one_req,
2498 		},
2499 		.caam = {
2500 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2501 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2502 					   OP_ALG_AAI_HMAC_PRECOMP,
2503 		},
2504 	},
2505 	{
2506 		.aead.base = {
2507 			.base = {
2508 				.cra_name = "echainiv(authenc(hmac(sha384),"
2509 					    "cbc(aes)))",
2510 				.cra_driver_name = "echainiv-authenc-"
2511 						   "hmac-sha384-cbc-aes-caam",
2512 				.cra_blocksize = AES_BLOCK_SIZE,
2513 			},
2514 			.setkey = aead_setkey,
2515 			.setauthsize = aead_setauthsize,
2516 			.encrypt = aead_encrypt,
2517 			.decrypt = aead_decrypt,
2518 			.ivsize = AES_BLOCK_SIZE,
2519 			.maxauthsize = SHA384_DIGEST_SIZE,
2520 		},
2521 		.aead.op = {
2522 			.do_one_request = aead_do_one_req,
2523 		},
2524 		.caam = {
2525 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2526 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2527 					   OP_ALG_AAI_HMAC_PRECOMP,
2528 			.geniv = true,
2529 		},
2530 	},
2531 	{
2532 		.aead.base = {
2533 			.base = {
2534 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2535 				.cra_driver_name = "authenc-hmac-sha512-"
2536 						   "cbc-aes-caam",
2537 				.cra_blocksize = AES_BLOCK_SIZE,
2538 			},
2539 			.setkey = aead_setkey,
2540 			.setauthsize = aead_setauthsize,
2541 			.encrypt = aead_encrypt,
2542 			.decrypt = aead_decrypt,
2543 			.ivsize = AES_BLOCK_SIZE,
2544 			.maxauthsize = SHA512_DIGEST_SIZE,
2545 		},
2546 		.aead.op = {
2547 			.do_one_request = aead_do_one_req,
2548 		},
2549 		.caam = {
2550 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2551 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2552 					   OP_ALG_AAI_HMAC_PRECOMP,
2553 		},
2554 	},
2555 	{
2556 		.aead.base = {
2557 			.base = {
2558 				.cra_name = "echainiv(authenc(hmac(sha512),"
2559 					    "cbc(aes)))",
2560 				.cra_driver_name = "echainiv-authenc-"
2561 						   "hmac-sha512-cbc-aes-caam",
2562 				.cra_blocksize = AES_BLOCK_SIZE,
2563 			},
2564 			.setkey = aead_setkey,
2565 			.setauthsize = aead_setauthsize,
2566 			.encrypt = aead_encrypt,
2567 			.decrypt = aead_decrypt,
2568 			.ivsize = AES_BLOCK_SIZE,
2569 			.maxauthsize = SHA512_DIGEST_SIZE,
2570 		},
2571 		.aead.op = {
2572 			.do_one_request = aead_do_one_req,
2573 		},
2574 		.caam = {
2575 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2576 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2577 					   OP_ALG_AAI_HMAC_PRECOMP,
2578 			.geniv = true,
2579 		},
2580 	},
2581 	{
2582 		.aead.base = {
2583 			.base = {
2584 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2585 				.cra_driver_name = "authenc-hmac-md5-"
2586 						   "cbc-des3_ede-caam",
2587 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2588 			},
2589 			.setkey = des3_aead_setkey,
2590 			.setauthsize = aead_setauthsize,
2591 			.encrypt = aead_encrypt,
2592 			.decrypt = aead_decrypt,
2593 			.ivsize = DES3_EDE_BLOCK_SIZE,
2594 			.maxauthsize = MD5_DIGEST_SIZE,
2595 		},
2596 		.aead.op = {
2597 			.do_one_request = aead_do_one_req,
2598 		},
2599 		.caam = {
2600 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2601 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2602 					   OP_ALG_AAI_HMAC_PRECOMP,
2603 		}
2604 	},
2605 	{
2606 		.aead.base = {
2607 			.base = {
2608 				.cra_name = "echainiv(authenc(hmac(md5),"
2609 					    "cbc(des3_ede)))",
2610 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2611 						   "cbc-des3_ede-caam",
2612 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2613 			},
2614 			.setkey = des3_aead_setkey,
2615 			.setauthsize = aead_setauthsize,
2616 			.encrypt = aead_encrypt,
2617 			.decrypt = aead_decrypt,
2618 			.ivsize = DES3_EDE_BLOCK_SIZE,
2619 			.maxauthsize = MD5_DIGEST_SIZE,
2620 		},
2621 		.aead.op = {
2622 			.do_one_request = aead_do_one_req,
2623 		},
2624 		.caam = {
2625 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2626 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2627 					   OP_ALG_AAI_HMAC_PRECOMP,
2628 			.geniv = true,
2629 		}
2630 	},
2631 	{
2632 		.aead.base = {
2633 			.base = {
2634 				.cra_name = "authenc(hmac(sha1),"
2635 					    "cbc(des3_ede))",
2636 				.cra_driver_name = "authenc-hmac-sha1-"
2637 						   "cbc-des3_ede-caam",
2638 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2639 			},
2640 			.setkey = des3_aead_setkey,
2641 			.setauthsize = aead_setauthsize,
2642 			.encrypt = aead_encrypt,
2643 			.decrypt = aead_decrypt,
2644 			.ivsize = DES3_EDE_BLOCK_SIZE,
2645 			.maxauthsize = SHA1_DIGEST_SIZE,
2646 		},
2647 		.aead.op = {
2648 			.do_one_request = aead_do_one_req,
2649 		},
2650 		.caam = {
2651 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2652 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2653 					   OP_ALG_AAI_HMAC_PRECOMP,
2654 		},
2655 	},
2656 	{
2657 		.aead.base = {
2658 			.base = {
2659 				.cra_name = "echainiv(authenc(hmac(sha1),"
2660 					    "cbc(des3_ede)))",
2661 				.cra_driver_name = "echainiv-authenc-"
2662 						   "hmac-sha1-"
2663 						   "cbc-des3_ede-caam",
2664 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2665 			},
2666 			.setkey = des3_aead_setkey,
2667 			.setauthsize = aead_setauthsize,
2668 			.encrypt = aead_encrypt,
2669 			.decrypt = aead_decrypt,
2670 			.ivsize = DES3_EDE_BLOCK_SIZE,
2671 			.maxauthsize = SHA1_DIGEST_SIZE,
2672 		},
2673 		.aead.op = {
2674 			.do_one_request = aead_do_one_req,
2675 		},
2676 		.caam = {
2677 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2678 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2679 					   OP_ALG_AAI_HMAC_PRECOMP,
2680 			.geniv = true,
2681 		},
2682 	},
2683 	{
2684 		.aead.base = {
2685 			.base = {
2686 				.cra_name = "authenc(hmac(sha224),"
2687 					    "cbc(des3_ede))",
2688 				.cra_driver_name = "authenc-hmac-sha224-"
2689 						   "cbc-des3_ede-caam",
2690 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2691 			},
2692 			.setkey = des3_aead_setkey,
2693 			.setauthsize = aead_setauthsize,
2694 			.encrypt = aead_encrypt,
2695 			.decrypt = aead_decrypt,
2696 			.ivsize = DES3_EDE_BLOCK_SIZE,
2697 			.maxauthsize = SHA224_DIGEST_SIZE,
2698 		},
2699 		.aead.op = {
2700 			.do_one_request = aead_do_one_req,
2701 		},
2702 		.caam = {
2703 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2704 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2705 					   OP_ALG_AAI_HMAC_PRECOMP,
2706 		},
2707 	},
2708 	{
2709 		.aead.base = {
2710 			.base = {
2711 				.cra_name = "echainiv(authenc(hmac(sha224),"
2712 					    "cbc(des3_ede)))",
2713 				.cra_driver_name = "echainiv-authenc-"
2714 						   "hmac-sha224-"
2715 						   "cbc-des3_ede-caam",
2716 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2717 			},
2718 			.setkey = des3_aead_setkey,
2719 			.setauthsize = aead_setauthsize,
2720 			.encrypt = aead_encrypt,
2721 			.decrypt = aead_decrypt,
2722 			.ivsize = DES3_EDE_BLOCK_SIZE,
2723 			.maxauthsize = SHA224_DIGEST_SIZE,
2724 		},
2725 		.aead.op = {
2726 			.do_one_request = aead_do_one_req,
2727 		},
2728 		.caam = {
2729 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2730 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2731 					   OP_ALG_AAI_HMAC_PRECOMP,
2732 			.geniv = true,
2733 		},
2734 	},
2735 	{
2736 		.aead.base = {
2737 			.base = {
2738 				.cra_name = "authenc(hmac(sha256),"
2739 					    "cbc(des3_ede))",
2740 				.cra_driver_name = "authenc-hmac-sha256-"
2741 						   "cbc-des3_ede-caam",
2742 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2743 			},
2744 			.setkey = des3_aead_setkey,
2745 			.setauthsize = aead_setauthsize,
2746 			.encrypt = aead_encrypt,
2747 			.decrypt = aead_decrypt,
2748 			.ivsize = DES3_EDE_BLOCK_SIZE,
2749 			.maxauthsize = SHA256_DIGEST_SIZE,
2750 		},
2751 		.aead.op = {
2752 			.do_one_request = aead_do_one_req,
2753 		},
2754 		.caam = {
2755 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2756 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2757 					   OP_ALG_AAI_HMAC_PRECOMP,
2758 		},
2759 	},
2760 	{
2761 		.aead.base = {
2762 			.base = {
2763 				.cra_name = "echainiv(authenc(hmac(sha256),"
2764 					    "cbc(des3_ede)))",
2765 				.cra_driver_name = "echainiv-authenc-"
2766 						   "hmac-sha256-"
2767 						   "cbc-des3_ede-caam",
2768 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2769 			},
2770 			.setkey = des3_aead_setkey,
2771 			.setauthsize = aead_setauthsize,
2772 			.encrypt = aead_encrypt,
2773 			.decrypt = aead_decrypt,
2774 			.ivsize = DES3_EDE_BLOCK_SIZE,
2775 			.maxauthsize = SHA256_DIGEST_SIZE,
2776 		},
2777 		.aead.op = {
2778 			.do_one_request = aead_do_one_req,
2779 		},
2780 		.caam = {
2781 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2782 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2783 					   OP_ALG_AAI_HMAC_PRECOMP,
2784 			.geniv = true,
2785 		},
2786 	},
2787 	{
2788 		.aead.base = {
2789 			.base = {
2790 				.cra_name = "authenc(hmac(sha384),"
2791 					    "cbc(des3_ede))",
2792 				.cra_driver_name = "authenc-hmac-sha384-"
2793 						   "cbc-des3_ede-caam",
2794 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2795 			},
2796 			.setkey = des3_aead_setkey,
2797 			.setauthsize = aead_setauthsize,
2798 			.encrypt = aead_encrypt,
2799 			.decrypt = aead_decrypt,
2800 			.ivsize = DES3_EDE_BLOCK_SIZE,
2801 			.maxauthsize = SHA384_DIGEST_SIZE,
2802 		},
2803 		.aead.op = {
2804 			.do_one_request = aead_do_one_req,
2805 		},
2806 		.caam = {
2807 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2808 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2809 					   OP_ALG_AAI_HMAC_PRECOMP,
2810 		},
2811 	},
2812 	{
2813 		.aead.base = {
2814 			.base = {
2815 				.cra_name = "echainiv(authenc(hmac(sha384),"
2816 					    "cbc(des3_ede)))",
2817 				.cra_driver_name = "echainiv-authenc-"
2818 						   "hmac-sha384-"
2819 						   "cbc-des3_ede-caam",
2820 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2821 			},
2822 			.setkey = des3_aead_setkey,
2823 			.setauthsize = aead_setauthsize,
2824 			.encrypt = aead_encrypt,
2825 			.decrypt = aead_decrypt,
2826 			.ivsize = DES3_EDE_BLOCK_SIZE,
2827 			.maxauthsize = SHA384_DIGEST_SIZE,
2828 		},
2829 		.aead.op = {
2830 			.do_one_request = aead_do_one_req,
2831 		},
2832 		.caam = {
2833 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2834 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2835 					   OP_ALG_AAI_HMAC_PRECOMP,
2836 			.geniv = true,
2837 		},
2838 	},
2839 	{
2840 		.aead.base = {
2841 			.base = {
2842 				.cra_name = "authenc(hmac(sha512),"
2843 					    "cbc(des3_ede))",
2844 				.cra_driver_name = "authenc-hmac-sha512-"
2845 						   "cbc-des3_ede-caam",
2846 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2847 			},
2848 			.setkey = des3_aead_setkey,
2849 			.setauthsize = aead_setauthsize,
2850 			.encrypt = aead_encrypt,
2851 			.decrypt = aead_decrypt,
2852 			.ivsize = DES3_EDE_BLOCK_SIZE,
2853 			.maxauthsize = SHA512_DIGEST_SIZE,
2854 		},
2855 		.aead.op = {
2856 			.do_one_request = aead_do_one_req,
2857 		},
2858 		.caam = {
2859 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2860 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2861 					   OP_ALG_AAI_HMAC_PRECOMP,
2862 		},
2863 	},
2864 	{
2865 		.aead.base = {
2866 			.base = {
2867 				.cra_name = "echainiv(authenc(hmac(sha512),"
2868 					    "cbc(des3_ede)))",
2869 				.cra_driver_name = "echainiv-authenc-"
2870 						   "hmac-sha512-"
2871 						   "cbc-des3_ede-caam",
2872 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2873 			},
2874 			.setkey = des3_aead_setkey,
2875 			.setauthsize = aead_setauthsize,
2876 			.encrypt = aead_encrypt,
2877 			.decrypt = aead_decrypt,
2878 			.ivsize = DES3_EDE_BLOCK_SIZE,
2879 			.maxauthsize = SHA512_DIGEST_SIZE,
2880 		},
2881 		.aead.op = {
2882 			.do_one_request = aead_do_one_req,
2883 		},
2884 		.caam = {
2885 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2886 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2887 					   OP_ALG_AAI_HMAC_PRECOMP,
2888 			.geniv = true,
2889 		},
2890 	},
2891 	{
2892 		.aead.base = {
2893 			.base = {
2894 				.cra_name = "authenc(hmac(md5),cbc(des))",
2895 				.cra_driver_name = "authenc-hmac-md5-"
2896 						   "cbc-des-caam",
2897 				.cra_blocksize = DES_BLOCK_SIZE,
2898 			},
2899 			.setkey = aead_setkey,
2900 			.setauthsize = aead_setauthsize,
2901 			.encrypt = aead_encrypt,
2902 			.decrypt = aead_decrypt,
2903 			.ivsize = DES_BLOCK_SIZE,
2904 			.maxauthsize = MD5_DIGEST_SIZE,
2905 		},
2906 		.aead.op = {
2907 			.do_one_request = aead_do_one_req,
2908 		},
2909 		.caam = {
2910 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2911 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2912 					   OP_ALG_AAI_HMAC_PRECOMP,
2913 		},
2914 	},
2915 	{
2916 		.aead.base = {
2917 			.base = {
2918 				.cra_name = "echainiv(authenc(hmac(md5),"
2919 					    "cbc(des)))",
2920 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2921 						   "cbc-des-caam",
2922 				.cra_blocksize = DES_BLOCK_SIZE,
2923 			},
2924 			.setkey = aead_setkey,
2925 			.setauthsize = aead_setauthsize,
2926 			.encrypt = aead_encrypt,
2927 			.decrypt = aead_decrypt,
2928 			.ivsize = DES_BLOCK_SIZE,
2929 			.maxauthsize = MD5_DIGEST_SIZE,
2930 		},
2931 		.aead.op = {
2932 			.do_one_request = aead_do_one_req,
2933 		},
2934 		.caam = {
2935 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2936 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2937 					   OP_ALG_AAI_HMAC_PRECOMP,
2938 			.geniv = true,
2939 		},
2940 	},
2941 	{
2942 		.aead.base = {
2943 			.base = {
2944 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2945 				.cra_driver_name = "authenc-hmac-sha1-"
2946 						   "cbc-des-caam",
2947 				.cra_blocksize = DES_BLOCK_SIZE,
2948 			},
2949 			.setkey = aead_setkey,
2950 			.setauthsize = aead_setauthsize,
2951 			.encrypt = aead_encrypt,
2952 			.decrypt = aead_decrypt,
2953 			.ivsize = DES_BLOCK_SIZE,
2954 			.maxauthsize = SHA1_DIGEST_SIZE,
2955 		},
2956 		.aead.op = {
2957 			.do_one_request = aead_do_one_req,
2958 		},
2959 		.caam = {
2960 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2961 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2962 					   OP_ALG_AAI_HMAC_PRECOMP,
2963 		},
2964 	},
2965 	{
2966 		.aead.base = {
2967 			.base = {
2968 				.cra_name = "echainiv(authenc(hmac(sha1),"
2969 					    "cbc(des)))",
2970 				.cra_driver_name = "echainiv-authenc-"
2971 						   "hmac-sha1-cbc-des-caam",
2972 				.cra_blocksize = DES_BLOCK_SIZE,
2973 			},
2974 			.setkey = aead_setkey,
2975 			.setauthsize = aead_setauthsize,
2976 			.encrypt = aead_encrypt,
2977 			.decrypt = aead_decrypt,
2978 			.ivsize = DES_BLOCK_SIZE,
2979 			.maxauthsize = SHA1_DIGEST_SIZE,
2980 		},
2981 		.aead.op = {
2982 			.do_one_request = aead_do_one_req,
2983 		},
2984 		.caam = {
2985 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2986 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2987 					   OP_ALG_AAI_HMAC_PRECOMP,
2988 			.geniv = true,
2989 		},
2990 	},
2991 	{
2992 		.aead.base = {
2993 			.base = {
2994 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2995 				.cra_driver_name = "authenc-hmac-sha224-"
2996 						   "cbc-des-caam",
2997 				.cra_blocksize = DES_BLOCK_SIZE,
2998 			},
2999 			.setkey = aead_setkey,
3000 			.setauthsize = aead_setauthsize,
3001 			.encrypt = aead_encrypt,
3002 			.decrypt = aead_decrypt,
3003 			.ivsize = DES_BLOCK_SIZE,
3004 			.maxauthsize = SHA224_DIGEST_SIZE,
3005 		},
3006 		.aead.op = {
3007 			.do_one_request = aead_do_one_req,
3008 		},
3009 		.caam = {
3010 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3011 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3012 					   OP_ALG_AAI_HMAC_PRECOMP,
3013 		},
3014 	},
3015 	{
3016 		.aead.base = {
3017 			.base = {
3018 				.cra_name = "echainiv(authenc(hmac(sha224),"
3019 					    "cbc(des)))",
3020 				.cra_driver_name = "echainiv-authenc-"
3021 						   "hmac-sha224-cbc-des-caam",
3022 				.cra_blocksize = DES_BLOCK_SIZE,
3023 			},
3024 			.setkey = aead_setkey,
3025 			.setauthsize = aead_setauthsize,
3026 			.encrypt = aead_encrypt,
3027 			.decrypt = aead_decrypt,
3028 			.ivsize = DES_BLOCK_SIZE,
3029 			.maxauthsize = SHA224_DIGEST_SIZE,
3030 		},
3031 		.aead.op = {
3032 			.do_one_request = aead_do_one_req,
3033 		},
3034 		.caam = {
3035 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3036 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3037 					   OP_ALG_AAI_HMAC_PRECOMP,
3038 			.geniv = true,
3039 		},
3040 	},
3041 	{
3042 		.aead.base = {
3043 			.base = {
3044 				.cra_name = "authenc(hmac(sha256),cbc(des))",
3045 				.cra_driver_name = "authenc-hmac-sha256-"
3046 						   "cbc-des-caam",
3047 				.cra_blocksize = DES_BLOCK_SIZE,
3048 			},
3049 			.setkey = aead_setkey,
3050 			.setauthsize = aead_setauthsize,
3051 			.encrypt = aead_encrypt,
3052 			.decrypt = aead_decrypt,
3053 			.ivsize = DES_BLOCK_SIZE,
3054 			.maxauthsize = SHA256_DIGEST_SIZE,
3055 		},
3056 		.aead.op = {
3057 			.do_one_request = aead_do_one_req,
3058 		},
3059 		.caam = {
3060 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3061 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3062 					   OP_ALG_AAI_HMAC_PRECOMP,
3063 		},
3064 	},
3065 	{
3066 		.aead.base = {
3067 			.base = {
3068 				.cra_name = "echainiv(authenc(hmac(sha256),"
3069 					    "cbc(des)))",
3070 				.cra_driver_name = "echainiv-authenc-"
3071 						   "hmac-sha256-cbc-des-caam",
3072 				.cra_blocksize = DES_BLOCK_SIZE,
3073 			},
3074 			.setkey = aead_setkey,
3075 			.setauthsize = aead_setauthsize,
3076 			.encrypt = aead_encrypt,
3077 			.decrypt = aead_decrypt,
3078 			.ivsize = DES_BLOCK_SIZE,
3079 			.maxauthsize = SHA256_DIGEST_SIZE,
3080 		},
3081 		.aead.op = {
3082 			.do_one_request = aead_do_one_req,
3083 		},
3084 		.caam = {
3085 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3086 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3087 					   OP_ALG_AAI_HMAC_PRECOMP,
3088 			.geniv = true,
3089 		},
3090 	},
3091 	{
3092 		.aead.base = {
3093 			.base = {
3094 				.cra_name = "authenc(hmac(sha384),cbc(des))",
3095 				.cra_driver_name = "authenc-hmac-sha384-"
3096 						   "cbc-des-caam",
3097 				.cra_blocksize = DES_BLOCK_SIZE,
3098 			},
3099 			.setkey = aead_setkey,
3100 			.setauthsize = aead_setauthsize,
3101 			.encrypt = aead_encrypt,
3102 			.decrypt = aead_decrypt,
3103 			.ivsize = DES_BLOCK_SIZE,
3104 			.maxauthsize = SHA384_DIGEST_SIZE,
3105 		},
3106 		.aead.op = {
3107 			.do_one_request = aead_do_one_req,
3108 		},
3109 		.caam = {
3110 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3111 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3112 					   OP_ALG_AAI_HMAC_PRECOMP,
3113 		},
3114 	},
3115 	{
3116 		.aead.base = {
3117 			.base = {
3118 				.cra_name = "echainiv(authenc(hmac(sha384),"
3119 					    "cbc(des)))",
3120 				.cra_driver_name = "echainiv-authenc-"
3121 						   "hmac-sha384-cbc-des-caam",
3122 				.cra_blocksize = DES_BLOCK_SIZE,
3123 			},
3124 			.setkey = aead_setkey,
3125 			.setauthsize = aead_setauthsize,
3126 			.encrypt = aead_encrypt,
3127 			.decrypt = aead_decrypt,
3128 			.ivsize = DES_BLOCK_SIZE,
3129 			.maxauthsize = SHA384_DIGEST_SIZE,
3130 		},
3131 		.aead.op = {
3132 			.do_one_request = aead_do_one_req,
3133 		},
3134 		.caam = {
3135 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3136 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3137 					   OP_ALG_AAI_HMAC_PRECOMP,
3138 			.geniv = true,
3139 		},
3140 	},
3141 	{
3142 		.aead.base = {
3143 			.base = {
3144 				.cra_name = "authenc(hmac(sha512),cbc(des))",
3145 				.cra_driver_name = "authenc-hmac-sha512-"
3146 						   "cbc-des-caam",
3147 				.cra_blocksize = DES_BLOCK_SIZE,
3148 			},
3149 			.setkey = aead_setkey,
3150 			.setauthsize = aead_setauthsize,
3151 			.encrypt = aead_encrypt,
3152 			.decrypt = aead_decrypt,
3153 			.ivsize = DES_BLOCK_SIZE,
3154 			.maxauthsize = SHA512_DIGEST_SIZE,
3155 		},
3156 		.aead.op = {
3157 			.do_one_request = aead_do_one_req,
3158 		},
3159 		.caam = {
3160 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3161 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3162 					   OP_ALG_AAI_HMAC_PRECOMP,
3163 		},
3164 	},
3165 	{
3166 		.aead.base = {
3167 			.base = {
3168 				.cra_name = "echainiv(authenc(hmac(sha512),"
3169 					    "cbc(des)))",
3170 				.cra_driver_name = "echainiv-authenc-"
3171 						   "hmac-sha512-cbc-des-caam",
3172 				.cra_blocksize = DES_BLOCK_SIZE,
3173 			},
3174 			.setkey = aead_setkey,
3175 			.setauthsize = aead_setauthsize,
3176 			.encrypt = aead_encrypt,
3177 			.decrypt = aead_decrypt,
3178 			.ivsize = DES_BLOCK_SIZE,
3179 			.maxauthsize = SHA512_DIGEST_SIZE,
3180 		},
3181 		.aead.op = {
3182 			.do_one_request = aead_do_one_req,
3183 		},
3184 		.caam = {
3185 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3186 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3187 					   OP_ALG_AAI_HMAC_PRECOMP,
3188 			.geniv = true,
3189 		},
3190 	},
3191 	{
3192 		.aead.base = {
3193 			.base = {
3194 				.cra_name = "authenc(hmac(md5),"
3195 					    "rfc3686(ctr(aes)))",
3196 				.cra_driver_name = "authenc-hmac-md5-"
3197 						   "rfc3686-ctr-aes-caam",
3198 				.cra_blocksize = 1,
3199 			},
3200 			.setkey = aead_setkey,
3201 			.setauthsize = aead_setauthsize,
3202 			.encrypt = aead_encrypt,
3203 			.decrypt = aead_decrypt,
3204 			.ivsize = CTR_RFC3686_IV_SIZE,
3205 			.maxauthsize = MD5_DIGEST_SIZE,
3206 		},
3207 		.aead.op = {
3208 			.do_one_request = aead_do_one_req,
3209 		},
3210 		.caam = {
3211 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3212 					   OP_ALG_AAI_CTR_MOD128,
3213 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3214 					   OP_ALG_AAI_HMAC_PRECOMP,
3215 			.rfc3686 = true,
3216 		},
3217 	},
3218 	{
3219 		.aead.base = {
3220 			.base = {
3221 				.cra_name = "seqiv(authenc("
3222 					    "hmac(md5),rfc3686(ctr(aes))))",
3223 				.cra_driver_name = "seqiv-authenc-hmac-md5-"
3224 						   "rfc3686-ctr-aes-caam",
3225 				.cra_blocksize = 1,
3226 			},
3227 			.setkey = aead_setkey,
3228 			.setauthsize = aead_setauthsize,
3229 			.encrypt = aead_encrypt,
3230 			.decrypt = aead_decrypt,
3231 			.ivsize = CTR_RFC3686_IV_SIZE,
3232 			.maxauthsize = MD5_DIGEST_SIZE,
3233 		},
3234 		.aead.op = {
3235 			.do_one_request = aead_do_one_req,
3236 		},
3237 		.caam = {
3238 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3239 					   OP_ALG_AAI_CTR_MOD128,
3240 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3241 					   OP_ALG_AAI_HMAC_PRECOMP,
3242 			.rfc3686 = true,
3243 			.geniv = true,
3244 		},
3245 	},
3246 	{
3247 		.aead.base = {
3248 			.base = {
3249 				.cra_name = "authenc(hmac(sha1),"
3250 					    "rfc3686(ctr(aes)))",
3251 				.cra_driver_name = "authenc-hmac-sha1-"
3252 						   "rfc3686-ctr-aes-caam",
3253 				.cra_blocksize = 1,
3254 			},
3255 			.setkey = aead_setkey,
3256 			.setauthsize = aead_setauthsize,
3257 			.encrypt = aead_encrypt,
3258 			.decrypt = aead_decrypt,
3259 			.ivsize = CTR_RFC3686_IV_SIZE,
3260 			.maxauthsize = SHA1_DIGEST_SIZE,
3261 		},
3262 		.aead.op = {
3263 			.do_one_request = aead_do_one_req,
3264 		},
3265 		.caam = {
3266 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3267 					   OP_ALG_AAI_CTR_MOD128,
3268 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3269 					   OP_ALG_AAI_HMAC_PRECOMP,
3270 			.rfc3686 = true,
3271 		},
3272 	},
3273 	{
3274 		.aead.base = {
3275 			.base = {
3276 				.cra_name = "seqiv(authenc("
3277 					    "hmac(sha1),rfc3686(ctr(aes))))",
3278 				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3279 						   "rfc3686-ctr-aes-caam",
3280 				.cra_blocksize = 1,
3281 			},
3282 			.setkey = aead_setkey,
3283 			.setauthsize = aead_setauthsize,
3284 			.encrypt = aead_encrypt,
3285 			.decrypt = aead_decrypt,
3286 			.ivsize = CTR_RFC3686_IV_SIZE,
3287 			.maxauthsize = SHA1_DIGEST_SIZE,
3288 		},
3289 		.aead.op = {
3290 			.do_one_request = aead_do_one_req,
3291 		},
3292 		.caam = {
3293 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3294 					   OP_ALG_AAI_CTR_MOD128,
3295 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3296 					   OP_ALG_AAI_HMAC_PRECOMP,
3297 			.rfc3686 = true,
3298 			.geniv = true,
3299 		},
3300 	},
3301 	{
3302 		.aead.base = {
3303 			.base = {
3304 				.cra_name = "authenc(hmac(sha224),"
3305 					    "rfc3686(ctr(aes)))",
3306 				.cra_driver_name = "authenc-hmac-sha224-"
3307 						   "rfc3686-ctr-aes-caam",
3308 				.cra_blocksize = 1,
3309 			},
3310 			.setkey = aead_setkey,
3311 			.setauthsize = aead_setauthsize,
3312 			.encrypt = aead_encrypt,
3313 			.decrypt = aead_decrypt,
3314 			.ivsize = CTR_RFC3686_IV_SIZE,
3315 			.maxauthsize = SHA224_DIGEST_SIZE,
3316 		},
3317 		.aead.op = {
3318 			.do_one_request = aead_do_one_req,
3319 		},
3320 		.caam = {
3321 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3322 					   OP_ALG_AAI_CTR_MOD128,
3323 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3324 					   OP_ALG_AAI_HMAC_PRECOMP,
3325 			.rfc3686 = true,
3326 		},
3327 	},
3328 	{
3329 		.aead.base = {
3330 			.base = {
3331 				.cra_name = "seqiv(authenc("
3332 					    "hmac(sha224),rfc3686(ctr(aes))))",
3333 				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3334 						   "rfc3686-ctr-aes-caam",
3335 				.cra_blocksize = 1,
3336 			},
3337 			.setkey = aead_setkey,
3338 			.setauthsize = aead_setauthsize,
3339 			.encrypt = aead_encrypt,
3340 			.decrypt = aead_decrypt,
3341 			.ivsize = CTR_RFC3686_IV_SIZE,
3342 			.maxauthsize = SHA224_DIGEST_SIZE,
3343 		},
3344 		.aead.op = {
3345 			.do_one_request = aead_do_one_req,
3346 		},
3347 		.caam = {
3348 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3349 					   OP_ALG_AAI_CTR_MOD128,
3350 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3351 					   OP_ALG_AAI_HMAC_PRECOMP,
3352 			.rfc3686 = true,
3353 			.geniv = true,
3354 		},
3355 	},
3356 	{
3357 		.aead.base = {
3358 			.base = {
3359 				.cra_name = "authenc(hmac(sha256),"
3360 					    "rfc3686(ctr(aes)))",
3361 				.cra_driver_name = "authenc-hmac-sha256-"
3362 						   "rfc3686-ctr-aes-caam",
3363 				.cra_blocksize = 1,
3364 			},
3365 			.setkey = aead_setkey,
3366 			.setauthsize = aead_setauthsize,
3367 			.encrypt = aead_encrypt,
3368 			.decrypt = aead_decrypt,
3369 			.ivsize = CTR_RFC3686_IV_SIZE,
3370 			.maxauthsize = SHA256_DIGEST_SIZE,
3371 		},
3372 		.aead.op = {
3373 			.do_one_request = aead_do_one_req,
3374 		},
3375 		.caam = {
3376 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3377 					   OP_ALG_AAI_CTR_MOD128,
3378 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3379 					   OP_ALG_AAI_HMAC_PRECOMP,
3380 			.rfc3686 = true,
3381 		},
3382 	},
3383 	{
3384 		.aead.base = {
3385 			.base = {
3386 				.cra_name = "seqiv(authenc(hmac(sha256),"
3387 					    "rfc3686(ctr(aes))))",
3388 				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3389 						   "rfc3686-ctr-aes-caam",
3390 				.cra_blocksize = 1,
3391 			},
3392 			.setkey = aead_setkey,
3393 			.setauthsize = aead_setauthsize,
3394 			.encrypt = aead_encrypt,
3395 			.decrypt = aead_decrypt,
3396 			.ivsize = CTR_RFC3686_IV_SIZE,
3397 			.maxauthsize = SHA256_DIGEST_SIZE,
3398 		},
3399 		.aead.op = {
3400 			.do_one_request = aead_do_one_req,
3401 		},
3402 		.caam = {
3403 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3404 					   OP_ALG_AAI_CTR_MOD128,
3405 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3406 					   OP_ALG_AAI_HMAC_PRECOMP,
3407 			.rfc3686 = true,
3408 			.geniv = true,
3409 		},
3410 	},
3411 	{
3412 		.aead.base = {
3413 			.base = {
3414 				.cra_name = "authenc(hmac(sha384),"
3415 					    "rfc3686(ctr(aes)))",
3416 				.cra_driver_name = "authenc-hmac-sha384-"
3417 						   "rfc3686-ctr-aes-caam",
3418 				.cra_blocksize = 1,
3419 			},
3420 			.setkey = aead_setkey,
3421 			.setauthsize = aead_setauthsize,
3422 			.encrypt = aead_encrypt,
3423 			.decrypt = aead_decrypt,
3424 			.ivsize = CTR_RFC3686_IV_SIZE,
3425 			.maxauthsize = SHA384_DIGEST_SIZE,
3426 		},
3427 		.aead.op = {
3428 			.do_one_request = aead_do_one_req,
3429 		},
3430 		.caam = {
3431 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3432 					   OP_ALG_AAI_CTR_MOD128,
3433 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3434 					   OP_ALG_AAI_HMAC_PRECOMP,
3435 			.rfc3686 = true,
3436 		},
3437 	},
3438 	{
3439 		.aead.base = {
3440 			.base = {
3441 				.cra_name = "seqiv(authenc(hmac(sha384),"
3442 					    "rfc3686(ctr(aes))))",
3443 				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3444 						   "rfc3686-ctr-aes-caam",
3445 				.cra_blocksize = 1,
3446 			},
3447 			.setkey = aead_setkey,
3448 			.setauthsize = aead_setauthsize,
3449 			.encrypt = aead_encrypt,
3450 			.decrypt = aead_decrypt,
3451 			.ivsize = CTR_RFC3686_IV_SIZE,
3452 			.maxauthsize = SHA384_DIGEST_SIZE,
3453 		},
3454 		.aead.op = {
3455 			.do_one_request = aead_do_one_req,
3456 		},
3457 		.caam = {
3458 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3459 					   OP_ALG_AAI_CTR_MOD128,
3460 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3461 					   OP_ALG_AAI_HMAC_PRECOMP,
3462 			.rfc3686 = true,
3463 			.geniv = true,
3464 		},
3465 	},
3466 	{
3467 		.aead.base = {
3468 			.base = {
3469 				.cra_name = "authenc(hmac(sha512),"
3470 					    "rfc3686(ctr(aes)))",
3471 				.cra_driver_name = "authenc-hmac-sha512-"
3472 						   "rfc3686-ctr-aes-caam",
3473 				.cra_blocksize = 1,
3474 			},
3475 			.setkey = aead_setkey,
3476 			.setauthsize = aead_setauthsize,
3477 			.encrypt = aead_encrypt,
3478 			.decrypt = aead_decrypt,
3479 			.ivsize = CTR_RFC3686_IV_SIZE,
3480 			.maxauthsize = SHA512_DIGEST_SIZE,
3481 		},
3482 		.aead.op = {
3483 			.do_one_request = aead_do_one_req,
3484 		},
3485 		.caam = {
3486 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3487 					   OP_ALG_AAI_CTR_MOD128,
3488 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3489 					   OP_ALG_AAI_HMAC_PRECOMP,
3490 			.rfc3686 = true,
3491 		},
3492 	},
3493 	{
3494 		.aead.base = {
3495 			.base = {
3496 				.cra_name = "seqiv(authenc(hmac(sha512),"
3497 					    "rfc3686(ctr(aes))))",
3498 				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3499 						   "rfc3686-ctr-aes-caam",
3500 				.cra_blocksize = 1,
3501 			},
3502 			.setkey = aead_setkey,
3503 			.setauthsize = aead_setauthsize,
3504 			.encrypt = aead_encrypt,
3505 			.decrypt = aead_decrypt,
3506 			.ivsize = CTR_RFC3686_IV_SIZE,
3507 			.maxauthsize = SHA512_DIGEST_SIZE,
3508 		},
3509 		.aead.op = {
3510 			.do_one_request = aead_do_one_req,
3511 		},
3512 		.caam = {
3513 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3514 					   OP_ALG_AAI_CTR_MOD128,
3515 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3516 					   OP_ALG_AAI_HMAC_PRECOMP,
3517 			.rfc3686 = true,
3518 			.geniv = true,
3519 		},
3520 	},
3521 	{
3522 		.aead.base = {
3523 			.base = {
3524 				.cra_name = "rfc7539(chacha20,poly1305)",
3525 				.cra_driver_name = "rfc7539-chacha20-poly1305-"
3526 						   "caam",
3527 				.cra_blocksize = 1,
3528 			},
3529 			.setkey = chachapoly_setkey,
3530 			.setauthsize = chachapoly_setauthsize,
3531 			.encrypt = chachapoly_encrypt,
3532 			.decrypt = chachapoly_decrypt,
3533 			.ivsize = CHACHAPOLY_IV_SIZE,
3534 			.maxauthsize = POLY1305_DIGEST_SIZE,
3535 		},
3536 		.aead.op = {
3537 			.do_one_request = aead_do_one_req,
3538 		},
3539 		.caam = {
3540 			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3541 					   OP_ALG_AAI_AEAD,
3542 			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3543 					   OP_ALG_AAI_AEAD,
3544 			.nodkp = true,
3545 		},
3546 	},
3547 	{
3548 		.aead.base = {
3549 			.base = {
3550 				.cra_name = "rfc7539esp(chacha20,poly1305)",
3551 				.cra_driver_name = "rfc7539esp-chacha20-"
3552 						   "poly1305-caam",
3553 				.cra_blocksize = 1,
3554 			},
3555 			.setkey = chachapoly_setkey,
3556 			.setauthsize = chachapoly_setauthsize,
3557 			.encrypt = chachapoly_encrypt,
3558 			.decrypt = chachapoly_decrypt,
3559 			.ivsize = 8,
3560 			.maxauthsize = POLY1305_DIGEST_SIZE,
3561 		},
3562 		.aead.op = {
3563 			.do_one_request = aead_do_one_req,
3564 		},
3565 		.caam = {
3566 			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3567 					   OP_ALG_AAI_AEAD,
3568 			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3569 					   OP_ALG_AAI_AEAD,
3570 			.nodkp = true,
3571 		},
3572 	},
3573 };
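/*
 * Illustrative sketch only -- not part of this driver. Once the
 * templates above are registered, kernel users reach them through the
 * generic crypto API by algorithm name, and CAAM_CRA_PRIORITY (3000)
 * makes the "-caam" driver_names win over software implementations.
 * The function name below is hypothetical.
 */
static int __maybe_unused caam_example_alloc_aead(void)
{
	struct crypto_aead *tfm;

	/* Resolves to rfc7539-chacha20-poly1305-caam when registered */
	tfm = crypto_alloc_aead("rfc7539(chacha20,poly1305)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	crypto_free_aead(tfm);
	return 0;
}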
3574 
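/*
 * caam_init_common() - initialization shared by skcipher and AEAD tfms.
 * Allocates a job ring for the transform, then DMA-maps the region of
 * struct caam_ctx from sh_desc_enc up to (but excluding) sh_desc_enc_dma
 * with one dma_map_single_attrs() call; sh_desc_dec_dma and key_dma are
 * derived from that single mapping by member offset, which relies on the
 * shared descriptors and the key being laid out contiguously in the
 * context. On era >= 6 parts using DKP the mapping is bidirectional,
 * since the device writes the derived split key back into the descriptor.
 */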
3575 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3576 			    bool uses_dkp)
3577 {
3578 	dma_addr_t dma_addr;
3579 	struct caam_drv_private *priv;
3580 	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
3581 						   sh_desc_enc);
3582 
3583 	ctx->jrdev = caam_jr_alloc();
3584 	if (IS_ERR(ctx->jrdev)) {
3585 		pr_err("Job Ring Device allocation for transform failed\n");
3586 		return PTR_ERR(ctx->jrdev);
3587 	}
3588 
3589 	priv = dev_get_drvdata(ctx->jrdev->parent);
3590 	if (priv->era >= 6 && uses_dkp)
3591 		ctx->dir = DMA_BIDIRECTIONAL;
3592 	else
3593 		ctx->dir = DMA_TO_DEVICE;
3594 
3595 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3596 					offsetof(struct caam_ctx,
3597 						 sh_desc_enc_dma) -
3598 					sh_desc_enc_offset,
3599 					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3600 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3601 		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3602 		caam_jr_free(ctx->jrdev);
3603 		return -ENOMEM;
3604 	}
3605 
3606 	ctx->sh_desc_enc_dma = dma_addr;
3607 	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3608 						   sh_desc_dec) -
3609 					sh_desc_enc_offset;
3610 	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
3611 					sh_desc_enc_offset;
3612 
3613 	/* copy descriptor header template value */
3614 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3615 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3616 
3617 	return 0;
3618 }
3619 
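/*
 * For XTS, keep a software fallback tfm: passing CRYPTO_ALG_NEED_FALLBACK
 * in the mask selects an implementation of the same algorithm name that
 * does not itself need a fallback (i.e. a software one), and the request
 * size is extended so the fallback's request fits behind
 * struct caam_skcipher_req_ctx.
 */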
3620 static int caam_cra_init(struct crypto_skcipher *tfm)
3621 {
3622 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3623 	struct caam_skcipher_alg *caam_alg =
3624 		container_of(alg, typeof(*caam_alg), skcipher.base);
3625 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
3626 	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3627 	int ret = 0;
3628 
3629 	if (alg_aai == OP_ALG_AAI_XTS) {
3630 		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
3631 		struct crypto_skcipher *fallback;
3632 
3633 		fallback = crypto_alloc_skcipher(tfm_name, 0,
3634 						 CRYPTO_ALG_NEED_FALLBACK);
3635 		if (IS_ERR(fallback)) {
3636 			pr_err("Failed to allocate %s fallback: %ld\n",
3637 			       tfm_name, PTR_ERR(fallback));
3638 			return PTR_ERR(fallback);
3639 		}
3640 
3641 		ctx->fallback = fallback;
3642 		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
3643 					    crypto_skcipher_reqsize(fallback));
3644 	} else {
3645 		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3646 	}
3647 
3648 	ret = caam_init_common(ctx, &caam_alg->caam, false);
3649 	if (ret && ctx->fallback)
3650 		crypto_free_skcipher(ctx->fallback);
3651 
3652 	return ret;
3653 }
3654 
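/*
 * AEAD transforms use DKP-generated split keys unless the template sets
 * .nodkp (e.g. the chachapoly entries above), hence !nodkp below.
 */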
3655 static int caam_aead_init(struct crypto_aead *tfm)
3656 {
3657 	struct aead_alg *alg = crypto_aead_alg(tfm);
3658 	struct caam_aead_alg *caam_alg =
3659 		 container_of(alg, struct caam_aead_alg, aead.base);
3660 	struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);
3661 
3662 	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3663 
3664 	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3665 }
3666 
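/*
 * Mirror of caam_init_common(): unmap exactly the span mapped there,
 * with the same direction and DMA_ATTR_SKIP_CPU_SYNC, then release the
 * job ring.
 */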
3667 static void caam_exit_common(struct caam_ctx *ctx)
3668 {
3669 	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3670 			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
3671 			       offsetof(struct caam_ctx, sh_desc_enc),
3672 			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3673 	caam_jr_free(ctx->jrdev);
3674 }
3675 
3676 static void caam_cra_exit(struct crypto_skcipher *tfm)
3677 {
3678 	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
3679 
3680 	if (ctx->fallback)
3681 		crypto_free_skcipher(ctx->fallback);
3682 	caam_exit_common(ctx);
3683 }
3684 
3685 static void caam_aead_exit(struct crypto_aead *tfm)
3686 {
3687 	caam_exit_common(crypto_aead_ctx_dma(tfm));
3688 }
3689 
3690 void caam_algapi_exit(void)
3691 {
3692 	int i;
3693 
3694 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3695 		struct caam_aead_alg *t_alg = driver_aeads + i;
3696 
3697 		if (t_alg->registered)
3698 			crypto_engine_unregister_aead(&t_alg->aead);
3699 	}
3700 
3701 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3702 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3703 
3704 		if (t_alg->registered)
3705 			crypto_engine_unregister_skcipher(&t_alg->skcipher);
3706 	}
3707 }
3708 
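/*
 * Fill in the fields common to all templates: the algorithms are
 * engine-backed (CRYPTO_ALG_ASYNC), allocate memory per request
 * (CRYPTO_ALG_ALLOCATES_MEMORY) and are hardware-only
 * (CRYPTO_ALG_KERN_DRIVER_ONLY). The skcipher variant ORs into
 * cra_flags so that a CRYPTO_ALG_NEED_FALLBACK set in a template
 * (XTS) is preserved.
 */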
3709 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3710 {
3711 	struct skcipher_alg *alg = &t_alg->skcipher.base;
3712 
3713 	alg->base.cra_module = THIS_MODULE;
3714 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3715 	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
3716 	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3717 			      CRYPTO_ALG_KERN_DRIVER_ONLY);
3718 
3719 	alg->init = caam_cra_init;
3720 	alg->exit = caam_cra_exit;
3721 }
3722 
3723 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3724 {
3725 	struct aead_alg *alg = &t_alg->aead.base;
3726 
3727 	alg->base.cra_module = THIS_MODULE;
3728 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3729 	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
3730 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3731 			      CRYPTO_ALG_KERN_DRIVER_ONLY;
3732 
3733 	alg->init = caam_aead_init;
3734 	alg->exit = caam_aead_exit;
3735 }
3736 
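/*
 * Probe-time algorithm registration. Eras before 10 expose CHA
 * presence and versions through the job ring's perfmon ID registers;
 * era 10+ parts report them per-CHA in the version registers. The
 * detected instances and versions then gate which templates below are
 * registered with the crypto engine.
 */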
3737 int caam_algapi_init(struct device *ctrldev)
3738 {
3739 	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
3740 	int i = 0, err = 0;
3741 	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3742 	unsigned int md_limit = SHA512_DIGEST_SIZE;
3743 	bool registered = false, gcm_support;
3744 
3745 	/*
3746 	 * Register crypto algorithms the device supports.
3747 	 * First, detect presence and attributes of DES, AES, and MD blocks.
3748 	 */
3749 	if (priv->era < 10) {
3750 		struct caam_perfmon __iomem *perfmon = &priv->jr[0]->perfmon;
3751 		u32 cha_vid, cha_inst, aes_rn;
3752 
3753 		cha_vid = rd_reg32(&perfmon->cha_id_ls);
3754 		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3755 		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3756 
3757 		cha_inst = rd_reg32(&perfmon->cha_num_ls);
3758 		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3759 			   CHA_ID_LS_DES_SHIFT;
3760 		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3761 		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3762 		ccha_inst = 0;
3763 		ptha_inst = 0;
3764 
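		/*
		 * Low-power AES CHAs before revision 8 cannot do GCM,
		 * so record that to gate the GCM templates.
		 */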
3765 		aes_rn = rd_reg32(&perfmon->cha_rev_ls) & CHA_ID_LS_AES_MASK;
3766 		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
3767 	} else {
3768 		struct version_regs __iomem *vreg = &priv->jr[0]->vreg;
3769 		u32 aesa, mdha;
3770 
3771 		aesa = rd_reg32(&vreg->aesa);
3772 		mdha = rd_reg32(&vreg->mdha);
3773 
3774 		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3775 		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3776 
3777 		des_inst = rd_reg32(&vreg->desa) & CHA_VER_NUM_MASK;
3778 		aes_inst = aesa & CHA_VER_NUM_MASK;
3779 		md_inst = mdha & CHA_VER_NUM_MASK;
3780 		ccha_inst = rd_reg32(&vreg->ccha) & CHA_VER_NUM_MASK;
3781 		ptha_inst = rd_reg32(&vreg->ptha) & CHA_VER_NUM_MASK;
3782 
3783 		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
3784 	}
3785 
3786 	/* If MD is present, limit digest size based on LP256 */
3787 	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3788 		md_limit = SHA256_DIGEST_SIZE;
3789 
3790 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3791 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3792 		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3793 
3794 		/* Skip DES algorithms if not supported by device */
3795 		if (!des_inst &&
3796 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3797 		     (alg_sel == OP_ALG_ALGSEL_DES)))
3798 			continue;
3799 
3800 		/* Skip AES algorithms if not supported by device */
3801 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3802 			continue;
3803 
3804 		/*
3805 		 * Check support for AES modes not available
3806 		 * on LP devices.
3807 		 */
3808 		if (aes_vid == CHA_VER_VID_AES_LP &&
3809 		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3810 		    OP_ALG_AAI_XTS)
3811 			continue;
3812 
3813 		caam_skcipher_alg_init(t_alg);
3814 
3815 		err = crypto_engine_register_skcipher(&t_alg->skcipher);
3816 		if (err) {
3817 			pr_warn("%s alg registration failed\n",
3818 				t_alg->skcipher.base.base.cra_driver_name);
3819 			continue;
3820 		}
3821 
3822 		t_alg->registered = true;
3823 		registered = true;
3824 	}
3825 
3826 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3827 		struct caam_aead_alg *t_alg = driver_aeads + i;
3828 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3829 				 OP_ALG_ALGSEL_MASK;
3830 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3831 				 OP_ALG_ALGSEL_MASK;
3832 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3833 
3834 		/* Skip DES algorithms if not supported by device */
3835 		if (!des_inst &&
3836 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3837 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3838 			continue;
3839 
3840 		/* Skip AES algorithms if not supported by device */
3841 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3842 			continue;
3843 
3844 		/* Skip CHACHA20 algorithms if not supported by device */
3845 		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3846 			continue;
3847 
3848 		/* Skip POLY1305 algorithms if not supported by device */
3849 		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3850 			continue;
3851 
3852 		/* Skip GCM algorithms if not supported by device */
3853 		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
3854 		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
3855 			continue;
3856 
3857 		/*
3858 		 * Skip algorithms requiring message digests
3859 		 * if MD or MD size is not supported by device.
3860 		 */
3861 		if (is_mdha(c2_alg_sel) &&
3862 		    (!md_inst || t_alg->aead.base.maxauthsize > md_limit))
3863 			continue;
3864 
3865 		caam_aead_alg_init(t_alg);
3866 
3867 		err = crypto_engine_register_aead(&t_alg->aead);
3868 		if (err) {
3869 			pr_warn("%s alg registration failed\n",
3870 				t_alg->aead.base.base.cra_driver_name);
3871 			continue;
3872 		}
3873 
3874 		t_alg->registered = true;
3875 		registered = true;
3876 	}
3877 
3878 	if (registered)
3879 		pr_info("caam algorithms registered in /proc/crypto\n");
3880 
3881 	return err;
3882 }
3883
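/*
 * After a successful caam_algapi_init(), each registered transform shows
 * up in /proc/crypto; abridged example for one of the entries above:
 *
 *	name     : rfc7539(chacha20,poly1305)
 *	driver   : rfc7539-chacha20-poly1305-caam
 *	priority : 3000
 */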