// SPDX-License-Identifier: GPL-2.0+
/*
 * Freescale FSL CAAM support for crypto API over QI backend.
 * Based on caamalg.c
 *
 * Copyright 2013-2016 Freescale Semiconductor, Inc.
 * Copyright 2016-2018 NXP
 */

#include "compat.h"
#include "ctrl.h"
#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "error.h"
#include "sg_sw_qm.h"
#include "key_gen.h"
#include "qi.h"
#include "jr.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
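/*
 * Registration priority. Note: chosen high enough that, once registered,
 * these algorithms are preferred over the generic software implementations
 * (which typically register with priority 100).
 */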
#define CAAM_CRA_PRIORITY		2000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

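/*
 * Worst-case space used by a shared descriptor: the largest descriptor
 * template (AEAD givencrypt) plus both keys inlined. CAAM_CMD_SZ is the
 * size of one 32-bit descriptor command word, so DESC_MAX_USED_LEN is
 * the same limit expressed in words.
 */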
#define DESC_MAX_USED_BYTES		(DESC_QI_AEAD_GIVENC_LEN + \
					 CAAM_MAX_KEY_SIZE)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
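	/*
	 * Key material layout (authenc case), as set up by aead_setkey():
	 *   [ MDHA split authentication key, padded to adata.keylen_pad |
	 *     encryption key of cdata.keylen bytes ]
	 * key_dma maps this entire buffer.
	 */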
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	struct device *qidev;
	spinlock_t lock;	/* Protects multiple init of driver context */
	struct caam_drv_ctx *drv_ctx[NUM_OP];
};

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 ctx1_iv_off = 0;
	u32 *nonce = NULL;
	unsigned int data_len[2];
	u32 inl_mask;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;
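
	/*
	 * desc_inline_query() reports via inl_mask which of the data_len[]
	 * entries still fit inline in the shared descriptor: bit 0 covers
	 * the (split) authentication key, bit 1 the encryption key. Keys
	 * that do not fit are referenced through key_dma instead.
	 */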
	if (alg->caam.geniv)
		goto skip_enc;

	/* aead_encrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, is_rfc3686, nonce,
			       ctx1_iv_off, true, ctrlpriv->era);

skip_enc:
	/* aead_decrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, alg->caam.geniv,
			       is_rfc3686, nonce, ctx1_iv_off, true,
			       ctrlpriv->era);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/* aead_givencrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
				  ivsize, ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, true, ctrlpriv->era);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int aead_setkey(struct crypto_aead *aead, const u8 *key,
		       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

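	/*
	 * On pre-era-6 h/w there is no DKP, so derive the split key up front
	 * by running a one-shot key-generation job on the job ring.
	 */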
	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret)
		goto badkey;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	memzero_explicit(&keys, sizeof(keys));
	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
			      ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
			      ctx->authsize, true);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	ctx->cdata.key_virt = ctx->key;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);

	ret = rfc4106_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	ctx->cdata.key_virt = ctx->key;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_QI_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				  ctx->authsize, true);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);

	ret = rfc4543_set_sh_desc(aead);
	if (ret)
		return ret;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			return ret;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			return ret;
		}
	}

	return 0;
}

static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);
	cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

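	/*
	 * XTS consumes two concatenated AES keys of equal size, so only
	 * 2 x AES-128 or 2 x AES-256 key lengths are accepted here.
	 */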
	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		dev_err(jrdev, "key size mismatch\n");
		goto badkey;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
	cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @assoclen: associated data length, in CAAM endianness
 * @assoclen_dma: bus physical mapped address of req->assoclen
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table, followed by IV
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	unsigned int assoclen;
	dma_addr_t assoclen_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @qm_sg_bytes: length of dma mapped h/w link table
 * @qm_sg_dma: bus physical mapped address of h/w link table
 * @drv_req: driver-specific request structure
 * @sgt: the h/w link table, followed by IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int qm_sg_bytes;
	dma_addr_t qm_sg_dma;
	struct caam_drv_req drv_req;
	struct qm_sg_entry sgt[0];
};

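/*
 * Driver contexts are created lazily, on first use, and cached in the
 * session context. The double-checked locking below keeps the fast path
 * lock-free once ctx->drv_ctx[type] has been initialized, while ctx->lock
 * serializes concurrent first-time initialization attempts.
 */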
static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
					enum optype type)
{
	/*
	 * This function is called on the fast path with values of 'type'
	 * known at compile time. Invalid arguments are not expected and
	 * thus no checks are made.
	 */
	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
	u32 *desc;

	if (unlikely(!drv_ctx)) {
		spin_lock(&ctx->lock);

		/* Re-read to check whether another core initialized drv_ctx */
		drv_ctx = ctx->drv_ctx[type];
		if (!drv_ctx) {
			int cpu;

			if (type == ENCRYPT)
				desc = ctx->sh_desc_enc;
			else /* (type == DECRYPT) */
				desc = ctx->sh_desc_dec;

			cpu = smp_processor_id();
			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
			if (likely(!IS_ERR_OR_NULL(drv_ctx)))
				drv_ctx->op_type = type;

			ctx->drv_ctx[type] = drv_ctx;
		}

		spin_unlock(&ctx->lock);
	}

	return drv_ctx;
}

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents, dma_addr_t iv_dma, int ivsize,
		       dma_addr_t qm_sg_dma, int qm_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (qm_sg_bytes)
		dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	int ivsize = crypto_aead_ivsize(aead);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
}

static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
	struct device *qidev;
	struct aead_edesc *edesc;
	struct aead_request *aead_req = drv_req->app_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
	int ecode = 0;

	qidev = caam_ctx->qidev;

	if (unlikely(status)) {
		u32 ssrc = status & JRSTA_SSRC_MASK;
		u8 err_id = status & JRSTA_CCBERR_ERRID_MASK;

		caam_jr_strstatus(qidev, status);
		/*
		 * If the h/w authentication (ICV) check failed, complete the
		 * request with -EBADMSG; report any other error as -EIO.
		 */
		if (ssrc == JRSTA_SSRC_CCB_ERROR &&
		    err_id == JRSTA_CCBERR_ERRID_ICVCHK)
			ecode = -EBADMSG;
		else
			ecode = -EIO;
	}

	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	aead_unmap(qidev, edesc, aead_req);

	aead_request_complete(aead_req, ecode);
	qi_cache_free(edesc);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	dma_addr_t qm_sg_dma, iv_dma = 0;
	int ivsize = 0;
	unsigned int authsize = ctx->authsize;
	int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
	int in_len, out_len;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct aead_edesc *)drv_ctx;

	/* allocate space for base edesc, link tables and IV */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			qi_cache_free(edesc);
			return ERR_PTR(dst_nents);
		}

		if (src_nents) {
			mapped_src_nents = dma_map_sg(qidev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(qidev, "unable to map source\n");
				qi_cache_free(edesc);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

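	/*
	 * An IV entry is added to the input only when the shared descriptor
	 * expects one: always for non-geniv algorithms, and for geniv ones
	 * only on rfc3686 encryption (the condition below mirrors this).
	 */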
	if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)
		ivsize = crypto_aead_ivsize(aead);

	/*
	 * Create S/G table: req->assoclen, [IV,] req->src [, req->dst].
	 * Input is not contiguous.
	 */
	qm_sg_ents = 1 + !!ivsize + mapped_src_nents +
		     (mapped_dst_nents > 1 ? mapped_dst_nents : 0);
	sg_table = &edesc->sgt[0];
	qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
	if (unlikely(offsetof(struct aead_edesc, sgt) + qm_sg_bytes + ivsize >
		     CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	if (ivsize) {
		u8 *iv = (u8 *)(sg_table + qm_sg_ents);

		/* Make sure IV is located in a DMAable area */
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(qidev, iv_dma)) {
			dev_err(qidev, "unable to map IV\n");
			caam_unmap(qidev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = aead_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	edesc->assoclen = cpu_to_caam32(req->assoclen);
	edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
		dev_err(qidev, "unable to map assoclen\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
	qm_sg_index++;
	if (ivsize) {
		dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
		qm_sg_index++;
	}
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + qm_sg_index, 0);
	qm_sg_index += mapped_src_nents;

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 qm_sg_index, 0);

	qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->qm_sg_dma = qm_sg_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;

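	/*
	 * Frame lengths: input covers the 4-byte assoclen entry, the
	 * optional IV, the associated data and the payload; output is
	 * assoclen + payload, grown (encrypt) or shrunk (decrypt) by the
	 * ICV of ctx->authsize bytes.
	 */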
	out_len = req->assoclen + req->cryptlen +
		  (encrypt ? ctx->authsize : (-ctx->authsize));
	in_len = 4 + ivsize + req->assoclen + req->cryptlen;

	fd_sgt = &edesc->drv_req.fd_sgt[0];
	dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);

	if (req->dst == req->src) {
		if (mapped_src_nents == 1)
			dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
					 out_len, 0);
		else
			dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
					     (1 + !!ivsize) * sizeof(*sg_table),
					     out_len, 0);
	} else if (mapped_dst_nents == 1) {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
				 0);
	} else {
		dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
				     qm_sg_index, out_len, 0);
	}

	return edesc;
}

static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, encrypt);
	if (IS_ERR_OR_NULL(edesc))
		return PTR_ERR(edesc);

	/* Create and submit the frame descriptor */
	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return aead_crypt(req, true);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return aead_crypt(req, false);
}

static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
{
	struct skcipher_edesc *edesc;
	struct skcipher_request *req = drv_req->app_ctx;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = caam_ctx->qidev;
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
#endif

	edesc = container_of(drv_req, typeof(*edesc), drv_req);

	if (status)
		caam_jr_strstatus(qidev, status);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
#endif

	skcipher_unmap(qidev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	if (edesc->drv_req.drv_ctx->op_type == ENCRYPT)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	qi_cache_free(edesc);
	skcipher_request_complete(req, status);
}

static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, qm_sg_ents, qm_sg_bytes;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct skcipher_edesc *)drv_ctx;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (unlikely(req->src != req->dst)) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	}

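	/*
	 * Input S/G table: one entry for the IV plus the source segments.
	 * Separate destination entries are appended only for out-of-place
	 * requests whose destination has more than one segment.
	 */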
	qm_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = qm_sg_ents;

	qm_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	qm_sg_bytes = qm_sg_ents * sizeof(struct qm_sg_entry);
	if (unlikely(offsetof(struct skcipher_edesc, sgt) + qm_sg_bytes +
		     ivsize > CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* allocate space for base edesc, link tables and IV */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* Make sure IV is located in a DMAable area */
	sg_table = &edesc->sgt[0];
	iv = (u8 *)(sg_table + qm_sg_ents);
	memcpy(iv, req->iv, ivsize);

	iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, iv_dma)) {
		dev_err(qidev, "unable to map IV\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = skcipher_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + 1, 0);

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 dst_sg_idx, 0);

	edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	fd_sgt = &edesc->drv_req.fd_sgt[0];

	dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
				  ivsize + req->cryptlen, 0);

	if (req->src == req->dst) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
				     sizeof(*sg_table), req->cryptlen, 0);
	} else if (mapped_dst_nents > 1) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
				     sizeof(*sg_table), req->cryptlen, 0);
	} else {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
				 req->cryptlen, 0);
	}

	return edesc;
}

static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ret;

	if (unlikely(caam_congested))
		return -EAGAIN;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	if (!encrypt)
		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
					 ivsize, ivsize, 0);

	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		skcipher_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}

static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};

static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = 8,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = 12,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
		}
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-"
						   "caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-"
						   "caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-"
						   "caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
1949 				.cra_driver_name = "echainiv-authenc-"
1950 						   "hmac-sha256-"
1951 						   "cbc-des3_ede-caam-qi",
1952 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1953 			},
1954 			.setkey = aead_setkey,
1955 			.setauthsize = aead_setauthsize,
1956 			.encrypt = aead_encrypt,
1957 			.decrypt = aead_decrypt,
1958 			.ivsize = DES3_EDE_BLOCK_SIZE,
1959 			.maxauthsize = SHA256_DIGEST_SIZE,
1960 		},
1961 		.caam = {
1962 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1963 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1964 					   OP_ALG_AAI_HMAC_PRECOMP,
1965 			.geniv = true,
1966 		}
1967 	},
1968 	{
1969 		.aead = {
1970 			.base = {
1971 				.cra_name = "authenc(hmac(sha384),"
1972 					    "cbc(des3_ede))",
1973 				.cra_driver_name = "authenc-hmac-sha384-"
1974 						   "cbc-des3_ede-caam-qi",
1975 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1976 			},
1977 			.setkey = aead_setkey,
1978 			.setauthsize = aead_setauthsize,
1979 			.encrypt = aead_encrypt,
1980 			.decrypt = aead_decrypt,
1981 			.ivsize = DES3_EDE_BLOCK_SIZE,
1982 			.maxauthsize = SHA384_DIGEST_SIZE,
1983 		},
1984 		.caam = {
1985 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1986 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1987 					   OP_ALG_AAI_HMAC_PRECOMP,
1988 		},
1989 	},
1990 	{
1991 		.aead = {
1992 			.base = {
1993 				.cra_name = "echainiv(authenc(hmac(sha384),"
1994 					    "cbc(des3_ede)))",
1995 				.cra_driver_name = "echainiv-authenc-"
1996 						   "hmac-sha384-"
1997 						   "cbc-des3_ede-caam-qi",
1998 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1999 			},
2000 			.setkey = aead_setkey,
2001 			.setauthsize = aead_setauthsize,
2002 			.encrypt = aead_encrypt,
2003 			.decrypt = aead_decrypt,
2004 			.ivsize = DES3_EDE_BLOCK_SIZE,
2005 			.maxauthsize = SHA384_DIGEST_SIZE,
2006 		},
2007 		.caam = {
2008 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2009 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2010 					   OP_ALG_AAI_HMAC_PRECOMP,
2011 			.geniv = true,
2012 		}
2013 	},
2014 	{
2015 		.aead = {
2016 			.base = {
2017 				.cra_name = "authenc(hmac(sha512),"
2018 					    "cbc(des3_ede))",
2019 				.cra_driver_name = "authenc-hmac-sha512-"
2020 						   "cbc-des3_ede-caam-qi",
2021 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2022 			},
2023 			.setkey = aead_setkey,
2024 			.setauthsize = aead_setauthsize,
2025 			.encrypt = aead_encrypt,
2026 			.decrypt = aead_decrypt,
2027 			.ivsize = DES3_EDE_BLOCK_SIZE,
2028 			.maxauthsize = SHA512_DIGEST_SIZE,
2029 		},
2030 		.caam = {
2031 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2032 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2033 					   OP_ALG_AAI_HMAC_PRECOMP,
2034 		},
2035 	},
2036 	{
2037 		.aead = {
2038 			.base = {
2039 				.cra_name = "echainiv(authenc(hmac(sha512),"
2040 					    "cbc(des3_ede)))",
2041 				.cra_driver_name = "echainiv-authenc-"
2042 						   "hmac-sha512-"
2043 						   "cbc-des3_ede-caam-qi",
2044 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2045 			},
2046 			.setkey = aead_setkey,
2047 			.setauthsize = aead_setauthsize,
2048 			.encrypt = aead_encrypt,
2049 			.decrypt = aead_decrypt,
2050 			.ivsize = DES3_EDE_BLOCK_SIZE,
2051 			.maxauthsize = SHA512_DIGEST_SIZE,
2052 		},
2053 		.caam = {
2054 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2055 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2056 					   OP_ALG_AAI_HMAC_PRECOMP,
2057 			.geniv = true,
2058 		}
2059 	},
2060 	{
2061 		.aead = {
2062 			.base = {
2063 				.cra_name = "authenc(hmac(md5),cbc(des))",
2064 				.cra_driver_name = "authenc-hmac-md5-"
2065 						   "cbc-des-caam-qi",
2066 				.cra_blocksize = DES_BLOCK_SIZE,
2067 			},
2068 			.setkey = aead_setkey,
2069 			.setauthsize = aead_setauthsize,
2070 			.encrypt = aead_encrypt,
2071 			.decrypt = aead_decrypt,
2072 			.ivsize = DES_BLOCK_SIZE,
2073 			.maxauthsize = MD5_DIGEST_SIZE,
2074 		},
2075 		.caam = {
2076 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2077 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2078 					   OP_ALG_AAI_HMAC_PRECOMP,
2079 		},
2080 	},
2081 	{
2082 		.aead = {
2083 			.base = {
2084 				.cra_name = "echainiv(authenc(hmac(md5),"
2085 					    "cbc(des)))",
2086 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2087 						   "cbc-des-caam-qi",
2088 				.cra_blocksize = DES_BLOCK_SIZE,
2089 			},
2090 			.setkey = aead_setkey,
2091 			.setauthsize = aead_setauthsize,
2092 			.encrypt = aead_encrypt,
2093 			.decrypt = aead_decrypt,
2094 			.ivsize = DES_BLOCK_SIZE,
2095 			.maxauthsize = MD5_DIGEST_SIZE,
2096 		},
2097 		.caam = {
2098 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2099 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2100 					   OP_ALG_AAI_HMAC_PRECOMP,
2101 			.geniv = true,
2102 		}
2103 	},
2104 	{
2105 		.aead = {
2106 			.base = {
2107 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2108 				.cra_driver_name = "authenc-hmac-sha1-"
2109 						   "cbc-des-caam-qi",
2110 				.cra_blocksize = DES_BLOCK_SIZE,
2111 			},
2112 			.setkey = aead_setkey,
2113 			.setauthsize = aead_setauthsize,
2114 			.encrypt = aead_encrypt,
2115 			.decrypt = aead_decrypt,
2116 			.ivsize = DES_BLOCK_SIZE,
2117 			.maxauthsize = SHA1_DIGEST_SIZE,
2118 		},
2119 		.caam = {
2120 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2121 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2122 					   OP_ALG_AAI_HMAC_PRECOMP,
2123 		},
2124 	},
2125 	{
2126 		.aead = {
2127 			.base = {
2128 				.cra_name = "echainiv(authenc(hmac(sha1),"
2129 					    "cbc(des)))",
2130 				.cra_driver_name = "echainiv-authenc-"
2131 						   "hmac-sha1-cbc-des-caam-qi",
2132 				.cra_blocksize = DES_BLOCK_SIZE,
2133 			},
2134 			.setkey = aead_setkey,
2135 			.setauthsize = aead_setauthsize,
2136 			.encrypt = aead_encrypt,
2137 			.decrypt = aead_decrypt,
2138 			.ivsize = DES_BLOCK_SIZE,
2139 			.maxauthsize = SHA1_DIGEST_SIZE,
2140 		},
2141 		.caam = {
2142 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2143 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2144 					   OP_ALG_AAI_HMAC_PRECOMP,
2145 			.geniv = true,
2146 		}
2147 	},
2148 	{
2149 		.aead = {
2150 			.base = {
2151 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2152 				.cra_driver_name = "authenc-hmac-sha224-"
2153 						   "cbc-des-caam-qi",
2154 				.cra_blocksize = DES_BLOCK_SIZE,
2155 			},
2156 			.setkey = aead_setkey,
2157 			.setauthsize = aead_setauthsize,
2158 			.encrypt = aead_encrypt,
2159 			.decrypt = aead_decrypt,
2160 			.ivsize = DES_BLOCK_SIZE,
2161 			.maxauthsize = SHA224_DIGEST_SIZE,
2162 		},
2163 		.caam = {
2164 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2165 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2166 					   OP_ALG_AAI_HMAC_PRECOMP,
2167 		},
2168 	},
2169 	{
2170 		.aead = {
2171 			.base = {
2172 				.cra_name = "echainiv(authenc(hmac(sha224),"
2173 					    "cbc(des)))",
2174 				.cra_driver_name = "echainiv-authenc-"
2175 						   "hmac-sha224-cbc-des-"
2176 						   "caam-qi",
2177 				.cra_blocksize = DES_BLOCK_SIZE,
2178 			},
2179 			.setkey = aead_setkey,
2180 			.setauthsize = aead_setauthsize,
2181 			.encrypt = aead_encrypt,
2182 			.decrypt = aead_decrypt,
2183 			.ivsize = DES_BLOCK_SIZE,
2184 			.maxauthsize = SHA224_DIGEST_SIZE,
2185 		},
2186 		.caam = {
2187 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2188 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2189 					   OP_ALG_AAI_HMAC_PRECOMP,
2190 			.geniv = true,
2191 		}
2192 	},
2193 	{
2194 		.aead = {
2195 			.base = {
2196 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2197 				.cra_driver_name = "authenc-hmac-sha256-"
2198 						   "cbc-des-caam-qi",
2199 				.cra_blocksize = DES_BLOCK_SIZE,
2200 			},
2201 			.setkey = aead_setkey,
2202 			.setauthsize = aead_setauthsize,
2203 			.encrypt = aead_encrypt,
2204 			.decrypt = aead_decrypt,
2205 			.ivsize = DES_BLOCK_SIZE,
2206 			.maxauthsize = SHA256_DIGEST_SIZE,
2207 		},
2208 		.caam = {
2209 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2210 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2211 					   OP_ALG_AAI_HMAC_PRECOMP,
2212 		},
2213 	},
2214 	{
2215 		.aead = {
2216 			.base = {
2217 				.cra_name = "echainiv(authenc(hmac(sha256),"
2218 					    "cbc(des)))",
2219 				.cra_driver_name = "echainiv-authenc-"
2220 						   "hmac-sha256-cbc-des-"
2221 						   "caam-qi",
2222 				.cra_blocksize = DES_BLOCK_SIZE,
2223 			},
2224 			.setkey = aead_setkey,
2225 			.setauthsize = aead_setauthsize,
2226 			.encrypt = aead_encrypt,
2227 			.decrypt = aead_decrypt,
2228 			.ivsize = DES_BLOCK_SIZE,
2229 			.maxauthsize = SHA256_DIGEST_SIZE,
2230 		},
2231 		.caam = {
2232 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2233 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2234 					   OP_ALG_AAI_HMAC_PRECOMP,
2235 			.geniv = true,
2236 		},
2237 	},
2238 	{
2239 		.aead = {
2240 			.base = {
2241 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2242 				.cra_driver_name = "authenc-hmac-sha384-"
2243 						   "cbc-des-caam-qi",
2244 				.cra_blocksize = DES_BLOCK_SIZE,
2245 			},
2246 			.setkey = aead_setkey,
2247 			.setauthsize = aead_setauthsize,
2248 			.encrypt = aead_encrypt,
2249 			.decrypt = aead_decrypt,
2250 			.ivsize = DES_BLOCK_SIZE,
2251 			.maxauthsize = SHA384_DIGEST_SIZE,
2252 		},
2253 		.caam = {
2254 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2255 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2256 					   OP_ALG_AAI_HMAC_PRECOMP,
2257 		},
2258 	},
2259 	{
2260 		.aead = {
2261 			.base = {
2262 				.cra_name = "echainiv(authenc(hmac(sha384),"
2263 					    "cbc(des)))",
2264 				.cra_driver_name = "echainiv-authenc-"
2265 						   "hmac-sha384-cbc-des-"
2266 						   "caam-qi",
2267 				.cra_blocksize = DES_BLOCK_SIZE,
2268 			},
2269 			.setkey = aead_setkey,
2270 			.setauthsize = aead_setauthsize,
2271 			.encrypt = aead_encrypt,
2272 			.decrypt = aead_decrypt,
2273 			.ivsize = DES_BLOCK_SIZE,
2274 			.maxauthsize = SHA384_DIGEST_SIZE,
2275 		},
2276 		.caam = {
2277 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2278 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2279 					   OP_ALG_AAI_HMAC_PRECOMP,
2280 			.geniv = true,
2281 		}
2282 	},
2283 	{
2284 		.aead = {
2285 			.base = {
2286 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2287 				.cra_driver_name = "authenc-hmac-sha512-"
2288 						   "cbc-des-caam-qi",
2289 				.cra_blocksize = DES_BLOCK_SIZE,
2290 			},
2291 			.setkey = aead_setkey,
2292 			.setauthsize = aead_setauthsize,
2293 			.encrypt = aead_encrypt,
2294 			.decrypt = aead_decrypt,
2295 			.ivsize = DES_BLOCK_SIZE,
2296 			.maxauthsize = SHA512_DIGEST_SIZE,
2297 		},
2298 		.caam = {
2299 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2300 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2301 					   OP_ALG_AAI_HMAC_PRECOMP,
2302 		}
2303 	},
2304 	{
2305 		.aead = {
2306 			.base = {
2307 				.cra_name = "echainiv(authenc(hmac(sha512),"
2308 					    "cbc(des)))",
2309 				.cra_driver_name = "echainiv-authenc-"
2310 						   "hmac-sha512-cbc-des-"
2311 						   "caam-qi",
2312 				.cra_blocksize = DES_BLOCK_SIZE,
2313 			},
2314 			.setkey = aead_setkey,
2315 			.setauthsize = aead_setauthsize,
2316 			.encrypt = aead_encrypt,
2317 			.decrypt = aead_decrypt,
2318 			.ivsize = DES_BLOCK_SIZE,
2319 			.maxauthsize = SHA512_DIGEST_SIZE,
2320 		},
2321 		.caam = {
2322 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2323 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2324 					   OP_ALG_AAI_HMAC_PRECOMP,
2325 			.geniv = true,
2326 		}
2327 	},
2328 };
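
/*
 * Consumer-side sketch (illustrative only, not part of this driver): the
 * authenc() templates above are keyed with a single blob that packs the
 * authentication and encryption keys behind a struct crypto_authenc_key_param
 * header; aead_setkey() later splits that blob with
 * crypto_authenc_extractkeys(). A minimal sketch, assuming
 * <linux/rtnetlink.h> and <crypto/authenc.h> for the RTA helpers and the
 * key-param layout; the function name and key values are placeholders:
 *
 *	static int example_authenc_setkey(struct crypto_aead *tfm,
 *					  const u8 *authkey, unsigned int authkeylen,
 *					  const u8 *enckey, unsigned int enckeylen)
 *	{
 *		unsigned int len = RTA_SPACE(sizeof(struct crypto_authenc_key_param)) +
 *				   authkeylen + enckeylen;
 *		struct crypto_authenc_key_param *param;
 *		struct rtattr *rta;
 *		u8 *blob, *p;
 *		int err;
 *
 *		blob = kmalloc(len, GFP_KERNEL);
 *		if (!blob)
 *			return -ENOMEM;
 *
 *		rta = (struct rtattr *)blob;
 *		rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
 *		rta->rta_len = RTA_LENGTH(sizeof(*param));
 *		param = RTA_DATA(rta);
 *		param->enckeylen = cpu_to_be32(enckeylen);
 *
 *		p = blob + RTA_SPACE(sizeof(*param));
 *		memcpy(p, authkey, authkeylen);
 *		memcpy(p + authkeylen, enckey, enckeylen);
 *
 *		err = crypto_aead_setkey(tfm, blob, len);
 *		kzfree(blob);
 *		return err;
 *	}
 *
 * The tfm itself would come from, e.g.,
 * crypto_alloc_aead("authenc(hmac(sha256),cbc(aes))", 0, 0); the crypto core
 * selects this driver whenever CAAM_CRA_PRIORITY outbids the software
 * implementations.
 */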
2329 
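/*
 * caam_init_common - per-tfm setup shared by the skcipher and AEAD paths:
 * reserve a job ring for the session, DMA-map the key buffer and copy the
 * class 1/2 descriptor header templates out of the algorithm entry.
 */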
2330 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
2331 			    bool uses_dkp)
2332 {
2333 	struct caam_drv_private *priv;
2334 
2335 	/*
2336 	 * distribute tfms across job rings to ensure in-order
2337 	 * crypto request processing per tfm
2338 	 */
2339 	ctx->jrdev = caam_jr_alloc();
2340 	if (IS_ERR(ctx->jrdev)) {
2341 		pr_err("Job Ring Device allocation for transform failed\n");
2342 		return PTR_ERR(ctx->jrdev);
2343 	}
2344 
2345 	priv = dev_get_drvdata(ctx->jrdev->parent);
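	/*
	 * On Era 6+ devices, DKP (Derived Key Protocol) computes the split
	 * key on the CAAM and writes it back into ctx->key, so algorithms
	 * that use it need the buffer mapped bidirectionally; for everything
	 * else the key only ever travels CPU -> device.
	 */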
2346 	if (priv->era >= 6 && uses_dkp)
2347 		ctx->dir = DMA_BIDIRECTIONAL;
2348 	else
2349 		ctx->dir = DMA_TO_DEVICE;
2350 
2351 	ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key, sizeof(ctx->key),
2352 				      ctx->dir);
2353 	if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
2354 		dev_err(ctx->jrdev, "unable to map key\n");
2355 		caam_jr_free(ctx->jrdev);
2356 		return -ENOMEM;
2357 	}
2358 
2359 	/* copy descriptor header template value */
2360 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
2361 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
2362 
2363 	ctx->qidev = priv->qidev;
2364 
2365 	spin_lock_init(&ctx->lock);
2366 	ctx->drv_ctx[ENCRYPT] = NULL;
2367 	ctx->drv_ctx[DECRYPT] = NULL;
2368 
2369 	return 0;
2370 }
2371 
2372 static int caam_cra_init(struct crypto_skcipher *tfm)
2373 {
2374 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
2375 	struct caam_skcipher_alg *caam_alg =
2376 		container_of(alg, typeof(*caam_alg), skcipher);
2377 
2378 	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
2379 				false);
2380 }
2381 
2382 static int caam_aead_init(struct crypto_aead *tfm)
2383 {
2384 	struct aead_alg *alg = crypto_aead_alg(tfm);
2385 	struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
2386 						      aead);
2387 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
2388 
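	/*
	 * Only the authenc() templates use aead_setkey(), i.e. a split key
	 * produced via DKP; comparing the setkey hook is how they are told
	 * apart from the GCM-style transforms registered by this driver.
	 */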
2389 	return caam_init_common(ctx, &caam_alg->caam,
2390 				alg->setkey == aead_setkey);
2391 }
2392 
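/*
 * Undo caam_init_common(): release the per-operation driver contexts (if
 * any were instantiated), unmap the key buffer and give the job ring back.
 */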
2393 static void caam_exit_common(struct caam_ctx *ctx)
2394 {
2395 	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
2396 	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
2397 
2398 	dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key), ctx->dir);
2399 
2400 	caam_jr_free(ctx->jrdev);
2401 }
2402 
2403 static void caam_cra_exit(struct crypto_skcipher *tfm)
2404 {
2405 	caam_exit_common(crypto_skcipher_ctx(tfm));
2406 }
2407 
2408 static void caam_aead_exit(struct crypto_aead *tfm)
2409 {
2410 	caam_exit_common(crypto_aead_ctx(tfm));
2411 }
2412 
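/* Unregister only the templates that caam_qi_algapi_init() registered */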
2413 static void __exit caam_qi_algapi_exit(void)
2414 {
2415 	int i;
2416 
2417 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2418 		struct caam_aead_alg *t_alg = driver_aeads + i;
2419 
2420 		if (t_alg->registered)
2421 			crypto_unregister_aead(&t_alg->aead);
2422 	}
2423 
2424 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2425 		struct caam_skcipher_alg *t_alg = driver_algs + i;
2426 
2427 		if (t_alg->registered)
2428 			crypto_unregister_skcipher(&t_alg->skcipher);
2429 	}
2430 }
2431 
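/*
 * Fill in the boilerplate common to every template: CRYPTO_ALG_ASYNC because
 * requests complete from the QI response path, CRYPTO_ALG_KERN_DRIVER_ONLY to
 * flag that the implementation is backed by hardware only.
 */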
2432 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
2433 {
2434 	struct skcipher_alg *alg = &t_alg->skcipher;
2435 
2436 	alg->base.cra_module = THIS_MODULE;
2437 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
2438 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2439 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
2440 
2441 	alg->init = caam_cra_init;
2442 	alg->exit = caam_cra_exit;
2443 }
2444 
2445 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
2446 {
2447 	struct aead_alg *alg = &t_alg->aead;
2448 
2449 	alg->base.cra_module = THIS_MODULE;
2450 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
2451 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2452 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
2453 
2454 	alg->init = caam_aead_init;
2455 	alg->exit = caam_aead_exit;
2456 }
2457 
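/*
 * caam_qi_algapi_init - look up the CAAM controller in the device tree,
 * read which CHAs (DES, AES, MD) are instantiated and at what capability
 * level, and register only the templates the hardware can actually run.
 */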
2458 static int __init caam_qi_algapi_init(void)
2459 {
2460 	struct device_node *dev_node;
2461 	struct platform_device *pdev;
2462 	struct device *ctrldev;
2463 	struct caam_drv_private *priv;
2464 	int i = 0, err = 0;
2465 	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
2466 	unsigned int md_limit = SHA512_DIGEST_SIZE;
2467 	bool registered = false;
2468 
2469 	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
2470 	if (!dev_node) {
2471 		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
2472 		if (!dev_node)
2473 			return -ENODEV;
2474 	}
2475 
2476 	pdev = of_find_device_by_node(dev_node);
2477 	of_node_put(dev_node);
2478 	if (!pdev)
2479 		return -ENODEV;
2480 
2481 	ctrldev = &pdev->dev;
2482 	priv = dev_get_drvdata(ctrldev);
2483 
2484 	/*
2485 	 * If priv is NULL, it's probably because the caam driver wasn't
2486 	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
2487 	 */
2488 	if (!priv || !priv->qi_present) {
		err = -ENODEV;
		goto out_put_dev;
	}
2490 
2491 	if (caam_dpaa2) {
2492 		dev_info(ctrldev, "caam/qi frontend driver not suitable for DPAA 2.x, aborting...\n");
		err = -ENODEV;
		goto out_put_dev;
2494 	}
2495 
2496 	/*
2497 	 * Register crypto algorithms the device supports.
2498 	 * First, detect presence and attributes of DES, AES, and MD blocks.
2499 	 */
2500 	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
2501 	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
2502 	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
2503 	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
2504 	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2505 
2506 	/* If MD is present, limit digest size based on LP256 */
2507 	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
2508 		md_limit = SHA256_DIGEST_SIZE;
2509 
2510 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2511 		struct caam_skcipher_alg *t_alg = driver_algs + i;
2512 		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
2513 
2514 		/* Skip DES algorithms if not supported by device */
2515 		if (!des_inst &&
2516 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
2517 		     (alg_sel == OP_ALG_ALGSEL_DES)))
2518 			continue;
2519 
2520 		/* Skip AES algorithms if not supported by device */
2521 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
2522 			continue;
2523 
2524 		caam_skcipher_alg_init(t_alg);
2525 
2526 		err = crypto_register_skcipher(&t_alg->skcipher);
2527 		if (err) {
2528 			dev_warn(priv->qidev, "%s alg registration failed\n",
2529 				 t_alg->skcipher.base.cra_driver_name);
2530 			continue;
2531 		}
2532 
2533 		t_alg->registered = true;
2534 		registered = true;
2535 	}
2536 
2537 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2538 		struct caam_aead_alg *t_alg = driver_aeads + i;
2539 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
2540 				 OP_ALG_ALGSEL_MASK;
2541 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
2542 				 OP_ALG_ALGSEL_MASK;
2543 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
2544 
2545 		/* Skip DES algorithms if not supported by device */
2546 		if (!des_inst &&
2547 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
2548 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
2549 			continue;
2550 
2551 		/* Skip AES algorithms if not supported by device */
2552 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
2553 			continue;
2554 
2555 		/*
2556 		 * Check support for AES algorithms not available
2557 		 * on LP devices.
2558 		 */
2559 		if (((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) &&
2560 		    (alg_aai == OP_ALG_AAI_GCM))
2561 			continue;
2562 
2563 		/*
2564 		 * Skip algorithms requiring message digests
2565 		 * if MD or MD size is not supported by device.
2566 		 */
2567 		if (c2_alg_sel &&
2568 		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
2569 			continue;
2570 
2571 		caam_aead_alg_init(t_alg);
2572 
2573 		err = crypto_register_aead(&t_alg->aead);
2574 		if (err) {
			dev_warn(priv->qidev, "%s alg registration failed\n",
				 t_alg->aead.base.cra_driver_name);
2577 			continue;
2578 		}
2579 
2580 		t_alg->registered = true;
2581 		registered = true;
2582 	}
2583 
2584 	if (registered)
2585 		dev_info(priv->qidev, "algorithms registered in /proc/crypto\n");
2586 
out_put_dev:
	/* drop the reference taken by of_find_device_by_node() */
	put_device(ctrldev);
2587 	return err;
2588 }
2589 
2590 module_init(caam_qi_algapi_init);
2591 module_exit(caam_qi_algapi_exit);
2592 
2593 MODULE_LICENSE("GPL");
2594 MODULE_DESCRIPTION("Support for crypto API using CAAM-QI backend");
2595 MODULE_AUTHOR("Freescale Semiconductor");
2596