xref: /linux/drivers/crypto/caam/caamalg_qi.c (revision a6f37cee6e4f6fa9d61962efbcb06a032efed1ba)
1 /*
2  * Freescale FSL CAAM support for crypto API over QI backend.
3  * Based on caamalg.c
4  *
5  * Copyright 2013-2016 Freescale Semiconductor, Inc.
6  * Copyright 2016-2018 NXP
7  */
8 
9 #include "compat.h"
10 #include "ctrl.h"
11 #include "regs.h"
12 #include "intern.h"
13 #include "desc_constr.h"
14 #include "error.h"
15 #include "sg_sw_qm.h"
16 #include "key_gen.h"
17 #include "qi.h"
18 #include "jr.h"
19 #include "caamalg_desc.h"
20 
/*
 * crypto alg
 */
/* Registration priority for all algorithms in this file */
#define CAAM_CRA_PRIORITY		2000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

/* Upper bound on a shared descriptor, in bytes and in CAAM_CMD_SZ words */
#define DESC_MAX_USED_BYTES		(DESC_QI_AEAD_GIVENC_LEN + \
					 CAAM_MAX_KEY_SIZE)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
32 
/* CAAM-specific parameters attached to each algorithm template */
struct caam_alg_entry {
	int class1_alg_type;	/* OP_ALG_* selector for the class 1 engine */
	int class2_alg_type;	/* OP_ALG_* selector for the class 2 engine */
	bool rfc3686;		/* CTR variant wrapped per RFC 3686 */
	bool geniv;		/* IV is generated by the driver/h/w */
};
39 
/* aead_alg wrapper carrying the CAAM-specific template data */
struct caam_aead_alg {
	struct aead_alg aead;		/* generic crypto API descriptor */
	struct caam_alg_entry caam;	/* CAAM-specific parameters */
	bool registered;		/* registration state flag */
};
45 
/* skcipher_alg wrapper carrying the CAAM-specific template data */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;	/* generic crypto API descriptor */
	struct caam_alg_entry caam;	/* CAAM-specific parameters */
	bool registered;		/* registration state flag */
};
51 
/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;	/* job ring device; owns the key DMA mapping */
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u8 key[CAAM_MAX_KEY_SIZE];	/* [auth split key +] cipher key */
	dma_addr_t key_dma;		/* DMA address of key[] */
	enum dma_data_direction dir;	/* direction used when syncing key[] */
	struct alginfo adata;		/* authentication algorithm details */
	struct alginfo cdata;		/* cipher algorithm details */
	unsigned int authsize;		/* ICV (tag) size, in bytes */
	struct device *qidev;		/* device used on the QI fast path */
	spinlock_t lock;	/* Protects multiple init of driver context */
	struct caam_drv_ctx *drv_ctx[NUM_OP];	/* lazily built QI contexts */
};
69 
/*
 * aead_set_sh_desc() - (re)build the encrypt and decrypt (and, for geniv
 * algorithms, givencrypt) shared descriptors for an authenc-style session.
 *
 * Returns 0 when the key or authsize is not yet configured (descriptors are
 * rebuilt once both are known), or -EINVAL if job + shared descriptor cannot
 * fit the h/w buffer even with both keys referenced by DMA address.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 ctx1_iv_off = 0;
	u32 *nonce = NULL;
	/* [0] = padded auth (split) key length, [1] = cipher key length */
	unsigned int data_len[2];
	/* bit 0: auth key fits inline; bit 1: cipher key fits inline */
	u32 inl_mask;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);

	/* Nothing to build until both setkey and setauthsize have run */
	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is stored at the tail of the cipher key in ctx->key */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* geniv algorithms use the givencrypt descriptor instead */
	if (alg->caam.geniv)
		goto skip_enc;

	/* aead_encrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* Reference each key inline or by DMA address, per inl_mask */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, is_rfc3686, nonce,
			       ctx1_iv_off, true, ctrlpriv->era);

skip_enc:
	/* aead_decrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
			       ivsize, ctx->authsize, alg->caam.geniv,
			       is_rfc3686, nonce, ctx1_iv_off, true,
			       ctrlpriv->era);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/* aead_givencrypt shared descriptor */
	if (desc_inline_query(DESC_QI_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* givencrypt reuses the encrypt descriptor slot */
	cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
				  ivsize, ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, true, ctrlpriv->era);

skip_givenc:
	return 0;
}
192 
193 static int aead_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
194 {
195 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
196 
197 	ctx->authsize = authsize;
198 	aead_set_sh_desc(authenc);
199 
200 	return 0;
201 }
202 
/*
 * aead_setkey() - split the authenc key blob into auth + cipher keys,
 * store them in ctx->key (device-synced), and rebuild shared descriptors.
 *
 * On CAAM era >= 6 the split key is derived by the Derived Key Protocol
 * (DKP) inside the shared descriptor, so the raw auth key is stored;
 * otherwise gen_split_key() computes the split key via the job ring.
 * Returns 0 on success, -EINVAL (with BAD_KEY_LEN flag set) on any failure.
 */
static int aead_setkey(struct crypto_aead *aead, const u8 *key,
		       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		/*
		 * NOTE(review): the bound above checks keylen_pad, but the
		 * memcpy below copies authkeylen bytes — confirm authkeylen
		 * cannot exceed keylen_pad for any supported algorithm.
		 */
		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* Pre-era-6: derive the split key now, via the job ring */
	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret)
		goto badkey;

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	/* Scrub the stack copy of the key material in both exit paths */
	memzero_explicit(&keys, sizeof(keys));
	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
293 
294 static int gcm_set_sh_desc(struct crypto_aead *aead)
295 {
296 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
297 	unsigned int ivsize = crypto_aead_ivsize(aead);
298 	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
299 			ctx->cdata.keylen;
300 
301 	if (!ctx->cdata.keylen || !ctx->authsize)
302 		return 0;
303 
304 	/*
305 	 * Job Descriptor and Shared Descriptor
306 	 * must fit into the 64-word Descriptor h/w Buffer
307 	 */
308 	if (rem_bytes >= DESC_QI_GCM_ENC_LEN) {
309 		ctx->cdata.key_inline = true;
310 		ctx->cdata.key_virt = ctx->key;
311 	} else {
312 		ctx->cdata.key_inline = false;
313 		ctx->cdata.key_dma = ctx->key_dma;
314 	}
315 
316 	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
317 			      ctx->authsize, true);
318 
319 	/*
320 	 * Job Descriptor and Shared Descriptor
321 	 * must fit into the 64-word Descriptor h/w Buffer
322 	 */
323 	if (rem_bytes >= DESC_QI_GCM_DEC_LEN) {
324 		ctx->cdata.key_inline = true;
325 		ctx->cdata.key_virt = ctx->key;
326 	} else {
327 		ctx->cdata.key_inline = false;
328 		ctx->cdata.key_dma = ctx->key_dma;
329 	}
330 
331 	cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
332 			      ctx->authsize, true);
333 
334 	return 0;
335 }
336 
337 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
338 {
339 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
340 
341 	ctx->authsize = authsize;
342 	gcm_set_sh_desc(authenc);
343 
344 	return 0;
345 }
346 
347 static int gcm_setkey(struct crypto_aead *aead,
348 		      const u8 *key, unsigned int keylen)
349 {
350 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
351 	struct device *jrdev = ctx->jrdev;
352 	int ret;
353 
354 #ifdef DEBUG
355 	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
356 		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
357 #endif
358 
359 	memcpy(ctx->key, key, keylen);
360 	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
361 	ctx->cdata.keylen = keylen;
362 
363 	ret = gcm_set_sh_desc(aead);
364 	if (ret)
365 		return ret;
366 
367 	/* Now update the driver contexts with the new shared descriptor */
368 	if (ctx->drv_ctx[ENCRYPT]) {
369 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
370 					  ctx->sh_desc_enc);
371 		if (ret) {
372 			dev_err(jrdev, "driver enc context update failed\n");
373 			return ret;
374 		}
375 	}
376 
377 	if (ctx->drv_ctx[DECRYPT]) {
378 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
379 					  ctx->sh_desc_dec);
380 		if (ret) {
381 			dev_err(jrdev, "driver dec context update failed\n");
382 			return ret;
383 		}
384 	}
385 
386 	return 0;
387 }
388 
389 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
390 {
391 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
392 	unsigned int ivsize = crypto_aead_ivsize(aead);
393 	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
394 			ctx->cdata.keylen;
395 
396 	if (!ctx->cdata.keylen || !ctx->authsize)
397 		return 0;
398 
399 	ctx->cdata.key_virt = ctx->key;
400 
401 	/*
402 	 * Job Descriptor and Shared Descriptor
403 	 * must fit into the 64-word Descriptor h/w Buffer
404 	 */
405 	if (rem_bytes >= DESC_QI_RFC4106_ENC_LEN) {
406 		ctx->cdata.key_inline = true;
407 	} else {
408 		ctx->cdata.key_inline = false;
409 		ctx->cdata.key_dma = ctx->key_dma;
410 	}
411 
412 	cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
413 				  ctx->authsize, true);
414 
415 	/*
416 	 * Job Descriptor and Shared Descriptor
417 	 * must fit into the 64-word Descriptor h/w Buffer
418 	 */
419 	if (rem_bytes >= DESC_QI_RFC4106_DEC_LEN) {
420 		ctx->cdata.key_inline = true;
421 	} else {
422 		ctx->cdata.key_inline = false;
423 		ctx->cdata.key_dma = ctx->key_dma;
424 	}
425 
426 	cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
427 				  ctx->authsize, true);
428 
429 	return 0;
430 }
431 
432 static int rfc4106_setauthsize(struct crypto_aead *authenc,
433 			       unsigned int authsize)
434 {
435 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
436 
437 	ctx->authsize = authsize;
438 	rfc4106_set_sh_desc(authenc);
439 
440 	return 0;
441 }
442 
443 static int rfc4106_setkey(struct crypto_aead *aead,
444 			  const u8 *key, unsigned int keylen)
445 {
446 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
447 	struct device *jrdev = ctx->jrdev;
448 	int ret;
449 
450 	if (keylen < 4)
451 		return -EINVAL;
452 
453 #ifdef DEBUG
454 	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
455 		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
456 #endif
457 
458 	memcpy(ctx->key, key, keylen);
459 	/*
460 	 * The last four bytes of the key material are used as the salt value
461 	 * in the nonce. Update the AES key length.
462 	 */
463 	ctx->cdata.keylen = keylen - 4;
464 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
465 				   ctx->dir);
466 
467 	ret = rfc4106_set_sh_desc(aead);
468 	if (ret)
469 		return ret;
470 
471 	/* Now update the driver contexts with the new shared descriptor */
472 	if (ctx->drv_ctx[ENCRYPT]) {
473 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
474 					  ctx->sh_desc_enc);
475 		if (ret) {
476 			dev_err(jrdev, "driver enc context update failed\n");
477 			return ret;
478 		}
479 	}
480 
481 	if (ctx->drv_ctx[DECRYPT]) {
482 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
483 					  ctx->sh_desc_dec);
484 		if (ret) {
485 			dev_err(jrdev, "driver dec context update failed\n");
486 			return ret;
487 		}
488 	}
489 
490 	return 0;
491 }
492 
493 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
494 {
495 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
496 	unsigned int ivsize = crypto_aead_ivsize(aead);
497 	int rem_bytes = CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN -
498 			ctx->cdata.keylen;
499 
500 	if (!ctx->cdata.keylen || !ctx->authsize)
501 		return 0;
502 
503 	ctx->cdata.key_virt = ctx->key;
504 
505 	/*
506 	 * Job Descriptor and Shared Descriptor
507 	 * must fit into the 64-word Descriptor h/w Buffer
508 	 */
509 	if (rem_bytes >= DESC_QI_RFC4543_ENC_LEN) {
510 		ctx->cdata.key_inline = true;
511 	} else {
512 		ctx->cdata.key_inline = false;
513 		ctx->cdata.key_dma = ctx->key_dma;
514 	}
515 
516 	cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
517 				  ctx->authsize, true);
518 
519 	/*
520 	 * Job Descriptor and Shared Descriptor
521 	 * must fit into the 64-word Descriptor h/w Buffer
522 	 */
523 	if (rem_bytes >= DESC_QI_RFC4543_DEC_LEN) {
524 		ctx->cdata.key_inline = true;
525 	} else {
526 		ctx->cdata.key_inline = false;
527 		ctx->cdata.key_dma = ctx->key_dma;
528 	}
529 
530 	cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
531 				  ctx->authsize, true);
532 
533 	return 0;
534 }
535 
536 static int rfc4543_setauthsize(struct crypto_aead *authenc,
537 			       unsigned int authsize)
538 {
539 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
540 
541 	ctx->authsize = authsize;
542 	rfc4543_set_sh_desc(authenc);
543 
544 	return 0;
545 }
546 
547 static int rfc4543_setkey(struct crypto_aead *aead,
548 			  const u8 *key, unsigned int keylen)
549 {
550 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
551 	struct device *jrdev = ctx->jrdev;
552 	int ret;
553 
554 	if (keylen < 4)
555 		return -EINVAL;
556 
557 #ifdef DEBUG
558 	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
559 		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
560 #endif
561 
562 	memcpy(ctx->key, key, keylen);
563 	/*
564 	 * The last four bytes of the key material are used as the salt value
565 	 * in the nonce. Update the AES key length.
566 	 */
567 	ctx->cdata.keylen = keylen - 4;
568 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
569 				   ctx->dir);
570 
571 	ret = rfc4543_set_sh_desc(aead);
572 	if (ret)
573 		return ret;
574 
575 	/* Now update the driver contexts with the new shared descriptor */
576 	if (ctx->drv_ctx[ENCRYPT]) {
577 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
578 					  ctx->sh_desc_enc);
579 		if (ret) {
580 			dev_err(jrdev, "driver enc context update failed\n");
581 			return ret;
582 		}
583 	}
584 
585 	if (ctx->drv_ctx[DECRYPT]) {
586 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
587 					  ctx->sh_desc_dec);
588 		if (ret) {
589 			dev_err(jrdev, "driver dec context update failed\n");
590 			return ret;
591 		}
592 	}
593 
594 	return 0;
595 }
596 
/*
 * skcipher_setkey() - build the skcipher shared descriptors with the key
 * inlined, and push them to any live driver contexts.
 *
 * Key length bounds are enforced by the crypto core from the algorithm's
 * registered min/max keysize, so no explicit length check is needed here.
 * Returns 0 on success, -EINVAL (with BAD_KEY_LEN flag set) on failure.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* the trailing nonce stays in key[]; only shrink the length */
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher encrypt, decrypt shared descriptors */
	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);
	cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
				   is_rfc3686, ctx1_iv_off);

	/* Now update the driver contexts with the new shared descriptor */
	if (ctx->drv_ctx[ENCRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
					  ctx->sh_desc_enc);
		if (ret) {
			dev_err(jrdev, "driver enc context update failed\n");
			goto badkey;
		}
	}

	if (ctx->drv_ctx[DECRYPT]) {
		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
					  ctx->sh_desc_dec);
		if (ret) {
			dev_err(jrdev, "driver dec context update failed\n");
			goto badkey;
		}
	}

	return ret;
badkey:
	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
668 
669 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
670 			       unsigned int keylen)
671 {
672 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
673 	struct device *jrdev = ctx->jrdev;
674 	int ret = 0;
675 
676 	if (keylen != 2 * AES_MIN_KEY_SIZE  && keylen != 2 * AES_MAX_KEY_SIZE) {
677 		dev_err(jrdev, "key size mismatch\n");
678 		goto badkey;
679 	}
680 
681 	ctx->cdata.keylen = keylen;
682 	ctx->cdata.key_virt = key;
683 	ctx->cdata.key_inline = true;
684 
685 	/* xts skcipher encrypt, decrypt shared descriptors */
686 	cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
687 	cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);
688 
689 	/* Now update the driver contexts with the new shared descriptor */
690 	if (ctx->drv_ctx[ENCRYPT]) {
691 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
692 					  ctx->sh_desc_enc);
693 		if (ret) {
694 			dev_err(jrdev, "driver enc context update failed\n");
695 			goto badkey;
696 		}
697 	}
698 
699 	if (ctx->drv_ctx[DECRYPT]) {
700 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
701 					  ctx->sh_desc_dec);
702 		if (ret) {
703 			dev_err(jrdev, "driver dec context update failed\n");
704 			goto badkey;
705 		}
706 	}
707 
708 	return ret;
709 badkey:
710 	crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
711 	return -EINVAL;
712 }
713 
714 /*
715  * aead_edesc - s/w-extended aead descriptor
716  * @src_nents: number of segments in input scatterlist
717  * @dst_nents: number of segments in output scatterlist
718  * @iv_dma: dma address of iv for checking continuity and link table
719  * @qm_sg_bytes: length of dma mapped h/w link table
720  * @qm_sg_dma: bus physical mapped address of h/w link table
721  * @assoclen: associated data length, in CAAM endianness
722  * @assoclen_dma: bus physical mapped address of req->assoclen
723  * @drv_req: driver-specific request structure
724  * @sgt: the h/w link table, followed by IV
725  */
726 struct aead_edesc {
727 	int src_nents;
728 	int dst_nents;
729 	dma_addr_t iv_dma;
730 	int qm_sg_bytes;
731 	dma_addr_t qm_sg_dma;
732 	unsigned int assoclen;
733 	dma_addr_t assoclen_dma;
734 	struct caam_drv_req drv_req;
735 	struct qm_sg_entry sgt[0];
736 };
737 
738 /*
739  * skcipher_edesc - s/w-extended skcipher descriptor
740  * @src_nents: number of segments in input scatterlist
741  * @dst_nents: number of segments in output scatterlist
742  * @iv_dma: dma address of iv for checking continuity and link table
743  * @qm_sg_bytes: length of dma mapped h/w link table
744  * @qm_sg_dma: bus physical mapped address of h/w link table
745  * @drv_req: driver-specific request structure
746  * @sgt: the h/w link table, followed by IV
747  */
748 struct skcipher_edesc {
749 	int src_nents;
750 	int dst_nents;
751 	dma_addr_t iv_dma;
752 	int qm_sg_bytes;
753 	dma_addr_t qm_sg_dma;
754 	struct caam_drv_req drv_req;
755 	struct qm_sg_entry sgt[0];
756 };
757 
/*
 * get_drv_ctx() - return the QI driver context for the given operation
 * type, lazily creating it on first use.
 *
 * Uses double-checked locking on ctx->lock so the common (already
 * initialized) case takes no lock.
 *
 * NOTE(review): on caam_drv_ctx_init() failure the ERR_PTR/NULL value is
 * cached in ctx->drv_ctx[type] and returned on every subsequent call, so
 * a transient init failure is never retried — confirm this is intended.
 */
static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
					enum optype type)
{
	/*
	 * This function is called on the fast path with values of 'type'
	 * known at compile time. Invalid arguments are not expected and
	 * thus no checks are made.
	 */
	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
	u32 *desc;

	if (unlikely(!drv_ctx)) {
		spin_lock(&ctx->lock);

		/* Read again to check if some other core init drv_ctx */
		drv_ctx = ctx->drv_ctx[type];
		if (!drv_ctx) {
			int cpu;

			if (type == ENCRYPT)
				desc = ctx->sh_desc_enc;
			else /* (type == DECRYPT) */
				desc = ctx->sh_desc_dec;

			/* Bind the new context to the submitting CPU */
			cpu = smp_processor_id();
			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
			if (likely(!IS_ERR_OR_NULL(drv_ctx)))
				drv_ctx->op_type = type;

			ctx->drv_ctx[type] = drv_ctx;
		}

		spin_unlock(&ctx->lock);
	}

	return drv_ctx;
}
795 
796 static void caam_unmap(struct device *dev, struct scatterlist *src,
797 		       struct scatterlist *dst, int src_nents,
798 		       int dst_nents, dma_addr_t iv_dma, int ivsize,
799 		       dma_addr_t qm_sg_dma, int qm_sg_bytes)
800 {
801 	if (dst != src) {
802 		if (src_nents)
803 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
804 		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
805 	} else {
806 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
807 	}
808 
809 	if (iv_dma)
810 		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
811 	if (qm_sg_bytes)
812 		dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
813 }
814 
815 static void aead_unmap(struct device *dev,
816 		       struct aead_edesc *edesc,
817 		       struct aead_request *req)
818 {
819 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
820 	int ivsize = crypto_aead_ivsize(aead);
821 
822 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
823 		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
824 	dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
825 }
826 
827 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
828 			   struct skcipher_request *req)
829 {
830 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
831 	int ivsize = crypto_skcipher_ivsize(skcipher);
832 
833 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,
834 		   edesc->iv_dma, ivsize, edesc->qm_sg_dma, edesc->qm_sg_bytes);
835 }
836 
/*
 * aead_done() - QI completion callback for AEAD requests.
 *
 * Translates the h/w status into an errno (-EBADMSG for an ICV check
 * failure, -EIO for any other error), releases the request's DMA
 * mappings and extended descriptor, and completes the request.
 */
static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
	struct device *qidev;
	struct aead_edesc *edesc;
	struct aead_request *aead_req = drv_req->app_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct caam_ctx *caam_ctx = crypto_aead_ctx(aead);
	int ecode = 0;

	qidev = caam_ctx->qidev;

	if (unlikely(status)) {
		u32 ssrc = status & JRSTA_SSRC_MASK;
		u8 err_id = status & JRSTA_CCBERR_ERRID_MASK;

		caam_jr_strstatus(qidev, status);
		/*
		 * verify hw auth check passed else return -EBADMSG
		 */
		if (ssrc == JRSTA_SSRC_CCB_ERROR &&
		    err_id == JRSTA_CCBERR_ERRID_ICVCHK)
			ecode = -EBADMSG;
		else
			ecode = -EIO;
	}

	/* The edesc embeds the drv_req, so recover it by container_of() */
	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	aead_unmap(qidev, edesc, aead_req);

	aead_request_complete(aead_req, ecode);
	qi_cache_free(edesc);
}
869 
/*
 * allocate and map the aead extended descriptor
 *
 * Builds everything the h/w needs for one request: maps src/dst
 * scatterlists, a DMA-able copy of the IV (when the s/w supplies it),
 * the CAAM-endian assoclen word, and the QM S/G link table; then fills
 * the frame descriptor's in/out S/G entries. On any failure, all
 * mappings made so far are undone and an ERR_PTR is returned.
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 typeof(*alg), aead);
	struct device *qidev = ctx->qidev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	dma_addr_t qm_sg_dma, iv_dma = 0;
	int ivsize = 0;
	unsigned int authsize = ctx->authsize;
	int qm_sg_index = 0, qm_sg_ents = 0, qm_sg_bytes;
	int in_len, out_len;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	/* an ERR_PTR/NULL drv_ctx is passed through to the caller */
	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct aead_edesc *)drv_ctx;

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		return ERR_PTR(-ENOMEM);
	}

	if (likely(req->src == req->dst)) {
		/* in-place: one bidirectional mapping covers input+output */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			qi_cache_free(edesc);
			return ERR_PTR(src_nents);
		}

		/* decrypt output is authsize shorter (ICV is consumed) */
		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			qi_cache_free(edesc);
			return ERR_PTR(dst_nents);
		}

		if (src_nents) {
			mapped_src_nents = dma_map_sg(qidev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(qidev, "unable to map source\n");
				qi_cache_free(edesc);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* ivsize stays 0 when the h/w generates the IV (geniv decrypt) */
	if ((alg->caam.rfc3686 && encrypt) || !alg->caam.geniv)
		ivsize = crypto_aead_ivsize(aead);

	/*
	 * Create S/G table: req->assoclen, [IV,] req->src [, req->dst].
	 * Input is not contiguous.
	 */
	qm_sg_ents = 1 + !!ivsize + mapped_src_nents +
		     (mapped_dst_nents > 1 ? mapped_dst_nents : 0);
	sg_table = &edesc->sgt[0];
	qm_sg_bytes = qm_sg_ents * sizeof(*sg_table);
	/* the link table and IV copy must fit the fixed-size cache object */
	if (unlikely(offsetof(struct aead_edesc, sgt) + qm_sg_bytes + ivsize >
		     CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	if (ivsize) {
		/* the IV copy lives directly after the S/G table */
		u8 *iv = (u8 *)(sg_table + qm_sg_ents);

		/* Make sure IV is located in a DMAable area */
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(qidev, iv_dma)) {
			dev_err(qidev, "unable to map IV\n");
			caam_unmap(qidev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			qi_cache_free(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = aead_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	/* assoclen is read by the h/w, so it needs its own mapping */
	edesc->assoclen = cpu_to_caam32(req->assoclen);
	edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
		dev_err(qidev, "unable to map assoclen\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* Populate the link table: assoclen, [IV,] src [, dst] */
	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);
	qm_sg_index++;
	if (ivsize) {
		dma_to_qm_sg_one(sg_table + qm_sg_index, iv_dma, ivsize, 0);
		qm_sg_index++;
	}
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + qm_sg_index, 0);
	qm_sg_index += mapped_src_nents;

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 qm_sg_index, 0);

	qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->qm_sg_dma = qm_sg_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;

	out_len = req->assoclen + req->cryptlen +
		  (encrypt ? ctx->authsize : (-ctx->authsize));
	in_len = 4 + ivsize + req->assoclen + req->cryptlen;

	/* fd_sgt[1] = input, fd_sgt[0] = output */
	fd_sgt = &edesc->drv_req.fd_sgt[0];
	dma_to_qm_sg_one_last_ext(&fd_sgt[1], qm_sg_dma, in_len, 0);

	if (req->dst == req->src) {
		if (mapped_src_nents == 1)
			dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->src),
					 out_len, 0);
		else
			/* output starts past the assoclen [and IV] entries */
			dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma +
					     (1 + !!ivsize) * sizeof(*sg_table),
					     out_len, 0);
	} else if (mapped_dst_nents == 1) {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len,
				 0);
	} else {
		dma_to_qm_sg_one_ext(&fd_sgt[0], qm_sg_dma + sizeof(*sg_table) *
				     qm_sg_index, out_len, 0);
	}

	return edesc;
}
1073 
1074 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1075 {
1076 	struct aead_edesc *edesc;
1077 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1078 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1079 	int ret;
1080 
1081 	if (unlikely(caam_congested))
1082 		return -EAGAIN;
1083 
1084 	/* allocate extended descriptor */
1085 	edesc = aead_edesc_alloc(req, encrypt);
1086 	if (IS_ERR_OR_NULL(edesc))
1087 		return PTR_ERR(edesc);
1088 
1089 	/* Create and submit job descriptor */
1090 	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
1091 	if (!ret) {
1092 		ret = -EINPROGRESS;
1093 	} else {
1094 		aead_unmap(ctx->qidev, edesc, req);
1095 		qi_cache_free(edesc);
1096 	}
1097 
1098 	return ret;
1099 }
1100 
/* Crypto API .encrypt entry point for the AEAD algorithms below */
static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}
1105 
/* Crypto API .decrypt entry point for the AEAD algorithms below */
static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}
1110 
1111 static int ipsec_gcm_encrypt(struct aead_request *req)
1112 {
1113 	if (req->assoclen < 8)
1114 		return -EINVAL;
1115 
1116 	return aead_crypt(req, true);
1117 }
1118 
1119 static int ipsec_gcm_decrypt(struct aead_request *req)
1120 {
1121 	if (req->assoclen < 8)
1122 		return -EINVAL;
1123 
1124 	return aead_crypt(req, false);
1125 }
1126 
/*
 * skcipher_done - completion callback invoked by the QI backend when a
 * skcipher job finishes; unmaps DMA resources, updates the request IV on
 * the encrypt path and completes the crypto request.
 * @drv_req: driver request embedded in the extended descriptor
 * @status: CAAM job status word (0 on success)
 */
static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
{
	struct skcipher_edesc *edesc;
	struct skcipher_request *req = drv_req->app_ctx;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *caam_ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = caam_ctx->qidev;
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status);
#endif

	/* Recover the enclosing extended descriptor from the driver request */
	edesc = container_of(drv_req, typeof(*edesc), drv_req);

	/* Decode the hardware status word into a human-readable message */
	if (status)
		caam_jr_strstatus(qidev, status);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
#endif

	/* Release all DMA mappings before touching the destination buffer */
	skcipher_unmap(qidev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	/*
	 * NOTE(review): only the ENCRYPT path updates req->iv here; for
	 * decryption the IV is saved before submission in skcipher_crypt().
	 */
	if (edesc->drv_req.drv_ctx->op_type == ENCRYPT)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	qi_cache_free(edesc);
	skcipher_request_complete(req, status);
}
1167 
/*
 * skcipher_edesc_alloc - allocate and DMA-map an extended descriptor for a
 * skcipher request: maps source/destination scatterlists and the IV, builds
 * the QM S/G table and fills in the frame descriptor S/G entries.
 * @req: skcipher request
 * @encrypt: true for encryption, false for decryption
 *
 * Returns the extended descriptor on success, an ERR_PTR on failure, or
 * NULL when get_drv_ctx() yields NULL (forwarded below) - callers must
 * therefore check for both.
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = ctx->qidev;
	/* Only sleep in allocations when the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, qm_sg_ents, qm_sg_bytes;
	struct qm_sg_entry *sg_table, *fd_sgt;
	struct caam_drv_ctx *drv_ctx;

	/*
	 * NOTE(review): a NULL drv_ctx is forwarded to the caller as a NULL
	 * edesc (cast below), so callers cannot rely on IS_ERR() alone.
	 */
	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
	if (unlikely(IS_ERR_OR_NULL(drv_ctx)))
		return (struct skcipher_edesc *)drv_ctx;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(qidev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (unlikely(req->src != req->dst)) {
		/* Out-of-place: map src and dst separately, one-directional */
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}

		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(qidev, "unable to map destination\n");
			dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* In-place: single bidirectional mapping of the shared list */
		mapped_src_nents = dma_map_sg(qidev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(qidev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	}

	/* Input S/G table: one entry for the IV plus the source entries */
	qm_sg_ents = 1 + mapped_src_nents;
	dst_sg_idx = qm_sg_ents;

	/* Destination entries are only needed when dst is a real S/G list */
	qm_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	qm_sg_bytes = qm_sg_ents * sizeof(struct qm_sg_entry);
	/* edesc + S/G table + IV must all fit into one cache allocation */
	if (unlikely(offsetof(struct skcipher_edesc, sgt) + qm_sg_bytes +
		     ivsize > CAAM_QI_MEMCACHE_SIZE)) {
		dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n",
			qm_sg_ents, ivsize);
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* allocate space for base edesc, link tables and IV */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc)) {
		dev_err(qidev, "could not allocate extended descriptor\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	/* Make sure IV is located in a DMAable area */
	sg_table = &edesc->sgt[0];
	iv = (u8 *)(sg_table + qm_sg_ents);
	memcpy(iv, req->iv, ivsize);

	iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, iv_dma)) {
		dev_err(qidev, "unable to map IV\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* Record everything needed for the completion path to unmap/free */
	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->qm_sg_bytes = qm_sg_bytes;
	edesc->drv_req.app_ctx = req;
	edesc->drv_req.cbk = skcipher_done;
	edesc->drv_req.drv_ctx = drv_ctx;

	/* Input table layout: [IV][src entries...] */
	dma_to_qm_sg_one(sg_table, iv_dma, ivsize, 0);
	sg_to_qm_sg_last(req->src, mapped_src_nents, sg_table + 1, 0);

	if (mapped_dst_nents > 1)
		sg_to_qm_sg_last(req->dst, mapped_dst_nents, sg_table +
				 dst_sg_idx, 0);

	edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes,
					  DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->qm_sg_dma)) {
		dev_err(qidev, "unable to map S/G table\n");
		caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	fd_sgt = &edesc->drv_req.fd_sgt[0];

	/* fd_sgt[1] = input: S/G table covering IV + plaintext/ciphertext */
	dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma,
				  ivsize + req->cryptlen, 0);

	/* fd_sgt[0] = output: point past the IV entry for in-place ops */
	if (req->src == req->dst) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma +
				     sizeof(*sg_table), req->cryptlen, 0);
	} else if (mapped_dst_nents > 1) {
		dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx *
				     sizeof(*sg_table), req->cryptlen, 0);
	} else {
		dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst),
				 req->cryptlen, 0);
	}

	return edesc;
}
1307 
1308 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1309 {
1310 	struct skcipher_edesc *edesc;
1311 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1312 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1313 	int ivsize = crypto_skcipher_ivsize(skcipher);
1314 	int ret;
1315 
1316 	if (unlikely(caam_congested))
1317 		return -EAGAIN;
1318 
1319 	/* allocate extended descriptor */
1320 	edesc = skcipher_edesc_alloc(req, encrypt);
1321 	if (IS_ERR(edesc))
1322 		return PTR_ERR(edesc);
1323 
1324 	/*
1325 	 * The crypto API expects us to set the IV (req->iv) to the last
1326 	 * ciphertext block.
1327 	 */
1328 	if (!encrypt)
1329 		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
1330 					 ivsize, ivsize, 0);
1331 
1332 	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
1333 	if (!ret) {
1334 		ret = -EINPROGRESS;
1335 	} else {
1336 		skcipher_unmap(ctx->qidev, edesc, req);
1337 		qi_cache_free(edesc);
1338 	}
1339 
1340 	return ret;
1341 }
1342 
/* Crypto API .encrypt entry point for the skcipher algorithms below */
static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}
1347 
/* Crypto API .decrypt entry point for the skcipher algorithms below */
static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}
1352 
/*
 * Table of skcipher algorithm descriptors provided by this driver: each
 * entry pairs the crypto API template with the CAAM class 1 algorithm
 * selector / AAI mode used to build the shared descriptors.
 */
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam-qi",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam-qi",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam-qi",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam-qi",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
1460 
1461 static struct caam_aead_alg driver_aeads[] = {
1462 	{
1463 		.aead = {
1464 			.base = {
1465 				.cra_name = "rfc4106(gcm(aes))",
1466 				.cra_driver_name = "rfc4106-gcm-aes-caam-qi",
1467 				.cra_blocksize = 1,
1468 			},
1469 			.setkey = rfc4106_setkey,
1470 			.setauthsize = rfc4106_setauthsize,
1471 			.encrypt = ipsec_gcm_encrypt,
1472 			.decrypt = ipsec_gcm_decrypt,
1473 			.ivsize = 8,
1474 			.maxauthsize = AES_BLOCK_SIZE,
1475 		},
1476 		.caam = {
1477 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1478 		},
1479 	},
1480 	{
1481 		.aead = {
1482 			.base = {
1483 				.cra_name = "rfc4543(gcm(aes))",
1484 				.cra_driver_name = "rfc4543-gcm-aes-caam-qi",
1485 				.cra_blocksize = 1,
1486 			},
1487 			.setkey = rfc4543_setkey,
1488 			.setauthsize = rfc4543_setauthsize,
1489 			.encrypt = ipsec_gcm_encrypt,
1490 			.decrypt = ipsec_gcm_decrypt,
1491 			.ivsize = 8,
1492 			.maxauthsize = AES_BLOCK_SIZE,
1493 		},
1494 		.caam = {
1495 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1496 		},
1497 	},
1498 	/* Galois Counter Mode */
1499 	{
1500 		.aead = {
1501 			.base = {
1502 				.cra_name = "gcm(aes)",
1503 				.cra_driver_name = "gcm-aes-caam-qi",
1504 				.cra_blocksize = 1,
1505 			},
1506 			.setkey = gcm_setkey,
1507 			.setauthsize = gcm_setauthsize,
1508 			.encrypt = aead_encrypt,
1509 			.decrypt = aead_decrypt,
1510 			.ivsize = 12,
1511 			.maxauthsize = AES_BLOCK_SIZE,
1512 		},
1513 		.caam = {
1514 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1515 		}
1516 	},
1517 	/* single-pass ipsec_esp descriptor */
1518 	{
1519 		.aead = {
1520 			.base = {
1521 				.cra_name = "authenc(hmac(md5),cbc(aes))",
1522 				.cra_driver_name = "authenc-hmac-md5-"
1523 						   "cbc-aes-caam-qi",
1524 				.cra_blocksize = AES_BLOCK_SIZE,
1525 			},
1526 			.setkey = aead_setkey,
1527 			.setauthsize = aead_setauthsize,
1528 			.encrypt = aead_encrypt,
1529 			.decrypt = aead_decrypt,
1530 			.ivsize = AES_BLOCK_SIZE,
1531 			.maxauthsize = MD5_DIGEST_SIZE,
1532 		},
1533 		.caam = {
1534 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1535 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1536 					   OP_ALG_AAI_HMAC_PRECOMP,
1537 		}
1538 	},
1539 	{
1540 		.aead = {
1541 			.base = {
1542 				.cra_name = "echainiv(authenc(hmac(md5),"
1543 					    "cbc(aes)))",
1544 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
1545 						   "cbc-aes-caam-qi",
1546 				.cra_blocksize = AES_BLOCK_SIZE,
1547 			},
1548 			.setkey = aead_setkey,
1549 			.setauthsize = aead_setauthsize,
1550 			.encrypt = aead_encrypt,
1551 			.decrypt = aead_decrypt,
1552 			.ivsize = AES_BLOCK_SIZE,
1553 			.maxauthsize = MD5_DIGEST_SIZE,
1554 		},
1555 		.caam = {
1556 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1557 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1558 					   OP_ALG_AAI_HMAC_PRECOMP,
1559 			.geniv = true,
1560 		}
1561 	},
1562 	{
1563 		.aead = {
1564 			.base = {
1565 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
1566 				.cra_driver_name = "authenc-hmac-sha1-"
1567 						   "cbc-aes-caam-qi",
1568 				.cra_blocksize = AES_BLOCK_SIZE,
1569 			},
1570 			.setkey = aead_setkey,
1571 			.setauthsize = aead_setauthsize,
1572 			.encrypt = aead_encrypt,
1573 			.decrypt = aead_decrypt,
1574 			.ivsize = AES_BLOCK_SIZE,
1575 			.maxauthsize = SHA1_DIGEST_SIZE,
1576 		},
1577 		.caam = {
1578 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1579 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1580 					   OP_ALG_AAI_HMAC_PRECOMP,
1581 		}
1582 	},
1583 	{
1584 		.aead = {
1585 			.base = {
1586 				.cra_name = "echainiv(authenc(hmac(sha1),"
1587 					    "cbc(aes)))",
1588 				.cra_driver_name = "echainiv-authenc-"
1589 						   "hmac-sha1-cbc-aes-caam-qi",
1590 				.cra_blocksize = AES_BLOCK_SIZE,
1591 			},
1592 			.setkey = aead_setkey,
1593 			.setauthsize = aead_setauthsize,
1594 			.encrypt = aead_encrypt,
1595 			.decrypt = aead_decrypt,
1596 			.ivsize = AES_BLOCK_SIZE,
1597 			.maxauthsize = SHA1_DIGEST_SIZE,
1598 		},
1599 		.caam = {
1600 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1601 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1602 					   OP_ALG_AAI_HMAC_PRECOMP,
1603 			.geniv = true,
1604 		},
1605 	},
1606 	{
1607 		.aead = {
1608 			.base = {
1609 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
1610 				.cra_driver_name = "authenc-hmac-sha224-"
1611 						   "cbc-aes-caam-qi",
1612 				.cra_blocksize = AES_BLOCK_SIZE,
1613 			},
1614 			.setkey = aead_setkey,
1615 			.setauthsize = aead_setauthsize,
1616 			.encrypt = aead_encrypt,
1617 			.decrypt = aead_decrypt,
1618 			.ivsize = AES_BLOCK_SIZE,
1619 			.maxauthsize = SHA224_DIGEST_SIZE,
1620 		},
1621 		.caam = {
1622 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1623 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1624 					   OP_ALG_AAI_HMAC_PRECOMP,
1625 		}
1626 	},
1627 	{
1628 		.aead = {
1629 			.base = {
1630 				.cra_name = "echainiv(authenc(hmac(sha224),"
1631 					    "cbc(aes)))",
1632 				.cra_driver_name = "echainiv-authenc-"
1633 						   "hmac-sha224-cbc-aes-caam-qi",
1634 				.cra_blocksize = AES_BLOCK_SIZE,
1635 			},
1636 			.setkey = aead_setkey,
1637 			.setauthsize = aead_setauthsize,
1638 			.encrypt = aead_encrypt,
1639 			.decrypt = aead_decrypt,
1640 			.ivsize = AES_BLOCK_SIZE,
1641 			.maxauthsize = SHA224_DIGEST_SIZE,
1642 		},
1643 		.caam = {
1644 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1645 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1646 					   OP_ALG_AAI_HMAC_PRECOMP,
1647 			.geniv = true,
1648 		}
1649 	},
1650 	{
1651 		.aead = {
1652 			.base = {
1653 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
1654 				.cra_driver_name = "authenc-hmac-sha256-"
1655 						   "cbc-aes-caam-qi",
1656 				.cra_blocksize = AES_BLOCK_SIZE,
1657 			},
1658 			.setkey = aead_setkey,
1659 			.setauthsize = aead_setauthsize,
1660 			.encrypt = aead_encrypt,
1661 			.decrypt = aead_decrypt,
1662 			.ivsize = AES_BLOCK_SIZE,
1663 			.maxauthsize = SHA256_DIGEST_SIZE,
1664 		},
1665 		.caam = {
1666 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1667 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1668 					   OP_ALG_AAI_HMAC_PRECOMP,
1669 		}
1670 	},
1671 	{
1672 		.aead = {
1673 			.base = {
1674 				.cra_name = "echainiv(authenc(hmac(sha256),"
1675 					    "cbc(aes)))",
1676 				.cra_driver_name = "echainiv-authenc-"
1677 						   "hmac-sha256-cbc-aes-"
1678 						   "caam-qi",
1679 				.cra_blocksize = AES_BLOCK_SIZE,
1680 			},
1681 			.setkey = aead_setkey,
1682 			.setauthsize = aead_setauthsize,
1683 			.encrypt = aead_encrypt,
1684 			.decrypt = aead_decrypt,
1685 			.ivsize = AES_BLOCK_SIZE,
1686 			.maxauthsize = SHA256_DIGEST_SIZE,
1687 		},
1688 		.caam = {
1689 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1690 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1691 					   OP_ALG_AAI_HMAC_PRECOMP,
1692 			.geniv = true,
1693 		}
1694 	},
1695 	{
1696 		.aead = {
1697 			.base = {
1698 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
1699 				.cra_driver_name = "authenc-hmac-sha384-"
1700 						   "cbc-aes-caam-qi",
1701 				.cra_blocksize = AES_BLOCK_SIZE,
1702 			},
1703 			.setkey = aead_setkey,
1704 			.setauthsize = aead_setauthsize,
1705 			.encrypt = aead_encrypt,
1706 			.decrypt = aead_decrypt,
1707 			.ivsize = AES_BLOCK_SIZE,
1708 			.maxauthsize = SHA384_DIGEST_SIZE,
1709 		},
1710 		.caam = {
1711 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1712 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1713 					   OP_ALG_AAI_HMAC_PRECOMP,
1714 		}
1715 	},
1716 	{
1717 		.aead = {
1718 			.base = {
1719 				.cra_name = "echainiv(authenc(hmac(sha384),"
1720 					    "cbc(aes)))",
1721 				.cra_driver_name = "echainiv-authenc-"
1722 						   "hmac-sha384-cbc-aes-"
1723 						   "caam-qi",
1724 				.cra_blocksize = AES_BLOCK_SIZE,
1725 			},
1726 			.setkey = aead_setkey,
1727 			.setauthsize = aead_setauthsize,
1728 			.encrypt = aead_encrypt,
1729 			.decrypt = aead_decrypt,
1730 			.ivsize = AES_BLOCK_SIZE,
1731 			.maxauthsize = SHA384_DIGEST_SIZE,
1732 		},
1733 		.caam = {
1734 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1735 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1736 					   OP_ALG_AAI_HMAC_PRECOMP,
1737 			.geniv = true,
1738 		}
1739 	},
1740 	{
1741 		.aead = {
1742 			.base = {
1743 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
1744 				.cra_driver_name = "authenc-hmac-sha512-"
1745 						   "cbc-aes-caam-qi",
1746 				.cra_blocksize = AES_BLOCK_SIZE,
1747 			},
1748 			.setkey = aead_setkey,
1749 			.setauthsize = aead_setauthsize,
1750 			.encrypt = aead_encrypt,
1751 			.decrypt = aead_decrypt,
1752 			.ivsize = AES_BLOCK_SIZE,
1753 			.maxauthsize = SHA512_DIGEST_SIZE,
1754 		},
1755 		.caam = {
1756 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1757 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1758 					   OP_ALG_AAI_HMAC_PRECOMP,
1759 		}
1760 	},
1761 	{
1762 		.aead = {
1763 			.base = {
1764 				.cra_name = "echainiv(authenc(hmac(sha512),"
1765 					    "cbc(aes)))",
1766 				.cra_driver_name = "echainiv-authenc-"
1767 						   "hmac-sha512-cbc-aes-"
1768 						   "caam-qi",
1769 				.cra_blocksize = AES_BLOCK_SIZE,
1770 			},
1771 			.setkey = aead_setkey,
1772 			.setauthsize = aead_setauthsize,
1773 			.encrypt = aead_encrypt,
1774 			.decrypt = aead_decrypt,
1775 			.ivsize = AES_BLOCK_SIZE,
1776 			.maxauthsize = SHA512_DIGEST_SIZE,
1777 		},
1778 		.caam = {
1779 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1780 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1781 					   OP_ALG_AAI_HMAC_PRECOMP,
1782 			.geniv = true,
1783 		}
1784 	},
1785 	{
1786 		.aead = {
1787 			.base = {
1788 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
1789 				.cra_driver_name = "authenc-hmac-md5-"
1790 						   "cbc-des3_ede-caam-qi",
1791 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1792 			},
1793 			.setkey = aead_setkey,
1794 			.setauthsize = aead_setauthsize,
1795 			.encrypt = aead_encrypt,
1796 			.decrypt = aead_decrypt,
1797 			.ivsize = DES3_EDE_BLOCK_SIZE,
1798 			.maxauthsize = MD5_DIGEST_SIZE,
1799 		},
1800 		.caam = {
1801 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1802 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1803 					   OP_ALG_AAI_HMAC_PRECOMP,
1804 		}
1805 	},
1806 	{
1807 		.aead = {
1808 			.base = {
1809 				.cra_name = "echainiv(authenc(hmac(md5),"
1810 					    "cbc(des3_ede)))",
1811 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
1812 						   "cbc-des3_ede-caam-qi",
1813 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1814 			},
1815 			.setkey = aead_setkey,
1816 			.setauthsize = aead_setauthsize,
1817 			.encrypt = aead_encrypt,
1818 			.decrypt = aead_decrypt,
1819 			.ivsize = DES3_EDE_BLOCK_SIZE,
1820 			.maxauthsize = MD5_DIGEST_SIZE,
1821 		},
1822 		.caam = {
1823 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1824 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1825 					   OP_ALG_AAI_HMAC_PRECOMP,
1826 			.geniv = true,
1827 		}
1828 	},
1829 	{
1830 		.aead = {
1831 			.base = {
1832 				.cra_name = "authenc(hmac(sha1),"
1833 					    "cbc(des3_ede))",
1834 				.cra_driver_name = "authenc-hmac-sha1-"
1835 						   "cbc-des3_ede-caam-qi",
1836 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1837 			},
1838 			.setkey = aead_setkey,
1839 			.setauthsize = aead_setauthsize,
1840 			.encrypt = aead_encrypt,
1841 			.decrypt = aead_decrypt,
1842 			.ivsize = DES3_EDE_BLOCK_SIZE,
1843 			.maxauthsize = SHA1_DIGEST_SIZE,
1844 		},
1845 		.caam = {
1846 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1847 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1848 					   OP_ALG_AAI_HMAC_PRECOMP,
1849 		},
1850 	},
1851 	{
1852 		.aead = {
1853 			.base = {
1854 				.cra_name = "echainiv(authenc(hmac(sha1),"
1855 					    "cbc(des3_ede)))",
1856 				.cra_driver_name = "echainiv-authenc-"
1857 						   "hmac-sha1-"
1858 						   "cbc-des3_ede-caam-qi",
1859 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1860 			},
1861 			.setkey = aead_setkey,
1862 			.setauthsize = aead_setauthsize,
1863 			.encrypt = aead_encrypt,
1864 			.decrypt = aead_decrypt,
1865 			.ivsize = DES3_EDE_BLOCK_SIZE,
1866 			.maxauthsize = SHA1_DIGEST_SIZE,
1867 		},
1868 		.caam = {
1869 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1870 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1871 					   OP_ALG_AAI_HMAC_PRECOMP,
1872 			.geniv = true,
1873 		}
1874 	},
1875 	{
1876 		.aead = {
1877 			.base = {
1878 				.cra_name = "authenc(hmac(sha224),"
1879 					    "cbc(des3_ede))",
1880 				.cra_driver_name = "authenc-hmac-sha224-"
1881 						   "cbc-des3_ede-caam-qi",
1882 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1883 			},
1884 			.setkey = aead_setkey,
1885 			.setauthsize = aead_setauthsize,
1886 			.encrypt = aead_encrypt,
1887 			.decrypt = aead_decrypt,
1888 			.ivsize = DES3_EDE_BLOCK_SIZE,
1889 			.maxauthsize = SHA224_DIGEST_SIZE,
1890 		},
1891 		.caam = {
1892 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1893 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1894 					   OP_ALG_AAI_HMAC_PRECOMP,
1895 		},
1896 	},
1897 	{
1898 		.aead = {
1899 			.base = {
1900 				.cra_name = "echainiv(authenc(hmac(sha224),"
1901 					    "cbc(des3_ede)))",
1902 				.cra_driver_name = "echainiv-authenc-"
1903 						   "hmac-sha224-"
1904 						   "cbc-des3_ede-caam-qi",
1905 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1906 			},
1907 			.setkey = aead_setkey,
1908 			.setauthsize = aead_setauthsize,
1909 			.encrypt = aead_encrypt,
1910 			.decrypt = aead_decrypt,
1911 			.ivsize = DES3_EDE_BLOCK_SIZE,
1912 			.maxauthsize = SHA224_DIGEST_SIZE,
1913 		},
1914 		.caam = {
1915 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1916 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1917 					   OP_ALG_AAI_HMAC_PRECOMP,
1918 			.geniv = true,
1919 		}
1920 	},
1921 	{
1922 		.aead = {
1923 			.base = {
1924 				.cra_name = "authenc(hmac(sha256),"
1925 					    "cbc(des3_ede))",
1926 				.cra_driver_name = "authenc-hmac-sha256-"
1927 						   "cbc-des3_ede-caam-qi",
1928 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1929 			},
1930 			.setkey = aead_setkey,
1931 			.setauthsize = aead_setauthsize,
1932 			.encrypt = aead_encrypt,
1933 			.decrypt = aead_decrypt,
1934 			.ivsize = DES3_EDE_BLOCK_SIZE,
1935 			.maxauthsize = SHA256_DIGEST_SIZE,
1936 		},
1937 		.caam = {
1938 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1939 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1940 					   OP_ALG_AAI_HMAC_PRECOMP,
1941 		},
1942 	},
1943 	{
1944 		.aead = {
1945 			.base = {
1946 				.cra_name = "echainiv(authenc(hmac(sha256),"
1947 					    "cbc(des3_ede)))",
1948 				.cra_driver_name = "echainiv-authenc-"
1949 						   "hmac-sha256-"
1950 						   "cbc-des3_ede-caam-qi",
1951 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1952 			},
1953 			.setkey = aead_setkey,
1954 			.setauthsize = aead_setauthsize,
1955 			.encrypt = aead_encrypt,
1956 			.decrypt = aead_decrypt,
1957 			.ivsize = DES3_EDE_BLOCK_SIZE,
1958 			.maxauthsize = SHA256_DIGEST_SIZE,
1959 		},
1960 		.caam = {
1961 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1962 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1963 					   OP_ALG_AAI_HMAC_PRECOMP,
1964 			.geniv = true,
1965 		}
1966 	},
1967 	{
1968 		.aead = {
1969 			.base = {
1970 				.cra_name = "authenc(hmac(sha384),"
1971 					    "cbc(des3_ede))",
1972 				.cra_driver_name = "authenc-hmac-sha384-"
1973 						   "cbc-des3_ede-caam-qi",
1974 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1975 			},
1976 			.setkey = aead_setkey,
1977 			.setauthsize = aead_setauthsize,
1978 			.encrypt = aead_encrypt,
1979 			.decrypt = aead_decrypt,
1980 			.ivsize = DES3_EDE_BLOCK_SIZE,
1981 			.maxauthsize = SHA384_DIGEST_SIZE,
1982 		},
1983 		.caam = {
1984 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1985 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1986 					   OP_ALG_AAI_HMAC_PRECOMP,
1987 		},
1988 	},
1989 	{
1990 		.aead = {
1991 			.base = {
1992 				.cra_name = "echainiv(authenc(hmac(sha384),"
1993 					    "cbc(des3_ede)))",
1994 				.cra_driver_name = "echainiv-authenc-"
1995 						   "hmac-sha384-"
1996 						   "cbc-des3_ede-caam-qi",
1997 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1998 			},
1999 			.setkey = aead_setkey,
2000 			.setauthsize = aead_setauthsize,
2001 			.encrypt = aead_encrypt,
2002 			.decrypt = aead_decrypt,
2003 			.ivsize = DES3_EDE_BLOCK_SIZE,
2004 			.maxauthsize = SHA384_DIGEST_SIZE,
2005 		},
2006 		.caam = {
2007 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2008 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2009 					   OP_ALG_AAI_HMAC_PRECOMP,
2010 			.geniv = true,
2011 		}
2012 	},
2013 	{
2014 		.aead = {
2015 			.base = {
2016 				.cra_name = "authenc(hmac(sha512),"
2017 					    "cbc(des3_ede))",
2018 				.cra_driver_name = "authenc-hmac-sha512-"
2019 						   "cbc-des3_ede-caam-qi",
2020 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2021 			},
2022 			.setkey = aead_setkey,
2023 			.setauthsize = aead_setauthsize,
2024 			.encrypt = aead_encrypt,
2025 			.decrypt = aead_decrypt,
2026 			.ivsize = DES3_EDE_BLOCK_SIZE,
2027 			.maxauthsize = SHA512_DIGEST_SIZE,
2028 		},
2029 		.caam = {
2030 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2031 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2032 					   OP_ALG_AAI_HMAC_PRECOMP,
2033 		},
2034 	},
2035 	{
2036 		.aead = {
2037 			.base = {
2038 				.cra_name = "echainiv(authenc(hmac(sha512),"
2039 					    "cbc(des3_ede)))",
2040 				.cra_driver_name = "echainiv-authenc-"
2041 						   "hmac-sha512-"
2042 						   "cbc-des3_ede-caam-qi",
2043 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2044 			},
2045 			.setkey = aead_setkey,
2046 			.setauthsize = aead_setauthsize,
2047 			.encrypt = aead_encrypt,
2048 			.decrypt = aead_decrypt,
2049 			.ivsize = DES3_EDE_BLOCK_SIZE,
2050 			.maxauthsize = SHA512_DIGEST_SIZE,
2051 		},
2052 		.caam = {
2053 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2054 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2055 					   OP_ALG_AAI_HMAC_PRECOMP,
2056 			.geniv = true,
2057 		}
2058 	},
2059 	{
2060 		.aead = {
2061 			.base = {
2062 				.cra_name = "authenc(hmac(md5),cbc(des))",
2063 				.cra_driver_name = "authenc-hmac-md5-"
2064 						   "cbc-des-caam-qi",
2065 				.cra_blocksize = DES_BLOCK_SIZE,
2066 			},
2067 			.setkey = aead_setkey,
2068 			.setauthsize = aead_setauthsize,
2069 			.encrypt = aead_encrypt,
2070 			.decrypt = aead_decrypt,
2071 			.ivsize = DES_BLOCK_SIZE,
2072 			.maxauthsize = MD5_DIGEST_SIZE,
2073 		},
2074 		.caam = {
2075 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2076 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2077 					   OP_ALG_AAI_HMAC_PRECOMP,
2078 		},
2079 	},
2080 	{
2081 		.aead = {
2082 			.base = {
2083 				.cra_name = "echainiv(authenc(hmac(md5),"
2084 					    "cbc(des)))",
2085 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2086 						   "cbc-des-caam-qi",
2087 				.cra_blocksize = DES_BLOCK_SIZE,
2088 			},
2089 			.setkey = aead_setkey,
2090 			.setauthsize = aead_setauthsize,
2091 			.encrypt = aead_encrypt,
2092 			.decrypt = aead_decrypt,
2093 			.ivsize = DES_BLOCK_SIZE,
2094 			.maxauthsize = MD5_DIGEST_SIZE,
2095 		},
2096 		.caam = {
2097 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2098 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2099 					   OP_ALG_AAI_HMAC_PRECOMP,
2100 			.geniv = true,
2101 		}
2102 	},
2103 	{
2104 		.aead = {
2105 			.base = {
2106 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2107 				.cra_driver_name = "authenc-hmac-sha1-"
2108 						   "cbc-des-caam-qi",
2109 				.cra_blocksize = DES_BLOCK_SIZE,
2110 			},
2111 			.setkey = aead_setkey,
2112 			.setauthsize = aead_setauthsize,
2113 			.encrypt = aead_encrypt,
2114 			.decrypt = aead_decrypt,
2115 			.ivsize = DES_BLOCK_SIZE,
2116 			.maxauthsize = SHA1_DIGEST_SIZE,
2117 		},
2118 		.caam = {
2119 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2120 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2121 					   OP_ALG_AAI_HMAC_PRECOMP,
2122 		},
2123 	},
2124 	{
2125 		.aead = {
2126 			.base = {
2127 				.cra_name = "echainiv(authenc(hmac(sha1),"
2128 					    "cbc(des)))",
2129 				.cra_driver_name = "echainiv-authenc-"
2130 						   "hmac-sha1-cbc-des-caam-qi",
2131 				.cra_blocksize = DES_BLOCK_SIZE,
2132 			},
2133 			.setkey = aead_setkey,
2134 			.setauthsize = aead_setauthsize,
2135 			.encrypt = aead_encrypt,
2136 			.decrypt = aead_decrypt,
2137 			.ivsize = DES_BLOCK_SIZE,
2138 			.maxauthsize = SHA1_DIGEST_SIZE,
2139 		},
2140 		.caam = {
2141 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2142 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2143 					   OP_ALG_AAI_HMAC_PRECOMP,
2144 			.geniv = true,
2145 		}
2146 	},
2147 	{
2148 		.aead = {
2149 			.base = {
2150 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2151 				.cra_driver_name = "authenc-hmac-sha224-"
2152 						   "cbc-des-caam-qi",
2153 				.cra_blocksize = DES_BLOCK_SIZE,
2154 			},
2155 			.setkey = aead_setkey,
2156 			.setauthsize = aead_setauthsize,
2157 			.encrypt = aead_encrypt,
2158 			.decrypt = aead_decrypt,
2159 			.ivsize = DES_BLOCK_SIZE,
2160 			.maxauthsize = SHA224_DIGEST_SIZE,
2161 		},
2162 		.caam = {
2163 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2164 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2165 					   OP_ALG_AAI_HMAC_PRECOMP,
2166 		},
2167 	},
2168 	{
2169 		.aead = {
2170 			.base = {
2171 				.cra_name = "echainiv(authenc(hmac(sha224),"
2172 					    "cbc(des)))",
2173 				.cra_driver_name = "echainiv-authenc-"
2174 						   "hmac-sha224-cbc-des-"
2175 						   "caam-qi",
2176 				.cra_blocksize = DES_BLOCK_SIZE,
2177 			},
2178 			.setkey = aead_setkey,
2179 			.setauthsize = aead_setauthsize,
2180 			.encrypt = aead_encrypt,
2181 			.decrypt = aead_decrypt,
2182 			.ivsize = DES_BLOCK_SIZE,
2183 			.maxauthsize = SHA224_DIGEST_SIZE,
2184 		},
2185 		.caam = {
2186 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2187 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2188 					   OP_ALG_AAI_HMAC_PRECOMP,
2189 			.geniv = true,
2190 		}
2191 	},
2192 	{
2193 		.aead = {
2194 			.base = {
2195 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2196 				.cra_driver_name = "authenc-hmac-sha256-"
2197 						   "cbc-des-caam-qi",
2198 				.cra_blocksize = DES_BLOCK_SIZE,
2199 			},
2200 			.setkey = aead_setkey,
2201 			.setauthsize = aead_setauthsize,
2202 			.encrypt = aead_encrypt,
2203 			.decrypt = aead_decrypt,
2204 			.ivsize = DES_BLOCK_SIZE,
2205 			.maxauthsize = SHA256_DIGEST_SIZE,
2206 		},
2207 		.caam = {
2208 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2209 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2210 					   OP_ALG_AAI_HMAC_PRECOMP,
2211 		},
2212 	},
2213 	{
2214 		.aead = {
2215 			.base = {
2216 				.cra_name = "echainiv(authenc(hmac(sha256),"
2217 					    "cbc(des)))",
2218 				.cra_driver_name = "echainiv-authenc-"
2219 						   "hmac-sha256-cbc-des-"
2220 						   "caam-qi",
2221 				.cra_blocksize = DES_BLOCK_SIZE,
2222 			},
2223 			.setkey = aead_setkey,
2224 			.setauthsize = aead_setauthsize,
2225 			.encrypt = aead_encrypt,
2226 			.decrypt = aead_decrypt,
2227 			.ivsize = DES_BLOCK_SIZE,
2228 			.maxauthsize = SHA256_DIGEST_SIZE,
2229 		},
2230 		.caam = {
2231 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2232 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2233 					   OP_ALG_AAI_HMAC_PRECOMP,
2234 			.geniv = true,
2235 		},
2236 	},
2237 	{
2238 		.aead = {
2239 			.base = {
2240 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2241 				.cra_driver_name = "authenc-hmac-sha384-"
2242 						   "cbc-des-caam-qi",
2243 				.cra_blocksize = DES_BLOCK_SIZE,
2244 			},
2245 			.setkey = aead_setkey,
2246 			.setauthsize = aead_setauthsize,
2247 			.encrypt = aead_encrypt,
2248 			.decrypt = aead_decrypt,
2249 			.ivsize = DES_BLOCK_SIZE,
2250 			.maxauthsize = SHA384_DIGEST_SIZE,
2251 		},
2252 		.caam = {
2253 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2254 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2255 					   OP_ALG_AAI_HMAC_PRECOMP,
2256 		},
2257 	},
2258 	{
2259 		.aead = {
2260 			.base = {
2261 				.cra_name = "echainiv(authenc(hmac(sha384),"
2262 					    "cbc(des)))",
2263 				.cra_driver_name = "echainiv-authenc-"
2264 						   "hmac-sha384-cbc-des-"
2265 						   "caam-qi",
2266 				.cra_blocksize = DES_BLOCK_SIZE,
2267 			},
2268 			.setkey = aead_setkey,
2269 			.setauthsize = aead_setauthsize,
2270 			.encrypt = aead_encrypt,
2271 			.decrypt = aead_decrypt,
2272 			.ivsize = DES_BLOCK_SIZE,
2273 			.maxauthsize = SHA384_DIGEST_SIZE,
2274 		},
2275 		.caam = {
2276 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2277 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2278 					   OP_ALG_AAI_HMAC_PRECOMP,
2279 			.geniv = true,
2280 		}
2281 	},
2282 	{
2283 		.aead = {
2284 			.base = {
2285 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2286 				.cra_driver_name = "authenc-hmac-sha512-"
2287 						   "cbc-des-caam-qi",
2288 				.cra_blocksize = DES_BLOCK_SIZE,
2289 			},
2290 			.setkey = aead_setkey,
2291 			.setauthsize = aead_setauthsize,
2292 			.encrypt = aead_encrypt,
2293 			.decrypt = aead_decrypt,
2294 			.ivsize = DES_BLOCK_SIZE,
2295 			.maxauthsize = SHA512_DIGEST_SIZE,
2296 		},
2297 		.caam = {
2298 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2299 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2300 					   OP_ALG_AAI_HMAC_PRECOMP,
2301 		}
2302 	},
2303 	{
2304 		.aead = {
2305 			.base = {
2306 				.cra_name = "echainiv(authenc(hmac(sha512),"
2307 					    "cbc(des)))",
2308 				.cra_driver_name = "echainiv-authenc-"
2309 						   "hmac-sha512-cbc-des-"
2310 						   "caam-qi",
2311 				.cra_blocksize = DES_BLOCK_SIZE,
2312 			},
2313 			.setkey = aead_setkey,
2314 			.setauthsize = aead_setauthsize,
2315 			.encrypt = aead_encrypt,
2316 			.decrypt = aead_decrypt,
2317 			.ivsize = DES_BLOCK_SIZE,
2318 			.maxauthsize = SHA512_DIGEST_SIZE,
2319 		},
2320 		.caam = {
2321 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2322 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2323 					   OP_ALG_AAI_HMAC_PRECOMP,
2324 			.geniv = true,
2325 		}
2326 	},
2327 };
2328 
2329 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
2330 			    bool uses_dkp)
2331 {
2332 	struct caam_drv_private *priv;
2333 
2334 	/*
2335 	 * distribute tfms across job rings to ensure in-order
2336 	 * crypto request processing per tfm
2337 	 */
2338 	ctx->jrdev = caam_jr_alloc();
2339 	if (IS_ERR(ctx->jrdev)) {
2340 		pr_err("Job Ring Device allocation for transform failed\n");
2341 		return PTR_ERR(ctx->jrdev);
2342 	}
2343 
2344 	priv = dev_get_drvdata(ctx->jrdev->parent);
2345 	if (priv->era >= 6 && uses_dkp)
2346 		ctx->dir = DMA_BIDIRECTIONAL;
2347 	else
2348 		ctx->dir = DMA_TO_DEVICE;
2349 
2350 	ctx->key_dma = dma_map_single(ctx->jrdev, ctx->key, sizeof(ctx->key),
2351 				      ctx->dir);
2352 	if (dma_mapping_error(ctx->jrdev, ctx->key_dma)) {
2353 		dev_err(ctx->jrdev, "unable to map key\n");
2354 		caam_jr_free(ctx->jrdev);
2355 		return -ENOMEM;
2356 	}
2357 
2358 	/* copy descriptor header template value */
2359 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
2360 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
2361 
2362 	ctx->qidev = priv->qidev;
2363 
2364 	spin_lock_init(&ctx->lock);
2365 	ctx->drv_ctx[ENCRYPT] = NULL;
2366 	ctx->drv_ctx[DECRYPT] = NULL;
2367 
2368 	return 0;
2369 }
2370 
2371 static int caam_cra_init(struct crypto_skcipher *tfm)
2372 {
2373 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
2374 	struct caam_skcipher_alg *caam_alg =
2375 		container_of(alg, typeof(*caam_alg), skcipher);
2376 
2377 	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
2378 				false);
2379 }
2380 
2381 static int caam_aead_init(struct crypto_aead *tfm)
2382 {
2383 	struct aead_alg *alg = crypto_aead_alg(tfm);
2384 	struct caam_aead_alg *caam_alg = container_of(alg, typeof(*caam_alg),
2385 						      aead);
2386 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
2387 
2388 	return caam_init_common(ctx, &caam_alg->caam,
2389 				alg->setkey == aead_setkey);
2390 }
2391 
2392 static void caam_exit_common(struct caam_ctx *ctx)
2393 {
2394 	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
2395 	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
2396 
2397 	dma_unmap_single(ctx->jrdev, ctx->key_dma, sizeof(ctx->key), ctx->dir);
2398 
2399 	caam_jr_free(ctx->jrdev);
2400 }
2401 
/* skcipher ->exit hook */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	caam_exit_common(ctx);
}
2406 
/* AEAD ->exit hook */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
2411 
2412 static void __exit caam_qi_algapi_exit(void)
2413 {
2414 	int i;
2415 
2416 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2417 		struct caam_aead_alg *t_alg = driver_aeads + i;
2418 
2419 		if (t_alg->registered)
2420 			crypto_unregister_aead(&t_alg->aead);
2421 	}
2422 
2423 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2424 		struct caam_skcipher_alg *t_alg = driver_algs + i;
2425 
2426 		if (t_alg->registered)
2427 			crypto_unregister_skcipher(&t_alg->skcipher);
2428 	}
2429 }
2430 
2431 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
2432 {
2433 	struct skcipher_alg *alg = &t_alg->skcipher;
2434 
2435 	alg->base.cra_module = THIS_MODULE;
2436 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
2437 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2438 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
2439 
2440 	alg->init = caam_cra_init;
2441 	alg->exit = caam_cra_exit;
2442 }
2443 
2444 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
2445 {
2446 	struct aead_alg *alg = &t_alg->aead;
2447 
2448 	alg->base.cra_module = THIS_MODULE;
2449 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
2450 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
2451 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
2452 
2453 	alg->init = caam_aead_init;
2454 	alg->exit = caam_aead_exit;
2455 }
2456 
2457 static int __init caam_qi_algapi_init(void)
2458 {
2459 	struct device_node *dev_node;
2460 	struct platform_device *pdev;
2461 	struct device *ctrldev;
2462 	struct caam_drv_private *priv;
2463 	int i = 0, err = 0;
2464 	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
2465 	unsigned int md_limit = SHA512_DIGEST_SIZE;
2466 	bool registered = false;
2467 
2468 	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
2469 	if (!dev_node) {
2470 		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
2471 		if (!dev_node)
2472 			return -ENODEV;
2473 	}
2474 
2475 	pdev = of_find_device_by_node(dev_node);
2476 	of_node_put(dev_node);
2477 	if (!pdev)
2478 		return -ENODEV;
2479 
2480 	ctrldev = &pdev->dev;
2481 	priv = dev_get_drvdata(ctrldev);
2482 
2483 	/*
2484 	 * If priv is NULL, it's probably because the caam driver wasn't
2485 	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
2486 	 */
2487 	if (!priv || !priv->qi_present)
2488 		return -ENODEV;
2489 
2490 	if (caam_dpaa2) {
2491 		dev_info(ctrldev, "caam/qi frontend driver not suitable for DPAA 2.x, aborting...\n");
2492 		return -ENODEV;
2493 	}
2494 
2495 	/*
2496 	 * Register crypto algorithms the device supports.
2497 	 * First, detect presence and attributes of DES, AES, and MD blocks.
2498 	 */
2499 	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
2500 	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
2501 	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
2502 	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
2503 	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2504 
2505 	/* If MD is present, limit digest size based on LP256 */
2506 	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
2507 		md_limit = SHA256_DIGEST_SIZE;
2508 
2509 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2510 		struct caam_skcipher_alg *t_alg = driver_algs + i;
2511 		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
2512 
2513 		/* Skip DES algorithms if not supported by device */
2514 		if (!des_inst &&
2515 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
2516 		     (alg_sel == OP_ALG_ALGSEL_DES)))
2517 			continue;
2518 
2519 		/* Skip AES algorithms if not supported by device */
2520 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
2521 			continue;
2522 
2523 		caam_skcipher_alg_init(t_alg);
2524 
2525 		err = crypto_register_skcipher(&t_alg->skcipher);
2526 		if (err) {
2527 			dev_warn(priv->qidev, "%s alg registration failed\n",
2528 				 t_alg->skcipher.base.cra_driver_name);
2529 			continue;
2530 		}
2531 
2532 		t_alg->registered = true;
2533 		registered = true;
2534 	}
2535 
2536 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
2537 		struct caam_aead_alg *t_alg = driver_aeads + i;
2538 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
2539 				 OP_ALG_ALGSEL_MASK;
2540 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
2541 				 OP_ALG_ALGSEL_MASK;
2542 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
2543 
2544 		/* Skip DES algorithms if not supported by device */
2545 		if (!des_inst &&
2546 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
2547 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
2548 			continue;
2549 
2550 		/* Skip AES algorithms if not supported by device */
2551 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
2552 			continue;
2553 
2554 		/*
2555 		 * Check support for AES algorithms not available
2556 		 * on LP devices.
2557 		 */
2558 		if (((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) &&
2559 		    (alg_aai == OP_ALG_AAI_GCM))
2560 			continue;
2561 
2562 		/*
2563 		 * Skip algorithms requiring message digests
2564 		 * if MD or MD size is not supported by device.
2565 		 */
2566 		if (c2_alg_sel &&
2567 		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
2568 			continue;
2569 
2570 		caam_aead_alg_init(t_alg);
2571 
2572 		err = crypto_register_aead(&t_alg->aead);
2573 		if (err) {
2574 			pr_warn("%s alg registration failed\n",
2575 				t_alg->aead.base.cra_driver_name);
2576 			continue;
2577 		}
2578 
2579 		t_alg->registered = true;
2580 		registered = true;
2581 	}
2582 
2583 	if (registered)
2584 		dev_info(priv->qidev, "algorithms registered in /proc/crypto\n");
2585 
2586 	return err;
2587 }
2588 
/* Module entry/exit points and metadata */
module_init(caam_qi_algapi_init);
module_exit(caam_qi_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Support for crypto API using CAAM-QI backend");
MODULE_AUTHOR("Freescale Semiconductor");
2595