1 // SPDX-License-Identifier: GPL-2.0+
3 * caam - Freescale FSL CAAM support for crypto API
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2019, 2023 NXP
8 * Based on talitos crypto API driver.
 * [diagram elided in this listing: relationship of job descriptors to
 *  shared descriptors — JobDesc #1/#2/#3 (one per packet) each point to
 *  the same ShareDesc, which holds the per-session material such as
 *  (hashKey) and (operation)]
 *
 * [diagram elided: job descriptor layout — header, ShareDesc pointer,
 *  then SEQ_OUT_PTR/SEQ_IN_PTR with the output/input buffer and length]
60 #include <crypto/internal/aead.h>
61 #include <crypto/internal/engine.h>
62 #include <crypto/internal/skcipher.h>
63 #include <crypto/xts.h>
64 #include <linux/dma-mapping.h>
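
/*
 * Editorial sketch (not part of caamalg.c): the header comment above
 * describes the split between per-request job descriptors and the
 * long-lived shared descriptor.  The minimal job descriptor below only
 * carries a pointer to the prebuilt shared descriptor plus the input and
 * output sequence pointers; keys and the OPERATION command live in the
 * shared descriptor.  Helper names follow desc_constr.h as used later in
 * this file; the function itself is illustrative only.
 */
static void example_build_job_desc(u32 *desc, dma_addr_t sh_desc_dma,
				   int sh_desc_len, dma_addr_t src_dma,
				   dma_addr_t dst_dma, unsigned int len)
{
	/* job header word plus pointer to the shared descriptor */
	init_job_desc_shared(desc, sh_desc_dma, sh_desc_len,
			     HDR_SHARE_DEFER | HDR_REVERSE);
	/* where CAAM reads the packet from ... */
	append_seq_in_ptr(desc, src_dma, len, 0);
	/* ... and where it writes the processed result */
	append_seq_out_ptr(desc, dst_dma, len, 0);
}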
73 * crypto alg
89 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
113 * per-session context
143 struct device *jrdev = ctx->jrdev; in aead_null_set_sh_desc()
144 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_null_set_sh_desc()
146 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - in aead_null_set_sh_desc()
147 ctx->adata.keylen_pad; in aead_null_set_sh_desc()
151 * must all fit into the 64-word Descriptor h/w Buffer in aead_null_set_sh_desc()
154 ctx->adata.key_inline = true; in aead_null_set_sh_desc()
155 ctx->adata.key_virt = ctx->key; in aead_null_set_sh_desc()
157 ctx->adata.key_inline = false; in aead_null_set_sh_desc()
158 ctx->adata.key_dma = ctx->key_dma; in aead_null_set_sh_desc()
162 desc = ctx->sh_desc_enc; in aead_null_set_sh_desc()
163 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize, in aead_null_set_sh_desc()
164 ctrlpriv->era); in aead_null_set_sh_desc()
165 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_null_set_sh_desc()
166 desc_bytes(desc), ctx->dir); in aead_null_set_sh_desc()
170 * must all fit into the 64-word Descriptor h/w Buffer in aead_null_set_sh_desc()
173 ctx->adata.key_inline = true; in aead_null_set_sh_desc()
174 ctx->adata.key_virt = ctx->key; in aead_null_set_sh_desc()
176 ctx->adata.key_inline = false; in aead_null_set_sh_desc()
177 ctx->adata.key_dma = ctx->key_dma; in aead_null_set_sh_desc()
181 desc = ctx->sh_desc_dec; in aead_null_set_sh_desc()
182 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize, in aead_null_set_sh_desc()
183 ctrlpriv->era); in aead_null_set_sh_desc()
184 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_null_set_sh_desc()
185 desc_bytes(desc), ctx->dir); in aead_null_set_sh_desc()
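
/*
 * Editorial worked example of the "fits into the 64-word Descriptor h/w
 * Buffer" checks above: a shared descriptor may hold at most 64 32-bit
 * words (CAAM_DESC_BYTES_MAX = 256 bytes), so the split authentication key
 * is embedded inline only when the budget left after the job I/O commands
 * still covers the descriptor body; otherwise only the key's DMA address
 * is referenced.  This helper is a sketch, not driver code, and
 * desc_cmd_bytes stands in for macros such as DESC_AEAD_NULL_ENC_LEN.
 */
static bool example_key_fits_inline(int desc_cmd_bytes, int keylen_pad)
{
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - keylen_pad;

	return rem_bytes >= desc_cmd_bytes;
}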
197 struct device *jrdev = ctx->jrdev; in aead_set_sh_desc()
198 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_set_sh_desc()
203 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == in aead_set_sh_desc()
205 const bool is_rfc3686 = alg->caam.rfc3686; in aead_set_sh_desc()
207 if (!ctx->authsize) in aead_set_sh_desc()
211 if (!ctx->cdata.keylen) in aead_set_sh_desc()
215 * AES-CTR needs to load IV in CONTEXT1 reg in aead_set_sh_desc()
228 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + in aead_set_sh_desc()
229 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); in aead_set_sh_desc()
238 ctx->adata.key_virt = ctx->key; in aead_set_sh_desc()
239 ctx->adata.key_dma = ctx->key_dma; in aead_set_sh_desc()
241 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; in aead_set_sh_desc()
242 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; in aead_set_sh_desc()
244 data_len[0] = ctx->adata.keylen_pad; in aead_set_sh_desc()
245 data_len[1] = ctx->cdata.keylen; in aead_set_sh_desc()
247 if (alg->caam.geniv) in aead_set_sh_desc()
252 * must all fit into the 64-word Descriptor h/w Buffer in aead_set_sh_desc()
258 return -EINVAL; in aead_set_sh_desc()
260 ctx->adata.key_inline = !!(inl_mask & 1); in aead_set_sh_desc()
261 ctx->cdata.key_inline = !!(inl_mask & 2); in aead_set_sh_desc()
264 desc = ctx->sh_desc_enc; in aead_set_sh_desc()
265 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, in aead_set_sh_desc()
266 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, in aead_set_sh_desc()
267 false, ctrlpriv->era); in aead_set_sh_desc()
268 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
269 desc_bytes(desc), ctx->dir); in aead_set_sh_desc()
274 * must all fit into the 64-word Descriptor h/w Buffer in aead_set_sh_desc()
280 return -EINVAL; in aead_set_sh_desc()
282 ctx->adata.key_inline = !!(inl_mask & 1); in aead_set_sh_desc()
283 ctx->cdata.key_inline = !!(inl_mask & 2); in aead_set_sh_desc()
286 desc = ctx->sh_desc_dec; in aead_set_sh_desc()
287 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, in aead_set_sh_desc()
288 ctx->authsize, alg->caam.geniv, is_rfc3686, in aead_set_sh_desc()
289 nonce, ctx1_iv_off, false, ctrlpriv->era); in aead_set_sh_desc()
290 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_set_sh_desc()
291 desc_bytes(desc), ctx->dir); in aead_set_sh_desc()
293 if (!alg->caam.geniv) in aead_set_sh_desc()
298 * must all fit into the 64-word Descriptor h/w Buffer in aead_set_sh_desc()
304 return -EINVAL; in aead_set_sh_desc()
306 ctx->adata.key_inline = !!(inl_mask & 1); in aead_set_sh_desc()
307 ctx->cdata.key_inline = !!(inl_mask & 2); in aead_set_sh_desc()
310 desc = ctx->sh_desc_enc; in aead_set_sh_desc()
311 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, in aead_set_sh_desc()
312 ctx->authsize, is_rfc3686, nonce, in aead_set_sh_desc()
313 ctx1_iv_off, false, ctrlpriv->era); in aead_set_sh_desc()
314 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
315 desc_bytes(desc), ctx->dir); in aead_set_sh_desc()
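
/*
 * Editorial sketch of the CONTEXT1 IV offset logic referenced above (an
 * assumed reconstruction, not the elided driver lines): plain AES-CTR
 * keeps its IV in the upper half of the CONTEXT1 register, i.e. at a
 * 16-byte offset, and RFC3686 additionally places the 4-byte nonce
 * (stored right behind the cipher key by setkey) in front of the IV.
 */
static u32 example_ctx1_iv_off(bool ctr_mode, bool is_rfc3686)
{
	u32 off = 0;

	if (ctr_mode)
		off = 16;				/* CONTEXT1[255:128] = IV */
	if (is_rfc3686)
		off = 16 + CTR_RFC3686_NONCE_SIZE;	/* {NONCE, IV, COUNTER} */

	return off;
}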
326 ctx->authsize = authsize; in aead_setauthsize()
335 struct device *jrdev = ctx->jrdev; in gcm_set_sh_desc()
338 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - in gcm_set_sh_desc()
339 ctx->cdata.keylen; in gcm_set_sh_desc()
341 if (!ctx->cdata.keylen || !ctx->authsize) in gcm_set_sh_desc()
347 * must fit into the 64-word Descriptor h/w Buffer in gcm_set_sh_desc()
350 ctx->cdata.key_inline = true; in gcm_set_sh_desc()
351 ctx->cdata.key_virt = ctx->key; in gcm_set_sh_desc()
353 ctx->cdata.key_inline = false; in gcm_set_sh_desc()
354 ctx->cdata.key_dma = ctx->key_dma; in gcm_set_sh_desc()
357 desc = ctx->sh_desc_enc; in gcm_set_sh_desc()
358 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); in gcm_set_sh_desc()
359 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in gcm_set_sh_desc()
360 desc_bytes(desc), ctx->dir); in gcm_set_sh_desc()
364 * must all fit into the 64-word Descriptor h/w Buffer in gcm_set_sh_desc()
367 ctx->cdata.key_inline = true; in gcm_set_sh_desc()
368 ctx->cdata.key_virt = ctx->key; in gcm_set_sh_desc()
370 ctx->cdata.key_inline = false; in gcm_set_sh_desc()
371 ctx->cdata.key_dma = ctx->key_dma; in gcm_set_sh_desc()
374 desc = ctx->sh_desc_dec; in gcm_set_sh_desc()
375 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); in gcm_set_sh_desc()
376 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in gcm_set_sh_desc()
377 desc_bytes(desc), ctx->dir); in gcm_set_sh_desc()
391 ctx->authsize = authsize; in gcm_setauthsize()
400 struct device *jrdev = ctx->jrdev; in rfc4106_set_sh_desc()
403 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - in rfc4106_set_sh_desc()
404 ctx->cdata.keylen; in rfc4106_set_sh_desc()
406 if (!ctx->cdata.keylen || !ctx->authsize) in rfc4106_set_sh_desc()
412 * must fit into the 64-word Descriptor h/w Buffer in rfc4106_set_sh_desc()
415 ctx->cdata.key_inline = true; in rfc4106_set_sh_desc()
416 ctx->cdata.key_virt = ctx->key; in rfc4106_set_sh_desc()
418 ctx->cdata.key_inline = false; in rfc4106_set_sh_desc()
419 ctx->cdata.key_dma = ctx->key_dma; in rfc4106_set_sh_desc()
422 desc = ctx->sh_desc_enc; in rfc4106_set_sh_desc()
423 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, in rfc4106_set_sh_desc()
425 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4106_set_sh_desc()
426 desc_bytes(desc), ctx->dir); in rfc4106_set_sh_desc()
430 * must all fit into the 64-word Descriptor h/w Buffer in rfc4106_set_sh_desc()
433 ctx->cdata.key_inline = true; in rfc4106_set_sh_desc()
434 ctx->cdata.key_virt = ctx->key; in rfc4106_set_sh_desc()
436 ctx->cdata.key_inline = false; in rfc4106_set_sh_desc()
437 ctx->cdata.key_dma = ctx->key_dma; in rfc4106_set_sh_desc()
440 desc = ctx->sh_desc_dec; in rfc4106_set_sh_desc()
441 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, in rfc4106_set_sh_desc()
443 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4106_set_sh_desc()
444 desc_bytes(desc), ctx->dir); in rfc4106_set_sh_desc()
459 ctx->authsize = authsize; in rfc4106_setauthsize()
468 struct device *jrdev = ctx->jrdev; in rfc4543_set_sh_desc()
471 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - in rfc4543_set_sh_desc()
472 ctx->cdata.keylen; in rfc4543_set_sh_desc()
474 if (!ctx->cdata.keylen || !ctx->authsize) in rfc4543_set_sh_desc()
480 * must fit into the 64-word Descriptor h/w Buffer in rfc4543_set_sh_desc()
483 ctx->cdata.key_inline = true; in rfc4543_set_sh_desc()
484 ctx->cdata.key_virt = ctx->key; in rfc4543_set_sh_desc()
486 ctx->cdata.key_inline = false; in rfc4543_set_sh_desc()
487 ctx->cdata.key_dma = ctx->key_dma; in rfc4543_set_sh_desc()
490 desc = ctx->sh_desc_enc; in rfc4543_set_sh_desc()
491 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize, in rfc4543_set_sh_desc()
493 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4543_set_sh_desc()
494 desc_bytes(desc), ctx->dir); in rfc4543_set_sh_desc()
498 * must all fit into the 64-word Descriptor h/w Buffer in rfc4543_set_sh_desc()
501 ctx->cdata.key_inline = true; in rfc4543_set_sh_desc()
502 ctx->cdata.key_virt = ctx->key; in rfc4543_set_sh_desc()
504 ctx->cdata.key_inline = false; in rfc4543_set_sh_desc()
505 ctx->cdata.key_dma = ctx->key_dma; in rfc4543_set_sh_desc()
508 desc = ctx->sh_desc_dec; in rfc4543_set_sh_desc()
509 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, in rfc4543_set_sh_desc()
511 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4543_set_sh_desc()
512 desc_bytes(desc), ctx->dir); in rfc4543_set_sh_desc()
523 return -EINVAL; in rfc4543_setauthsize()
525 ctx->authsize = authsize; in rfc4543_setauthsize()
534 struct device *jrdev = ctx->jrdev; in chachapoly_set_sh_desc()
538 if (!ctx->cdata.keylen || !ctx->authsize) in chachapoly_set_sh_desc()
541 desc = ctx->sh_desc_enc; in chachapoly_set_sh_desc()
542 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, in chachapoly_set_sh_desc()
543 ctx->authsize, true, false); in chachapoly_set_sh_desc()
544 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in chachapoly_set_sh_desc()
545 desc_bytes(desc), ctx->dir); in chachapoly_set_sh_desc()
547 desc = ctx->sh_desc_dec; in chachapoly_set_sh_desc()
548 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, in chachapoly_set_sh_desc()
549 ctx->authsize, false, false); in chachapoly_set_sh_desc()
550 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in chachapoly_set_sh_desc()
551 desc_bytes(desc), ctx->dir); in chachapoly_set_sh_desc()
562 return -EINVAL; in chachapoly_setauthsize()
564 ctx->authsize = authsize; in chachapoly_setauthsize()
573 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize; in chachapoly_setkey()
576 return -EINVAL; in chachapoly_setkey()
578 memcpy(ctx->key, key, keylen); in chachapoly_setkey()
579 ctx->cdata.key_virt = ctx->key; in chachapoly_setkey()
580 ctx->cdata.keylen = keylen - saltlen; in chachapoly_setkey()
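
/*
 * Editorial note: for rfc7539esp the caller supplies the ChaCha20 key plus
 * a trailing CHACHAPOLY_IV_SIZE - ivsize byte salt.  The salt stays in
 * ctx->key (it later completes the 12-byte nonce) but is excluded from
 * cdata.keylen so that only the actual cipher key is programmed.
 */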
589 struct device *jrdev = ctx->jrdev; in aead_setkey()
590 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_setkey()
607 if (ctrlpriv->era >= 6) { in aead_setkey()
608 ctx->adata.keylen = keys.authkeylen; in aead_setkey()
609 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & in aead_setkey()
612 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) in aead_setkey()
615 memcpy(ctx->key, keys.authkey, keys.authkeylen); in aead_setkey()
616 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, in aead_setkey()
618 dma_sync_single_for_device(jrdev, ctx->key_dma, in aead_setkey()
619 ctx->adata.keylen_pad + in aead_setkey()
620 keys.enckeylen, ctx->dir); in aead_setkey()
624 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, in aead_setkey()
625 keys.authkeylen, CAAM_MAX_KEY_SIZE - in aead_setkey()
632 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); in aead_setkey()
633 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + in aead_setkey()
634 keys.enckeylen, ctx->dir); in aead_setkey()
637 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, in aead_setkey()
638 ctx->adata.keylen_pad + keys.enckeylen, 1); in aead_setkey()
641 ctx->cdata.keylen = keys.enckeylen; in aead_setkey()
646 return -EINVAL; in aead_setkey()
670 struct device *jrdev = ctx->jrdev; in gcm_setkey()
680 memcpy(ctx->key, key, keylen); in gcm_setkey()
681 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); in gcm_setkey()
682 ctx->cdata.keylen = keylen; in gcm_setkey()
691 struct device *jrdev = ctx->jrdev; in rfc4106_setkey()
694 err = aes_check_keylen(keylen - 4); in rfc4106_setkey()
701 memcpy(ctx->key, key, keylen); in rfc4106_setkey()
707 ctx->cdata.keylen = keylen - 4; in rfc4106_setkey()
708 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4106_setkey()
709 ctx->dir); in rfc4106_setkey()
717 struct device *jrdev = ctx->jrdev; in rfc4543_setkey()
720 err = aes_check_keylen(keylen - 4); in rfc4543_setkey()
727 memcpy(ctx->key, key, keylen); in rfc4543_setkey()
733 ctx->cdata.keylen = keylen - 4; in rfc4543_setkey()
734 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4543_setkey()
735 ctx->dir); in rfc4543_setkey()
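
/*
 * Editorial note: for RFC4106 and RFC4543 the last four bytes of the key
 * material are the salt used when building the GCM nonce, which is why
 * aes_check_keylen() is called with keylen - 4 and only keylen - 4 bytes
 * end up in cdata.keylen.
 */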
746 struct device *jrdev = ctx->jrdev; in skcipher_setkey()
749 const bool is_rfc3686 = alg->caam.rfc3686; in skcipher_setkey()
754 ctx->cdata.keylen = keylen; in skcipher_setkey()
755 ctx->cdata.key_virt = key; in skcipher_setkey()
756 ctx->cdata.key_inline = true; in skcipher_setkey()
759 desc = ctx->sh_desc_enc; in skcipher_setkey()
760 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, in skcipher_setkey()
762 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in skcipher_setkey()
763 desc_bytes(desc), ctx->dir); in skcipher_setkey()
766 desc = ctx->sh_desc_dec; in skcipher_setkey()
767 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, in skcipher_setkey()
769 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in skcipher_setkey()
770 desc_bytes(desc), ctx->dir); in skcipher_setkey()
799 keylen -= CTR_RFC3686_NONCE_SIZE; in rfc3686_skcipher_setkey()
815 * AES-CTR needs to load IV in CONTEXT1 reg in ctr_skcipher_setkey()
846 struct device *jrdev = ctx->jrdev; in xts_skcipher_setkey()
847 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in xts_skcipher_setkey()
858 ctx->xts_key_fallback = true; in xts_skcipher_setkey()
860 if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) { in xts_skcipher_setkey()
861 err = crypto_skcipher_setkey(ctx->fallback, key, keylen); in xts_skcipher_setkey()
866 ctx->cdata.keylen = keylen; in xts_skcipher_setkey()
867 ctx->cdata.key_virt = key; in xts_skcipher_setkey()
868 ctx->cdata.key_inline = true; in xts_skcipher_setkey()
871 desc = ctx->sh_desc_enc; in xts_skcipher_setkey()
872 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata); in xts_skcipher_setkey()
873 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in xts_skcipher_setkey()
874 desc_bytes(desc), ctx->dir); in xts_skcipher_setkey()
877 desc = ctx->sh_desc_dec; in xts_skcipher_setkey()
878 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata); in xts_skcipher_setkey()
879 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in xts_skcipher_setkey()
880 desc_bytes(desc), ctx->dir); in xts_skcipher_setkey()
886 * aead_edesc - s/w-extended aead descriptor
910 * skcipher_edesc - s/w-extended skcipher descriptor
962 caam_unmap(dev, req->src, req->dst, in aead_unmap()
963 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
964 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in aead_unmap()
973 caam_unmap(dev, req->src, req->dst, in skcipher_unmap()
974 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
975 edesc->iv_dma, ivsize, in skcipher_unmap()
976 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in skcipher_unmap()
991 edesc = rctx->edesc; in aead_crypt_done()
992 has_bklog = edesc->bklog; in aead_crypt_done()
1003 * by CAAM, not crypto engine. in aead_crypt_done()
1008 crypto_finalize_aead_request(jrp->engine, req, ecode); in aead_crypt_done()
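
/*
 * Editorial note on skcipher_edesc_iv() below: the IV copy used for the
 * request lives inside the extended descriptor itself, directly after the
 * sec4 scatter/gather table, aligned so that it can be DMA mapped safely.
 */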
1011 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc) in skcipher_edesc_iv()
1014 return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, in skcipher_edesc_iv()
1032 edesc = rctx->edesc; in skcipher_crypt_done()
1033 has_bklog = edesc->bklog; in skcipher_crypt_done()
1040 * The crypto API expects us to set the IV (req->iv) to the last in skcipher_crypt_done()
1045 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize); in skcipher_crypt_done()
1048 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, in skcipher_crypt_done()
1053 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, in skcipher_crypt_done()
1054 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_crypt_done()
1060 * by CAAM, not crypto engine. in skcipher_crypt_done()
1065 crypto_finalize_skcipher_request(jrp->engine, req, ecode); in skcipher_crypt_done()
1077 int authsize = ctx->authsize; in init_aead_job()
1078 u32 *desc = edesc->hw_desc; in init_aead_job()
1085 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; in init_aead_job()
1086 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; in init_aead_job()
1092 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : in init_aead_job()
1096 src_dma = edesc->sec4_sg_dma; in init_aead_job()
1097 sec4_sg_index += edesc->mapped_src_nents; in init_aead_job()
1101 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, in init_aead_job()
1107 if (unlikely(req->src != req->dst)) { in init_aead_job()
1108 if (!edesc->mapped_dst_nents) { in init_aead_job()
1111 } else if (edesc->mapped_dst_nents == 1) { in init_aead_job()
1112 dst_dma = sg_dma_address(req->dst); in init_aead_job()
1115 dst_dma = edesc->sec4_sg_dma + in init_aead_job()
1124 req->assoclen + req->cryptlen + authsize, in init_aead_job()
1128 req->assoclen + req->cryptlen - authsize, in init_aead_job()
1139 u32 *desc = edesc->hw_desc; in init_gcm_job()
1144 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); in init_gcm_job()
1148 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) in init_gcm_job()
1156 append_data(desc, ctx->key + ctx->cdata.keylen, 4); in init_gcm_job()
1158 append_data(desc, req->iv, ivsize); in init_gcm_job()
1168 unsigned int assoclen = req->assoclen; in init_chachapoly_job()
1169 u32 *desc = edesc->hw_desc; in init_chachapoly_job()
1182 assoclen -= ivsize; in init_chachapoly_job()
1191 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB | in init_chachapoly_job()
1206 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); in init_authenc_job()
1207 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == in init_authenc_job()
1209 const bool is_rfc3686 = alg->caam.rfc3686; in init_authenc_job()
1210 u32 *desc = edesc->hw_desc; in init_authenc_job()
1214 * AES-CTR needs to load IV in CONTEXT1 reg in init_authenc_job()
1234 if (ctrlpriv->era < 3) in init_authenc_job()
1235 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); in init_authenc_job()
1237 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); in init_authenc_job()
1239 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) in init_authenc_job()
1240 append_load_as_imm(desc, req->iv, ivsize, in init_authenc_job()
1255 struct device *jrdev = ctx->jrdev; in init_skcipher_job()
1257 u32 *desc = edesc->hw_desc; in init_skcipher_job()
1264 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); in init_skcipher_job()
1266 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1269 DUMP_PREFIX_ADDRESS, 16, 4, req->src, in init_skcipher_job()
1270 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
1272 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; in init_skcipher_job()
1273 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; in init_skcipher_job()
1278 if (ivsize || edesc->mapped_src_nents > 1) { in init_skcipher_job()
1279 src_dma = edesc->sec4_sg_dma; in init_skcipher_job()
1280 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; in init_skcipher_job()
1283 src_dma = sg_dma_address(req->src); in init_skcipher_job()
1286 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options); in init_skcipher_job()
1288 if (likely(req->src == req->dst)) { in init_skcipher_job()
1291 } else if (!ivsize && edesc->mapped_dst_nents == 1) { in init_skcipher_job()
1292 dst_dma = sg_dma_address(req->dst); in init_skcipher_job()
1294 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * in init_skcipher_job()
1299 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options); in init_skcipher_job()
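
/*
 * Editorial note on init_skcipher_job() above: the input sequence covers
 * IV + cryptlen bytes.  Whenever an IV is present or the source has more
 * than one mapped segment, a sec4 S/G table (with the IV as its first
 * entry) is referenced instead of a flat DMA address, and the output
 * pointer is built with the same logic.
 */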
1311 struct device *jrdev = ctx->jrdev; in aead_edesc_alloc()
1313 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in aead_edesc_alloc()
1319 unsigned int authsize = ctx->authsize; in aead_edesc_alloc()
1321 if (unlikely(req->dst != req->src)) { in aead_edesc_alloc()
1322 src_len = req->assoclen + req->cryptlen; in aead_edesc_alloc()
1323 dst_len = src_len + (encrypt ? authsize : (-authsize)); in aead_edesc_alloc()
1325 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
1332 dst_nents = sg_nents_for_len(req->dst, dst_len); in aead_edesc_alloc()
1339 src_len = req->assoclen + req->cryptlen + in aead_edesc_alloc()
1342 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
1350 if (likely(req->src == req->dst)) { in aead_edesc_alloc()
1351 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1355 return ERR_PTR(-ENOMEM); in aead_edesc_alloc()
1360 mapped_src_nents = dma_map_sg(jrdev, req->src, in aead_edesc_alloc()
1364 return ERR_PTR(-ENOMEM); in aead_edesc_alloc()
1372 mapped_dst_nents = dma_map_sg(jrdev, req->dst, in aead_edesc_alloc()
1377 dma_unmap_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1379 return ERR_PTR(-ENOMEM); in aead_edesc_alloc()
1401 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
1403 return ERR_PTR(-ENOMEM); in aead_edesc_alloc()
1406 edesc->src_nents = src_nents; in aead_edesc_alloc()
1407 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1408 edesc->mapped_src_nents = mapped_src_nents; in aead_edesc_alloc()
1409 edesc->mapped_dst_nents = mapped_dst_nents; in aead_edesc_alloc()
1410 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + in aead_edesc_alloc()
1413 rctx->edesc = edesc; in aead_edesc_alloc()
1419 sg_to_sec4_sg_last(req->src, src_len, in aead_edesc_alloc()
1420 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1424 sg_to_sec4_sg_last(req->dst, dst_len, in aead_edesc_alloc()
1425 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1431 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1433 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1437 return ERR_PTR(-ENOMEM); in aead_edesc_alloc()
1440 edesc->sec4_sg_bytes = sec4_sg_bytes; in aead_edesc_alloc()
1449 struct aead_edesc *edesc = rctx->edesc; in aead_enqueue_req()
1450 u32 *desc = edesc->hw_desc; in aead_enqueue_req()
1454 * Only the backlog requests are sent to crypto-engine since the others in aead_enqueue_req()
1456 * entries (more than the 10 entries from crypto-engine). in aead_enqueue_req()
1458 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG) in aead_enqueue_req()
1459 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine, in aead_enqueue_req()
1464 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) { in aead_enqueue_req()
1466 kfree(rctx->edesc); in aead_enqueue_req()
1472 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt) in chachapoly_crypt()
1477 struct device *jrdev = ctx->jrdev; in chachapoly_crypt()
1486 desc = edesc->hw_desc; in chachapoly_crypt()
1506 static inline int aead_crypt(struct aead_request *req, bool encrypt) in aead_crypt()
1511 struct device *jrdev = ctx->jrdev; in aead_crypt()
1524 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in aead_crypt()
1525 desc_bytes(edesc->hw_desc), 1); in aead_crypt()
1540 static int aead_do_one_req(struct crypto_engine *engine, void *areq) in aead_do_one_req() argument
1545 u32 *desc = rctx->edesc->hw_desc; in aead_do_one_req()
1548 rctx->edesc->bklog = true; in aead_do_one_req()
1550 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req); in aead_do_one_req()
1552 if (ret == -ENOSPC && engine->retry_support) in aead_do_one_req()
1555 if (ret != -EINPROGRESS) { in aead_do_one_req()
1556 aead_unmap(ctx->jrdev, rctx->edesc, req); in aead_do_one_req()
1557 kfree(rctx->edesc); in aead_do_one_req()
1565 static inline int gcm_crypt(struct aead_request *req, bool encrypt) in gcm_crypt()
1570 struct device *jrdev = ctx->jrdev; in gcm_crypt()
1583 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in gcm_crypt()
1584 desc_bytes(edesc->hw_desc), 1); in gcm_crypt()
1601 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req); in ipsec_gcm_encrypt()
1606 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req); in ipsec_gcm_decrypt()
1618 struct device *jrdev = ctx->jrdev; in skcipher_edesc_alloc()
1619 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in skcipher_edesc_alloc()
1629 src_nents = sg_nents_for_len(req->src, req->cryptlen); in skcipher_edesc_alloc()
1632 req->cryptlen); in skcipher_edesc_alloc()
1636 if (req->dst != req->src) { in skcipher_edesc_alloc()
1637 dst_nents = sg_nents_for_len(req->dst, req->cryptlen); in skcipher_edesc_alloc()
1640 req->cryptlen); in skcipher_edesc_alloc()
1645 if (likely(req->src == req->dst)) { in skcipher_edesc_alloc()
1646 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1650 return ERR_PTR(-ENOMEM); in skcipher_edesc_alloc()
1653 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1657 return ERR_PTR(-ENOMEM); in skcipher_edesc_alloc()
1659 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, in skcipher_edesc_alloc()
1663 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1664 return ERR_PTR(-ENOMEM); in skcipher_edesc_alloc()
1687 if (req->src == req->dst) in skcipher_edesc_alloc()
1703 aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) & in skcipher_edesc_alloc()
1704 (dma_get_cache_alignment() - 1); in skcipher_edesc_alloc()
1709 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1711 return ERR_PTR(-ENOMEM); in skcipher_edesc_alloc()
1714 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1715 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1716 edesc->mapped_src_nents = mapped_src_nents; in skcipher_edesc_alloc()
1717 edesc->mapped_dst_nents = mapped_dst_nents; in skcipher_edesc_alloc()
1718 edesc->sec4_sg_bytes = sec4_sg_bytes; in skcipher_edesc_alloc()
1719 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + in skcipher_edesc_alloc()
1721 rctx->edesc = edesc; in skcipher_edesc_alloc()
1726 memcpy(iv, req->iv, ivsize); in skcipher_edesc_alloc()
1731 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
1734 return ERR_PTR(-ENOMEM); in skcipher_edesc_alloc()
1737 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); in skcipher_edesc_alloc()
1740 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1743 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1)) in skcipher_edesc_alloc()
1744 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1748 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1752 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1753 mapped_dst_nents - 1 + !!ivsize); in skcipher_edesc_alloc()
1756 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1759 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1761 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
1764 return ERR_PTR(-ENOMEM); in skcipher_edesc_alloc()
1768 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1771 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, in skcipher_edesc_alloc()
1777 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq) in skcipher_do_one_req() argument
1782 u32 *desc = rctx->edesc->hw_desc; in skcipher_do_one_req()
1785 rctx->edesc->bklog = true; in skcipher_do_one_req()
1787 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req); in skcipher_do_one_req()
1789 if (ret == -ENOSPC && engine->retry_support) in skcipher_do_one_req()
1792 if (ret != -EINPROGRESS) { in skcipher_do_one_req()
1793 skcipher_unmap(ctx->jrdev, rctx->edesc, req); in skcipher_do_one_req()
1794 kfree(rctx->edesc); in skcipher_do_one_req()
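
/*
 * Editorial note on xts_skcipher_ivsize() below: it reports whether the
 * upper 64 bits of the 16-byte XTS IV are non-zero.  As used in
 * skcipher_crypt(), such IVs (on CAAM eras <= 8) and key lengths flagged
 * by xts_key_fallback are redirected to the software fallback tfm.
 */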
1802 static inline bool xts_skcipher_ivsize(struct skcipher_request *req) in xts_skcipher_ivsize()
1807 return !!get_unaligned((u64 *)(req->iv + (ivsize / 2))); in xts_skcipher_ivsize()
1810 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt) in skcipher_crypt()
1815 struct device *jrdev = ctx->jrdev; in skcipher_crypt()
1817 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in skcipher_crypt()
1826 if (!req->cryptlen && !ctx->fallback) in skcipher_crypt()
1829 if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) || in skcipher_crypt()
1830 ctx->xts_key_fallback)) { in skcipher_crypt()
1833 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback); in skcipher_crypt()
1834 skcipher_request_set_callback(&rctx->fallback_req, in skcipher_crypt()
1835 req->base.flags, in skcipher_crypt()
1836 req->base.complete, in skcipher_crypt()
1837 req->base.data); in skcipher_crypt()
1838 skcipher_request_set_crypt(&rctx->fallback_req, req->src, in skcipher_crypt()
1839 req->dst, req->cryptlen, req->iv); in skcipher_crypt()
1841 return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) : in skcipher_crypt()
1842 crypto_skcipher_decrypt(&rctx->fallback_req); in skcipher_crypt()
1854 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in skcipher_crypt()
1855 desc_bytes(edesc->hw_desc), 1); in skcipher_crypt()
1857 desc = edesc->hw_desc; in skcipher_crypt()
1859 * Only the backlog requests are sent to crypto-engine since the others in skcipher_crypt()
1861 * entries (more than the 10 entries from crypto-engine). in skcipher_crypt()
1863 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG) in skcipher_crypt()
1864 ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine, in skcipher_crypt()
1869 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) { in skcipher_crypt()
1892 .cra_driver_name = "cbc-aes-caam",
1911 .cra_driver_name = "cbc-3des-caam",
1930 .cra_driver_name = "cbc-des-caam",
1949 .cra_driver_name = "ctr-aes-caam",
1970 .cra_driver_name = "rfc3686-ctr-aes-caam",
1996 .cra_driver_name = "xts-aes-caam",
2016 .cra_driver_name = "ecb-des-caam",
2034 .cra_driver_name = "ecb-aes-caam",
2052 .cra_driver_name = "ecb-des3-caam",
2073 .cra_driver_name = "rfc4106-gcm-aes-caam",
2095 .cra_driver_name = "rfc4543-gcm-aes-caam",
2118 .cra_driver_name = "gcm-aes-caam",
2136 /* single-pass ipsec_esp descriptor */
2142 .cra_driver_name = "authenc-hmac-md5-"
2143 "ecb-cipher_null-caam",
2166 .cra_driver_name = "authenc-hmac-sha1-"
2167 "ecb-cipher_null-caam",
2190 .cra_driver_name = "authenc-hmac-sha224-"
2191 "ecb-cipher_null-caam",
2214 .cra_driver_name = "authenc-hmac-sha256-"
2215 "ecb-cipher_null-caam",
2238 .cra_driver_name = "authenc-hmac-sha384-"
2239 "ecb-cipher_null-caam",
2262 .cra_driver_name = "authenc-hmac-sha512-"
2263 "ecb-cipher_null-caam",
2285 .cra_driver_name = "authenc-hmac-md5-"
2286 "cbc-aes-caam",
2310 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2311 "cbc-aes-caam",
2335 .cra_driver_name = "authenc-hmac-sha1-"
2336 "cbc-aes-caam",
2360 .cra_driver_name = "echainiv-authenc-"
2361 "hmac-sha1-cbc-aes-caam",
2385 .cra_driver_name = "authenc-hmac-sha224-"
2386 "cbc-aes-caam",
2410 .cra_driver_name = "echainiv-authenc-"
2411 "hmac-sha224-cbc-aes-caam",
2435 .cra_driver_name = "authenc-hmac-sha256-"
2436 "cbc-aes-caam",
2460 .cra_driver_name = "echainiv-authenc-"
2461 "hmac-sha256-cbc-aes-caam",
2485 .cra_driver_name = "authenc-hmac-sha384-"
2486 "cbc-aes-caam",
2510 .cra_driver_name = "echainiv-authenc-"
2511 "hmac-sha384-cbc-aes-caam",
2535 .cra_driver_name = "authenc-hmac-sha512-"
2536 "cbc-aes-caam",
2560 .cra_driver_name = "echainiv-authenc-"
2561 "hmac-sha512-cbc-aes-caam",
2585 .cra_driver_name = "authenc-hmac-md5-"
2586 "cbc-des3_ede-caam",
2610 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2611 "cbc-des3_ede-caam",
2636 .cra_driver_name = "authenc-hmac-sha1-"
2637 "cbc-des3_ede-caam",
2661 .cra_driver_name = "echainiv-authenc-"
2662 "hmac-sha1-"
2663 "cbc-des3_ede-caam",
2688 .cra_driver_name = "authenc-hmac-sha224-"
2689 "cbc-des3_ede-caam",
2713 .cra_driver_name = "echainiv-authenc-"
2714 "hmac-sha224-"
2715 "cbc-des3_ede-caam",
2740 .cra_driver_name = "authenc-hmac-sha256-"
2741 "cbc-des3_ede-caam",
2765 .cra_driver_name = "echainiv-authenc-"
2766 "hmac-sha256-"
2767 "cbc-des3_ede-caam",
2792 .cra_driver_name = "authenc-hmac-sha384-"
2793 "cbc-des3_ede-caam",
2817 .cra_driver_name = "echainiv-authenc-"
2818 "hmac-sha384-"
2819 "cbc-des3_ede-caam",
2844 .cra_driver_name = "authenc-hmac-sha512-"
2845 "cbc-des3_ede-caam",
2869 .cra_driver_name = "echainiv-authenc-"
2870 "hmac-sha512-"
2871 "cbc-des3_ede-caam",
2895 .cra_driver_name = "authenc-hmac-md5-"
2896 "cbc-des-caam",
2920 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2921 "cbc-des-caam",
2945 .cra_driver_name = "authenc-hmac-sha1-"
2946 "cbc-des-caam",
2970 .cra_driver_name = "echainiv-authenc-"
2971 "hmac-sha1-cbc-des-caam",
2995 .cra_driver_name = "authenc-hmac-sha224-"
2996 "cbc-des-caam",
3020 .cra_driver_name = "echainiv-authenc-"
3021 "hmac-sha224-cbc-des-caam",
3045 .cra_driver_name = "authenc-hmac-sha256-"
3046 "cbc-des-caam",
3070 .cra_driver_name = "echainiv-authenc-"
3071 "hmac-sha256-cbc-des-caam",
3095 .cra_driver_name = "authenc-hmac-sha384-"
3096 "cbc-des-caam",
3120 .cra_driver_name = "echainiv-authenc-"
3121 "hmac-sha384-cbc-des-caam",
3145 .cra_driver_name = "authenc-hmac-sha512-"
3146 "cbc-des-caam",
3170 .cra_driver_name = "echainiv-authenc-"
3171 "hmac-sha512-cbc-des-caam",
3196 .cra_driver_name = "authenc-hmac-md5-"
3197 "rfc3686-ctr-aes-caam",
3223 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3224 "rfc3686-ctr-aes-caam",
3251 .cra_driver_name = "authenc-hmac-sha1-"
3252 "rfc3686-ctr-aes-caam",
3278 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3279 "rfc3686-ctr-aes-caam",
3306 .cra_driver_name = "authenc-hmac-sha224-"
3307 "rfc3686-ctr-aes-caam",
3333 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3334 "rfc3686-ctr-aes-caam",
3361 .cra_driver_name = "authenc-hmac-sha256-"
3362 "rfc3686-ctr-aes-caam",
3388 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3389 "rfc3686-ctr-aes-caam",
3416 .cra_driver_name = "authenc-hmac-sha384-"
3417 "rfc3686-ctr-aes-caam",
3443 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3444 "rfc3686-ctr-aes-caam",
3471 .cra_driver_name = "authenc-hmac-sha512-"
3472 "rfc3686-ctr-aes-caam",
3498 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3499 "rfc3686-ctr-aes-caam",
3525 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3551 .cra_driver_name = "rfc7539esp-chacha20-"
3552 "poly1305-caam",
3583 ctx->jrdev = caam_jr_alloc(); in caam_init_common()
3584 if (IS_ERR(ctx->jrdev)) { in caam_init_common()
3586 return PTR_ERR(ctx->jrdev); in caam_init_common()
3589 priv = dev_get_drvdata(ctx->jrdev->parent); in caam_init_common()
3590 if (priv->era >= 6 && uses_dkp) in caam_init_common()
3591 ctx->dir = DMA_BIDIRECTIONAL; in caam_init_common()
3593 ctx->dir = DMA_TO_DEVICE; in caam_init_common()
3595 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, in caam_init_common()
3597 sh_desc_enc_dma) - in caam_init_common()
3599 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); in caam_init_common()
3600 if (dma_mapping_error(ctx->jrdev, dma_addr)) { in caam_init_common()
3601 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); in caam_init_common()
3602 caam_jr_free(ctx->jrdev); in caam_init_common()
3603 return -ENOMEM; in caam_init_common()
3606 ctx->sh_desc_enc_dma = dma_addr; in caam_init_common()
3607 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, in caam_init_common()
3608 sh_desc_dec) - in caam_init_common()
3610 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) - in caam_init_common()
3614 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; in caam_init_common()
3615 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; in caam_init_common()
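
/*
 * Editorial note on caam_init_common() above: a single
 * dma_map_single_attrs() call covers the whole range from sh_desc_enc up
 * to and including the key buffer, and the per-field DMA addresses
 * (sh_desc_dec_dma, key_dma) are derived from it with offsetof(), so
 * caam_exit_common() only has to undo one mapping.
 */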
3626 u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; in caam_cra_init()
3630 const char *tfm_name = crypto_tfm_alg_name(&tfm->base); in caam_cra_init()
3641 ctx->fallback = fallback; in caam_cra_init()
3648 ret = caam_init_common(ctx, &caam_alg->caam, false); in caam_cra_init()
3649 if (ret && ctx->fallback) in caam_cra_init()
3650 crypto_free_skcipher(ctx->fallback); in caam_cra_init()
3664 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp); in caam_aead_init()
3669 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, in caam_exit_common()
3670 offsetof(struct caam_ctx, sh_desc_enc_dma) - in caam_exit_common()
3672 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); in caam_exit_common()
3673 caam_jr_free(ctx->jrdev); in caam_exit_common()
3680 if (ctx->fallback) in caam_cra_exit()
3681 crypto_free_skcipher(ctx->fallback); in caam_cra_exit()
3697 if (t_alg->registered) in caam_algapi_exit()
3698 crypto_engine_unregister_aead(&t_alg->aead); in caam_algapi_exit()
3704 if (t_alg->registered) in caam_algapi_exit()
3705 crypto_engine_unregister_skcipher(&t_alg->skcipher); in caam_algapi_exit()
3711 struct skcipher_alg *alg = &t_alg->skcipher.base; in caam_skcipher_alg_init()
3713 alg->base.cra_module = THIS_MODULE; in caam_skcipher_alg_init()
3714 alg->base.cra_priority = CAAM_CRA_PRIORITY; in caam_skcipher_alg_init()
3715 alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding(); in caam_skcipher_alg_init()
3716 alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | in caam_skcipher_alg_init()
3719 alg->init = caam_cra_init; in caam_skcipher_alg_init()
3720 alg->exit = caam_cra_exit; in caam_skcipher_alg_init()
3725 struct aead_alg *alg = &t_alg->aead.base; in caam_aead_alg_init()
3727 alg->base.cra_module = THIS_MODULE; in caam_aead_alg_init()
3728 alg->base.cra_priority = CAAM_CRA_PRIORITY; in caam_aead_alg_init()
3729 alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding(); in caam_aead_alg_init()
3730 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | in caam_aead_alg_init()
3733 alg->init = caam_aead_init; in caam_aead_alg_init()
3734 alg->exit = caam_aead_exit; in caam_aead_alg_init()
3746 * Register crypto algorithms the device supports. in caam_algapi_init()
3749 if (priv->era < 10) { in caam_algapi_init()
3750 struct caam_perfmon __iomem *perfmon = &priv->jr[0]->perfmon; in caam_algapi_init()
3753 cha_vid = rd_reg32(&perfmon->cha_id_ls); in caam_algapi_init()
3757 cha_inst = rd_reg32(&perfmon->cha_num_ls); in caam_algapi_init()
3765 aes_rn = rd_reg32(&perfmon->cha_rev_ls) & CHA_ID_LS_AES_MASK; in caam_algapi_init()
3768 struct version_regs __iomem *vreg = &priv->jr[0]->vreg; in caam_algapi_init()
3771 aesa = rd_reg32(&vreg->aesa); in caam_algapi_init()
3772 mdha = rd_reg32(&vreg->mdha); in caam_algapi_init()
3777 des_inst = rd_reg32(&vreg->desa) & CHA_VER_NUM_MASK; in caam_algapi_init()
3780 ccha_inst = rd_reg32(&vreg->ccha) & CHA_VER_NUM_MASK; in caam_algapi_init()
3781 ptha_inst = rd_reg32(&vreg->ptha) & CHA_VER_NUM_MASK; in caam_algapi_init()
3792 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK; in caam_algapi_init()
3809 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) == in caam_algapi_init()
3815 err = crypto_engine_register_skcipher(&t_alg->skcipher); in caam_algapi_init()
3818 t_alg->skcipher.base.base.cra_driver_name); in caam_algapi_init()
3822 t_alg->registered = true; in caam_algapi_init()
3828 u32 c1_alg_sel = t_alg->caam.class1_alg_type & in caam_algapi_init()
3830 u32 c2_alg_sel = t_alg->caam.class2_alg_type & in caam_algapi_init()
3832 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; in caam_algapi_init()
3862 (!md_inst || t_alg->aead.base.maxauthsize > md_limit)) in caam_algapi_init()
3867 err = crypto_engine_register_aead(&t_alg->aead); in caam_algapi_init()
3870 t_alg->aead.base.base.cra_driver_name); in caam_algapi_init()
3874 t_alg->registered = true; in caam_algapi_init()
3879 pr_info("caam algorithms registered in /proc/crypto\n"); in caam_algapi_init()