--- caamalg.c (4d370a1036958d7df9f1492c345b4984a4eba7f6)
+++ caamalg.c (ee38767f152a3310aabee7074848911f43bf5d69)
 // SPDX-License-Identifier: GPL-2.0+
 /*
  * caam - Freescale FSL CAAM support for crypto API
  *
  * Copyright 2008-2011 Freescale Semiconductor, Inc.
  * Copyright 2016-2019 NXP
  *
  * Based on talitos crypto API driver.

[... 42 unchanged lines hidden ...]

 #include "regs.h"
 #include "intern.h"
 #include "desc_constr.h"
 #include "jr.h"
 #include "error.h"
 #include "sg_sw_sec4.h"
 #include "key_gen.h"
 #include "caamalg_desc.h"
+#include <crypto/engine.h>
 
 /*
  * crypto alg
  */
 #define CAAM_CRA_PRIORITY 3000
 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
                            CTR_RFC3686_NONCE_SIZE + \

[... 29 unchanged lines hidden ...]

     struct caam_alg_entry caam;
     bool registered;
 };
 
 /*
  * per-session context
  */
 struct caam_ctx {
+    struct crypto_engine_ctx enginectx;
     u32 sh_desc_enc[DESC_MAX_USED_LEN];
     u32 sh_desc_dec[DESC_MAX_USED_LEN];
     u8 key[CAAM_MAX_KEY_SIZE];
     dma_addr_t sh_desc_enc_dma;
     dma_addr_t sh_desc_dec_dma;
     dma_addr_t key_dma;
     enum dma_data_direction dir;
     struct device *jrdev;
     struct alginfo adata;
     struct alginfo cdata;
     unsigned int authsize;
 };
 
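Why enginectx must lead the struct (my annotation, inferred from the crypto engine API contemporary with this patch; the diff itself does not spell it out): the engine core recovers the driver's callback by casting the tfm context, so struct crypto_engine_ctx only works as the first member of struct caam_ctx. A rough sketch of that dispatch:

/*
 * Sketch only -- approximately what the crypto engine core does when it
 * pops a queued request, under the old crypto_engine_ctx-based API. If
 * enginectx were not the first member of struct caam_ctx, this cast
 * would land on the wrong bytes.
 */
struct crypto_engine_ctx *enginectx = crypto_tfm_ctx(async_req->tfm);

ret = enginectx->op.do_one_request(engine, async_req);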
+struct caam_skcipher_req_ctx {
+    struct skcipher_edesc *edesc;
+};
+
 static int aead_null_set_sh_desc(struct crypto_aead *aead)
 {
     struct caam_ctx *ctx = crypto_aead_ctx(aead);
     struct device *jrdev = ctx->jrdev;
     struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
     u32 *desc;
     int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
                     ctx->adata.keylen_pad;

[... 751 unchanged lines hidden ...]

 /*
  * skcipher_edesc - s/w-extended skcipher descriptor
  * @src_nents: number of segments in input s/w scatterlist
  * @dst_nents: number of segments in output s/w scatterlist
  * @mapped_src_nents: number of segments in input h/w link table
  * @mapped_dst_nents: number of segments in output h/w link table
  * @iv_dma: dma address of iv for checking continuity and link table
  * @sec4_sg_bytes: length of dma mapped sec4_sg space
+ * @bklog: stored to determine if the request needs backlog
  * @sec4_sg_dma: bus physical mapped address of h/w link table
  * @sec4_sg: pointer to h/w link table
  * @hw_desc: the h/w job descriptor followed by any referenced link tables
  *           and IV
  */
 struct skcipher_edesc {
     int src_nents;
     int dst_nents;
     int mapped_src_nents;
     int mapped_dst_nents;
     dma_addr_t iv_dma;
     int sec4_sg_bytes;
+    bool bklog;
     dma_addr_t sec4_sg_dma;
     struct sec4_sg_entry *sec4_sg;
     u32 hw_desc[0];
 };
 
 static void caam_unmap(struct device *dev, struct scatterlist *src,
                        struct scatterlist *dst, int src_nents,
                        int dst_nents,

[... 58 unchanged lines hidden ...]

     aead_request_complete(req, ecode);
 }
 
 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
                                 void *context)
 {
     struct skcipher_request *req = context;
     struct skcipher_edesc *edesc;
+    struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
     struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
+    struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
     int ivsize = crypto_skcipher_ivsize(skcipher);
     int ecode = 0;
 
     dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
 
-    edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
+    edesc = rctx->edesc;
     if (err)
         ecode = caam_jr_strstatus(jrdev, err);
 
     skcipher_unmap(jrdev, edesc, req);
 
     /*
      * The crypto API expects us to set the IV (req->iv) to the last
      * ciphertext block (CBC mode) or last counter (CTR mode).

[... 9 unchanged lines hidden ...]

     }
 
     caam_dump_sg("dst @" __stringify(__LINE__)": ",
                  DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
                  edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
 
     kfree(edesc);
 
-    skcipher_request_complete(req, ecode);
+    /*
+     * If the backlog flag is not set, the completion of the request is
+     * done by CAAM, not by the crypto engine.
+     */
+    if (!edesc->bklog)
+        skcipher_request_complete(req, ecode);
+    else
+        crypto_finalize_skcipher_request(jrp->engine, req, ecode);
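For orientation, a summary of the two completion paths this hunk creates (my annotation, not patch code): a request that entered through the engine must also leave through it, so the engine can dequeue the next, possibly backlogged, request; edesc->bklog records which path submitted the job.

/*
 * Completion paths after this change (annotation):
 *
 *   submitted via caam_jr_enqueue() directly
 *       -> skcipher_request_complete(req, ecode);
 *
 *   submitted via crypto_transfer_skcipher_request_to_engine()
 *       -> crypto_finalize_skcipher_request(jrp->engine, req, ecode);
 *          which also lets the engine move on to the next request
 */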
 }
 
 /*
  * Fill in aead job descriptor
  */
 static void init_aead_job(struct aead_request *req,
                           struct aead_edesc *edesc,
                           bool all_contig, bool encrypt)

[... 504 unchanged lines hidden ...]

 /*
  * allocate and map the skcipher extended descriptor for skcipher
  */
 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
                                                    int desc_bytes)
 {
     struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
     struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+    struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
     struct device *jrdev = ctx->jrdev;
     gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                    GFP_KERNEL : GFP_ATOMIC;
     int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
     struct skcipher_edesc *edesc;
     dma_addr_t iv_dma = 0;
     u8 *iv;
     int ivsize = crypto_skcipher_ivsize(skcipher);

[... 82 unchanged lines hidden ...]

     edesc->src_nents = src_nents;
     edesc->dst_nents = dst_nents;
     edesc->mapped_src_nents = mapped_src_nents;
     edesc->mapped_dst_nents = mapped_dst_nents;
     edesc->sec4_sg_bytes = sec4_sg_bytes;
     edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
                                               desc_bytes);
+    rctx->edesc = edesc;
 
     /* Make sure IV is located in a DMAable area */
     if (ivsize) {
         iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
         memcpy(iv, req->iv, ivsize);
 
         iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
         if (dma_mapping_error(jrdev, iv_dma)) {

[... 39 unchanged lines hidden ...]

     print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
                          DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
                          sec4_sg_bytes, 1);
 
     return edesc;
 }
 
+static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
+{
+    struct skcipher_request *req = skcipher_request_cast(areq);
+    struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
+    struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
+    u32 *desc = rctx->edesc->hw_desc;
+    int ret;
+
+    rctx->edesc->bklog = true;
+
+    ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
+
+    if (ret != -EINPROGRESS) {
+        skcipher_unmap(ctx->jrdev, rctx->edesc, req);
+        kfree(rctx->edesc);
+    } else {
+        ret = 0;
+    }
+
+    return ret;
+}
+
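A subtlety in skcipher_do_one_req() worth noting (my reading of the engine contract, not stated in the patch): do_one_request() is expected to return 0 once the job has been handed to hardware, with the real result delivered later through crypto_finalize_skcipher_request(); hence the -EINPROGRESS from caam_jr_enqueue() is normalized to 0, while a genuine enqueue error is returned so the engine can fail the request.

/*
 * Assumed engine contract (sketch):
 *
 *   do_one_request() == 0      -> job accepted; result arrives later via
 *                                 crypto_finalize_skcipher_request()
 *   do_one_request() == -errno -> submission failed; the engine completes
 *                                 the request with that error
 */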
 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 {
     struct skcipher_edesc *edesc;
     struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
     struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
     struct device *jrdev = ctx->jrdev;
+    struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
     u32 *desc;
     int ret = 0;
 
     if (!req->cryptlen)
         return 0;
 
     /* allocate extended descriptor */
     edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
     if (IS_ERR(edesc))
         return PTR_ERR(edesc);
 
     /* Create and submit job descriptor */
     init_skcipher_job(req, edesc, encrypt);
 
     print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
                          DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
                          desc_bytes(edesc->hw_desc), 1);
 
     desc = edesc->hw_desc;
-    ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
+    /*
+     * Only backlog requests are sent to the crypto engine, since the
+     * others can be handled by CAAM, if free, especially since the JR
+     * has up to 1024 entries (more than the 10 entries of the crypto
+     * engine).
+     */
+    if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
+        ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
+                                                         req);
+    else
+        ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
 
-    if (ret != -EINPROGRESS) {
+    if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
         skcipher_unmap(jrdev, edesc, req);
         kfree(edesc);
     }
 
     return ret;
 }
 
 static int skcipher_encrypt(struct skcipher_request *req)

[... 1504 unchanged lines hidden ...]

     },
 };
 
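To make the routing decision in skcipher_crypt() concrete, here is a hypothetical caller (illustration only; the names below are made up, not driver code): whether a request takes the engine path or the direct job-ring path depends entirely on CRYPTO_TFM_REQ_MAY_BACKLOG, which users set through the standard request API.

/*
 * Hypothetical skcipher user. With CRYPTO_TFM_REQ_MAY_BACKLOG set, caam
 * now routes the request through the crypto engine, which may return
 * -EBUSY while still having queued the request (backlog semantics);
 * without the flag, the request goes straight to caam_jr_enqueue().
 */
skcipher_request_set_callback(req,
                              CRYPTO_TFM_REQ_MAY_BACKLOG |
                              CRYPTO_TFM_REQ_MAY_SLEEP,
                              my_done_callback, my_cb_arg);
skcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
ret = crypto_skcipher_encrypt(req);    /* -EINPROGRESS, or -EBUSY if backlogged */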
 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
                             bool uses_dkp)
 {
     dma_addr_t dma_addr;
     struct caam_drv_private *priv;
+    const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
+                                               sh_desc_enc);
 
     ctx->jrdev = caam_jr_alloc();
     if (IS_ERR(ctx->jrdev)) {
         pr_err("Job Ring Device allocation for transform failed\n");
         return PTR_ERR(ctx->jrdev);
     }
 
     priv = dev_get_drvdata(ctx->jrdev->parent);
     if (priv->era >= 6 && uses_dkp)
         ctx->dir = DMA_BIDIRECTIONAL;
     else
         ctx->dir = DMA_TO_DEVICE;
 
     dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
                                     offsetof(struct caam_ctx,
-                                             sh_desc_enc_dma),
+                                             sh_desc_enc_dma) -
+                                    sh_desc_enc_offset,
                                     ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
     if (dma_mapping_error(ctx->jrdev, dma_addr)) {
         dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
         caam_jr_free(ctx->jrdev);
         return -ENOMEM;
     }
 
     ctx->sh_desc_enc_dma = dma_addr;
     ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
-                                               sh_desc_dec);
-    ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
+                                               sh_desc_dec) -
+                          sh_desc_enc_offset;
+    ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
+                   sh_desc_enc_offset;
 
     /* copy descriptor header template value */
     ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
     ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
 
     return 0;
 }
 
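The offsetof arithmetic above, and the matching change in caam_exit_common() below, follows from the new struct layout (my annotation, assuming enginectx stays the first member): enginectx is host-only state and must not be handed to the device, so the DMA mapping now starts at sh_desc_enc rather than at the start of struct caam_ctx, and every derived bus address is shifted back by sh_desc_enc_offset.

/*
 * Layout sketch (annotation, not patch code):
 *
 *   struct caam_ctx {
 *           struct crypto_engine_ctx enginectx;  <-- NOT DMA-mapped
 *           u32 sh_desc_enc[...];                <-- dma_addr points here
 *           u32 sh_desc_dec[...];
 *           u8  key[...];
 *           dma_addr_t sh_desc_enc_dma;          <-- end of mapped region
 *           ...
 *   };
 *
 *   mapped length   = offsetof(caam_ctx, sh_desc_enc_dma)
 *                     - offsetof(caam_ctx, sh_desc_enc)
 *   sh_desc_dec_dma = dma_addr + offsetof(caam_ctx, sh_desc_dec)
 *                     - offsetof(caam_ctx, sh_desc_enc)
 */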
 static int caam_cra_init(struct crypto_skcipher *tfm)
 {
     struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
     struct caam_skcipher_alg *caam_alg =
         container_of(alg, typeof(*caam_alg), skcipher);
+    struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
 
+    crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
+
+    ctx->enginectx.op.do_one_request = skcipher_do_one_req;
+
     return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
                             false);
 }
 
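The two added lines in caam_cra_init() pair with earlier hunks (my annotation): crypto_skcipher_set_reqsize() reserves per-request storage so that the skcipher_request_ctx() calls elsewhere in this patch return valid memory, and do_one_request is the hook invoked by the engine dispatch sketched earlier.

/*
 * Annotation, not patch code:
 *
 *   crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
 *       -> skcipher_request_ctx(req) now yields space for rctx->edesc
 *
 *   ctx->enginectx.op.do_one_request = skcipher_do_one_req;
 *       -> called by the crypto engine for each request it dequeues
 */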
 static int caam_aead_init(struct crypto_aead *tfm)
 {
     struct aead_alg *alg = crypto_aead_alg(tfm);
     struct caam_aead_alg *caam_alg =
         container_of(alg, struct caam_aead_alg, aead);
     struct caam_ctx *ctx = crypto_aead_ctx(tfm);
 
     return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
 }
 
 static void caam_exit_common(struct caam_ctx *ctx)
 {
     dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
-                           offsetof(struct caam_ctx, sh_desc_enc_dma),
+                           offsetof(struct caam_ctx, sh_desc_enc_dma) -
+                           offsetof(struct caam_ctx, sh_desc_enc),
                            ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
     caam_jr_free(ctx->jrdev);
 }
 
 static void caam_cra_exit(struct crypto_skcipher *tfm)
 {
     caam_exit_common(crypto_skcipher_ctx(tfm));
 }

[... 204 unchanged lines hidden ...]