Lines Matching refs:edesc
138 struct skcipher_edesc *edesc; member
143 struct aead_edesc *edesc; member
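
Both members live in the driver's per-request contexts and point at the software-extended descriptor ("edesc") that every request allocates. Below is a minimal sketch of the layout implied by the fields this listing touches; field order and exact types are assumptions, and aead_edesc appears to be the same shape minus iv_dma:

/* Sketch only: reconstructed from the fields referenced throughout this listing. */
struct skcipher_edesc {
        int src_nents;                  /* segments in the input s/w scatterlist */
        int dst_nents;                  /* segments in the output s/w scatterlist */
        int mapped_src_nents;           /* segments after DMA-mapping the input */
        int mapped_dst_nents;           /* segments after DMA-mapping the output */
        dma_addr_t iv_dma;              /* bus address of the per-request IV copy */
        int sec4_sg_bytes;              /* length of the DMA-mapped link table */
        bool bklog;                     /* request went through crypto-engine backlog */
        dma_addr_t sec4_sg_dma;         /* bus address of the h/w link table */
        struct sec4_sg_entry *sec4_sg;  /* points into the trailing storage */
        u32 hw_desc[];                  /* h/w job descriptor, then link table, then IV */
};
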
1026 struct aead_edesc *edesc, in aead_unmap() argument
1030 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
1031 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in aead_unmap()
1034 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
1041 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1042 edesc->iv_dma, ivsize, in skcipher_unmap()
1043 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in skcipher_unmap()
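
The two unmap wrappers forward the bookkeeping kept in the edesc to one shared teardown helper: the aead variant passes 0/0 for the IV slot (lines 1030-1031), while the skcipher variant also releases the IV mapping (lines 1041-1043). A sketch assuming a common caam_unmap() helper with roughly this signature:

/* Sketch: the shared teardown both wrappers appear to call. */
static void caam_unmap(struct device *dev, struct scatterlist *src,
                       struct scatterlist *dst, int src_nents, int dst_nents,
                       dma_addr_t iv_dma, int ivsize,
                       dma_addr_t sec4_sg_dma, int sec4_sg_bytes)
{
        if (dst != src) {
                if (src_nents)
                        dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
                if (dst_nents)
                        dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
        } else {
                /* in-place: one bidirectional mapping covers both roles */
                dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
        }

        if (iv_dma)
                dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
        if (sec4_sg_bytes)
                dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
                                 DMA_TO_DEVICE);
}
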
1052 struct aead_edesc *edesc; in aead_crypt_done() local
1058 edesc = rctx->edesc; in aead_crypt_done()
1059 has_bklog = edesc->bklog; in aead_crypt_done()
1064 aead_unmap(jrdev, edesc, req); in aead_crypt_done()
1066 kfree(edesc); in aead_crypt_done()
1078 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc) in skcipher_edesc_iv() argument
1081 return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, in skcipher_edesc_iv()
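
This helper recovers the IV slot from the edesc's trailing storage: the link table starts right after the hw descriptor, and the IV is the first suitably aligned address past the table. The alignment argument is truncated in the listing; dma_get_cache_alignment() is assumed here:

/* Sketch: IV lives past the sec4 link table, rounded up to a DMA-safe boundary. */
static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
{
        return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
                         dma_get_cache_alignment()); /* assumed alignment source */
}

Keeping the IV on its own cacheline matters because the engine writes the updated IV back by DMA while the CPU later reads it in skcipher_crypt_done() (line 1112).
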
1089 struct skcipher_edesc *edesc; in skcipher_crypt_done() local
1099 edesc = rctx->edesc; in skcipher_crypt_done()
1100 has_bklog = edesc->bklog; in skcipher_crypt_done()
1104 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt_done()
1112 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize); in skcipher_crypt_done()
1121 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_crypt_done()
1123 kfree(edesc); in skcipher_crypt_done()
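
The completion path reads rctx->edesc, latches bklog before freeing, unmaps, copies the output IV back into req->iv (CBC's last ciphertext block, CTR's next counter, which chained modes such as CTS rely on), frees the edesc, and only then completes either directly or through the crypto engine. A condensed sketch of that ordering; it assumes the newer-kernel variant where caam_jr_strstatus() returns the translated error:

/* Sketch of the completion callback's ordering (debug prints trimmed). */
static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
                                void *context)
{
        struct skcipher_request *req = context;
        struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
        struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
        struct skcipher_edesc *edesc = rctx->edesc;
        int ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(req));
        bool has_bklog = edesc->bklog;  /* latch before kfree() below */
        int ecode = err ? caam_jr_strstatus(jrdev, err) : 0;

        skcipher_unmap(jrdev, edesc, req);

        /* Per the crypto API contract, hand back the updated IV so a
         * chained call can continue the stream. Unmap first: the CPU
         * must not read DMA'd memory before the mapping is torn down. */
        if (ivsize && !ecode)
                memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);

        kfree(edesc);

        if (!has_bklog)
                skcipher_request_complete(req, ecode);
        else
                crypto_finalize_skcipher_request(jrp->engine, req, ecode);
}
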
1139 struct aead_edesc *edesc, in init_aead_job() argument
1145 u32 *desc = edesc->hw_desc; in init_aead_job()
1159 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : in init_aead_job()
1163 src_dma = edesc->sec4_sg_dma; in init_aead_job()
1164 sec4_sg_index += edesc->mapped_src_nents; in init_aead_job()
1175 if (!edesc->mapped_dst_nents) { in init_aead_job()
1178 } else if (edesc->mapped_dst_nents == 1) { in init_aead_job()
1182 dst_dma = edesc->sec4_sg_dma + in init_aead_job()
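
The fragments above encode the source/destination pointer selection: a fully contiguous mapped source is handed to the SEQ IN PTR command directly, otherwise the job points at the sec4 link table with the scatter/gather flag; the destination reuses the source pointer for in-place operation, and out-of-place it is either absent, a single segment, or an offset into the same table. A sketch of that decision tree as the fragments suggest (variable declarations and length math elided):

/* Sketch: direct pointer vs. link table when building the job descriptor. */
if (all_contig) {
        src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : 0;
        in_options = 0;
} else {
        src_dma = edesc->sec4_sg_dma;
        sec4_sg_index += edesc->mapped_src_nents;
        in_options = LDST_SGF;          /* tell CAAM it's a link table */
}
append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, in_options);

dst_dma = src_dma;                      /* in-place by default */
out_options = in_options;
if (unlikely(req->src != req->dst)) {
        if (!edesc->mapped_dst_nents) {
                dst_dma = 0;
                out_options = 0;
        } else if (edesc->mapped_dst_nents == 1) {
                dst_dma = sg_dma_address(req->dst);
                out_options = 0;
        } else {
                /* dst entries follow the src entries in the same table */
                dst_dma = edesc->sec4_sg_dma +
                          sec4_sg_index * sizeof(struct sec4_sg_entry);
                out_options = LDST_SGF;
        }
}

init_gcm_job(), init_chachapoly_job(), and init_authenc_job() (lines 1210, 1239, 1295) all delegate to this routine and then append their algorithm-specific pieces.
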
1200 struct aead_edesc *edesc, in init_gcm_job() argument
1206 u32 *desc = edesc->hw_desc; in init_gcm_job()
1210 init_aead_job(req, edesc, all_contig, encrypt); in init_gcm_job()
1230 struct aead_edesc *edesc, bool all_contig, in init_chachapoly_job() argument
1236 u32 *desc = edesc->hw_desc; in init_chachapoly_job()
1239 init_aead_job(req, edesc, all_contig, encrypt); in init_chachapoly_job()
1264 struct aead_edesc *edesc, in init_authenc_job() argument
1277 u32 *desc = edesc->hw_desc; in init_authenc_job()
1295 init_aead_job(req, edesc, all_contig, encrypt); in init_authenc_job()
1317 struct skcipher_edesc *edesc, in init_skcipher_job() argument
1324 u32 *desc = !ctx->is_blob ? edesc->hw_desc : in init_skcipher_job()
1325 (u32 *)((u8 *)edesc->hw_desc + CAAM_DESC_BYTES_MAX); in init_skcipher_job()
1335 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1339 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
1342 if (ivsize || edesc->mapped_src_nents > 1) { in init_skcipher_job()
1343 src_dma = edesc->sec4_sg_dma; in init_skcipher_job()
1344 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; in init_skcipher_job()
1353 } else if (!ivsize && edesc->mapped_dst_nents == 1) { in init_skcipher_job()
1356 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * in init_skcipher_job()
1369 cnstr_desc_protected_blob_decap(edesc->hw_desc, &ctx->cdata, desc_dma); in init_skcipher_job()
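
Two things stand out in this group. The dual descriptor pointer on lines 1324-1325, together with ctx->is_blob and cnstr_desc_protected_blob_decap() (line 1369), looks like an NXP downstream extension that stacks a second, blob-decapsulation descriptor CAAM_DESC_BYTES_MAX past the first; it is not part of the mainline routine. The rest is the source-pointer selection: as soon as there is an IV or more than one mapped source segment, the job must go through the link table, with the IV occupying the first entry. A sketch of that selection under those assumptions:

/* Sketch: with an IV the input chain is [IV entry][src entries] in the table. */
if (ivsize || edesc->mapped_src_nents > 1) {
        src_dma = edesc->sec4_sg_dma;
        sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
        in_options = LDST_SGF;
} else {
        src_dma = sg_dma_address(req->src);
        in_options = 0;
}
append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

if (likely(req->src == req->dst)) {
        /* in-place: same table, but skip over the leading IV entry */
        dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
        out_options = in_options;
} else if (!ivsize && edesc->mapped_dst_nents == 1) {
        dst_dma = sg_dma_address(req->dst);
        out_options = 0;
} else {
        dst_dma = edesc->sec4_sg_dma +
                  sec4_sg_index * sizeof(struct sec4_sg_entry);
        out_options = LDST_SGF;
}
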
1397 struct aead_edesc *edesc; in aead_edesc_alloc() local
1479 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags); in aead_edesc_alloc()
1480 if (!edesc) { in aead_edesc_alloc()
1486 edesc->src_nents = src_nents; in aead_edesc_alloc()
1487 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1488 edesc->mapped_src_nents = mapped_src_nents; in aead_edesc_alloc()
1489 edesc->mapped_dst_nents = mapped_dst_nents; in aead_edesc_alloc()
1490 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + in aead_edesc_alloc()
1493 rctx->edesc = edesc; in aead_edesc_alloc()
1500 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1505 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1509 return edesc; in aead_edesc_alloc()
1511 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1513 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1515 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
1516 kfree(edesc); in aead_edesc_alloc()
1520 edesc->sec4_sg_bytes = sec4_sg_bytes; in aead_edesc_alloc()
1522 return edesc; in aead_edesc_alloc()
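
These allocation lines show the single-kzalloc layout in action: edesc header, descriptor storage, then the link table, all in one allocation, with the table DMA-mapped only when it is actually used (the line-1509 early return covers the all-contiguous case). A sketch of the allocator's tail as the fragments suggest; the table-population step between the two returns is abbreviated:

/* Sketch: one allocation holds edesc + hw descriptor + link table. */
edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
if (!edesc)
        return ERR_PTR(-ENOMEM);

edesc->src_nents = src_nents;
edesc->dst_nents = dst_nents;
edesc->mapped_src_nents = mapped_src_nents;
edesc->mapped_dst_nents = mapped_dst_nents;
edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + desc_bytes;
rctx->edesc = edesc;

/* ... fill edesc->sec4_sg from req->src / req->dst (lines 1500, 1505) ... */

if (!sec4_sg_bytes)
        return edesc;                   /* all contiguous: no table needed */

edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
                                    sec4_sg_bytes, DMA_TO_DEVICE);
if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
        aead_unmap(jrdev, edesc, req);  /* undo the scatterlist mappings */
        kfree(edesc);
        return ERR_PTR(-ENOMEM);
}
edesc->sec4_sg_bytes = sec4_sg_bytes;
return edesc;
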
1529 struct aead_edesc *edesc = rctx->edesc; in aead_enqueue_req() local
1530 u32 *desc = edesc->hw_desc; in aead_enqueue_req()
1545 aead_unmap(jrdev, edesc, req); in aead_enqueue_req()
1546 kfree(rctx->edesc); in aead_enqueue_req()
1554 struct aead_edesc *edesc; in chachapoly_crypt() local
1561 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, in chachapoly_crypt()
1563 if (IS_ERR(edesc)) in chachapoly_crypt()
1564 return PTR_ERR(edesc); in chachapoly_crypt()
1566 desc = edesc->hw_desc; in chachapoly_crypt()
1568 init_chachapoly_job(req, edesc, all_contig, encrypt); in chachapoly_crypt()
1588 struct aead_edesc *edesc; in aead_crypt() local
1595 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, in aead_crypt()
1597 if (IS_ERR(edesc)) in aead_crypt()
1598 return PTR_ERR(edesc); in aead_crypt()
1601 init_authenc_job(req, edesc, all_contig, encrypt); in aead_crypt()
1604 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in aead_crypt()
1605 desc_bytes(edesc->hw_desc), 1); in aead_crypt()
1625 u32 *desc = rctx->edesc->hw_desc; in aead_do_one_req()
1628 rctx->edesc->bklog = true; in aead_do_one_req()
1636 aead_unmap(ctx->jrdev, rctx->edesc, req); in aead_do_one_req()
1637 kfree(rctx->edesc); in aead_do_one_req()
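
aead_do_one_req() is the crypto-engine callback for backlogged requests; setting bklog = true before enqueue (line 1628) is what makes the completion callback (line 1059) finalize through the engine instead of completing directly. A sketch of the expected shape; the context accessor varies by kernel version, and the -ENOSPC retry check mirrors the retry-capable engine path. skcipher_do_one_req() (lines 1862-1874) follows the same pattern:

/* Sketch: crypto-engine entry point for a previously backlogged request. */
static int aead_do_one_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = aead_request_cast(areq);
        struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
        u32 *desc = rctx->edesc->hw_desc;
        int ret;

        rctx->edesc->bklog = true;      /* completion must go through the engine */

        ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
        if (ret == -ENOSPC && engine->retry_support)
                return ret;             /* engine requeues and retries later */

        if (ret != -EINPROGRESS) {
                /* never reached the job ring: clean up here */
                aead_unmap(ctx->jrdev, rctx->edesc, req);
                kfree(rctx->edesc);
        } else {
                ret = 0;
        }
        return ret;
}
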
1647 struct aead_edesc *edesc; in gcm_crypt() local
1654 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, in gcm_crypt()
1656 if (IS_ERR(edesc)) in gcm_crypt()
1657 return PTR_ERR(edesc); in gcm_crypt()
1660 init_gcm_job(req, edesc, all_contig, encrypt); in gcm_crypt()
1663 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in gcm_crypt()
1664 desc_bytes(edesc->hw_desc), 1); in gcm_crypt()
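
chachapoly_crypt(), aead_crypt(), and gcm_crypt() all follow one template: allocate the edesc with an algorithm-specific descriptor-length budget (CHACHAPOLY_DESC_JOB_IO_LEN, AUTHENC_DESC_JOB_IO_LEN, GCM_DESC_JOB_IO_LEN), build the job with the matching init_*_job() helper, hex-dump it for debug, and hand off to the shared aead_enqueue_req(), which on synchronous failure unmaps and frees (lines 1545-1546). A sketch of the template using the gcm flavor; the enqueue helper is assumed to take the job-ring device:

/* Sketch: the common alloc -> init -> enqueue template, gcm instance. */
static int gcm_crypt(struct aead_request *req, bool encrypt)
{
        struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct aead_edesc *edesc;
        bool all_contig;

        edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
                                 encrypt);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        init_gcm_job(req, edesc, all_contig, encrypt);

        print_hex_dump_debug("aead jobdesc@" __stringify(__LINE__) ": ",
                             DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
                             desc_bytes(edesc->hw_desc), 1);

        return aead_enqueue_req(ctx->jrdev, req);
}
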
1702 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1781 aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes; in skcipher_edesc_alloc()
1786 edesc = kzalloc(aligned_size, flags); in skcipher_edesc_alloc()
1787 if (!edesc) { in skcipher_edesc_alloc()
1794 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1795 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1796 edesc->mapped_src_nents = mapped_src_nents; in skcipher_edesc_alloc()
1797 edesc->mapped_dst_nents = mapped_dst_nents; in skcipher_edesc_alloc()
1798 edesc->sec4_sg_bytes = sec4_sg_bytes; in skcipher_edesc_alloc()
1799 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + in skcipher_edesc_alloc()
1801 rctx->edesc = edesc; in skcipher_edesc_alloc()
1805 iv = skcipher_edesc_iv(edesc); in skcipher_edesc_alloc()
1813 kfree(edesc); in skcipher_edesc_alloc()
1817 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); in skcipher_edesc_alloc()
1820 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1824 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1828 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1832 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1836 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1839 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1843 kfree(edesc); in skcipher_edesc_alloc()
1848 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1851 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, in skcipher_edesc_alloc()
1854 return edesc; in skcipher_edesc_alloc()
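
The skcipher allocator adds the IV step the aead one lacks: aligned_size (line 1781) pads the single allocation so the slot returned by skcipher_edesc_iv() is cache-aligned, the caller's IV is copied in and mapped bidirectionally (the engine writes the updated IV back through a trailing link-table entry, lines 1828-1832), and the chain is terminated with sg_to_sec4_set_last(). A condensed sketch of the IV wiring; the padding arithmetic before kzalloc and the dst-side entries are abbreviated:

/* Sketch: IV handling inside the single-allocation skcipher edesc. */
edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
                                          desc_bytes);
rctx->edesc = edesc;

if (ivsize) {
        u8 *iv = skcipher_edesc_iv(edesc); /* cache-aligned tail slot */

        memcpy(iv, req->iv, ivsize);
        iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(jrdev, iv_dma)) {
                kfree(edesc);
                return ERR_PTR(-ENOMEM);
        }
        /* IV is always the first link-table entry on the input side */
        dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
}

sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + !!ivsize, 0);
/* ... dst entries at dst_sg_idx, trailing IV write-back entry,
 * sg_to_sec4_set_last(), then dma_map_single() of the whole table ... */
edesc->iv_dma = iv_dma;
return edesc;
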
1862 u32 *desc = rctx->edesc->hw_desc; in skcipher_do_one_req()
1865 rctx->edesc->bklog = true; in skcipher_do_one_req()
1873 skcipher_unmap(ctx->jrdev, rctx->edesc, req); in skcipher_do_one_req()
1874 kfree(rctx->edesc); in skcipher_do_one_req()
1892 struct skcipher_edesc *edesc; in skcipher_crypt() local
1931 edesc = skcipher_edesc_alloc(req, len); in skcipher_crypt()
1932 if (IS_ERR(edesc)) in skcipher_crypt()
1933 return PTR_ERR(edesc); in skcipher_crypt()
1936 init_skcipher_job(req, edesc, encrypt); in skcipher_crypt()
1939 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in skcipher_crypt()
1940 desc_bytes(edesc->hw_desc), 1); in skcipher_crypt()
1942 desc = edesc->hw_desc; in skcipher_crypt()
1955 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt()
1956 kfree(edesc); in skcipher_crypt()
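
The top-level skcipher_crypt() ties the lifecycle together: allocate, build the job (with the downstream blob path selecting between the two stacked descriptors, line 1324), then enqueue. A synchronous enqueue failure is the one place the caller itself unmaps and frees (lines 1955-1956); on success, ownership of the edesc passes to skcipher_crypt_done(). A sketch of that tail, assuming the direct (non-engine) submission path:

/* Sketch: submit path; on failure ownership stays here, on success it
 * transfers to the completion callback, which unmaps and frees. */
edesc = skcipher_edesc_alloc(req, len);
if (IS_ERR(edesc))
        return PTR_ERR(edesc);

init_skcipher_job(req, edesc, encrypt);

desc = edesc->hw_desc;
ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
if (ret != -EINPROGRESS) {
        /* the job never reached the ring: clean up locally */
        skcipher_unmap(jrdev, edesc, req);
        kfree(edesc);
}
return ret;
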