/linux/drivers/crypto/caam/

caamhash.c
  544   int src_nents;    member
  557   if (edesc->src_nents)    in ahash_unmap()
  558   dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);    in ahash_unmap()
  832   int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index;    in ahash_update_ctx() local
  854   src_nents = sg_nents_for_len(req->src, src_len);    in ahash_update_ctx()
  855   if (src_nents < 0) {    in ahash_update_ctx()
  857   return src_nents;    in ahash_update_ctx()
  860   if (src_nents) {    in ahash_update_ctx()
  861   mapped_nents = dma_map_sg(jrdev, req->src, src_nents,    in ahash_update_ctx()
  882   dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);    in ahash_update_ctx()
  [all …]

caamalg_qi2.c
  151   struct scatterlist *dst, int src_nents,    in caam_unmap() argument
  157   if (src_nents)    in caam_unmap()
  158   dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);    in caam_unmap()
  162   dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);    in caam_unmap()
  364   int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;    in aead_edesc_alloc() local
  385   src_nents = sg_nents_for_len(req->src, src_len);    in aead_edesc_alloc()
  386   if (unlikely(src_nents < 0)) {    in aead_edesc_alloc()
  390   return ERR_PTR(src_nents);    in aead_edesc_alloc()
  401   if (src_nents) {    in aead_edesc_alloc()
  402   mapped_src_nents = dma_map_sg(dev, req->src, src_nents,    in aead_edesc_alloc()
  [all …]

caamalg_qi.c
  803   int src_nents;    member
  825   int src_nents;    member
  873   struct scatterlist *dst, int src_nents,    in caam_unmap() argument
  879   if (src_nents)    in caam_unmap()
  880   dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);    in caam_unmap()
  884   dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);    in caam_unmap()
  900   caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,    in aead_unmap()
  912   caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,    in skcipher_unmap()
  951   int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;    in aead_edesc_alloc() local
  977   src_nents = sg_nents_for_len(req->src, src_len);    in aead_edesc_alloc()
  [all …]

caamalg_qi2.h
  109   int src_nents;    member
  129   int src_nents;    member
  146   int src_nents;    member

caamalg.c
  898   int src_nents;    member
  924   int src_nents;    member
  937   struct scatterlist *dst, int src_nents,    in caam_unmap() argument
  943   if (src_nents)    in caam_unmap()
  944   dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);    in caam_unmap()
  948   dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);    in caam_unmap()
  963   edesc->src_nents, edesc->dst_nents, 0, 0,    in aead_unmap()
  974   edesc->src_nents, edesc->dst_nents,    in skcipher_unmap()
  1266  (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);    in init_skcipher_job()
  1270  edesc->src_nents > 1 ? 100 : req->cryptlen, 1);    in init_skcipher_job()
  [all …]

caampkc.h
  132   int src_nents;    member

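The caam hits above all follow one lifecycle: count the entries that cover the
request with sg_nents_for_len(), map them with dma_map_sg(), and unmap on
completion with the original count. Below is a minimal sketch of that shape;
the demo_* helper names are hypothetical, and the direction rule mirrors
caam_unmap(), where an in-place request (src == dst) is mapped once,
bidirectionally.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Unmap rule as in caam_unmap(): separate buffers get one direction
 * each; an in-place request was mapped DMA_BIDIRECTIONAL. */
static void demo_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents, int dst_nents)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

/* Count and map the source, as ahash_update_ctx() does above.
 * Returns the mapped entry count, 0 if there is nothing to map,
 * or a negative errno. */
static int demo_map_src(struct device *dev, struct scatterlist *src,
			unsigned int src_len, int *src_nents)
{
	int nents, mapped;

	nents = sg_nents_for_len(src, src_len);	/* < 0 if the list is short */
	if (nents < 0)
		return nents;

	if (!nents) {
		*src_nents = 0;
		return 0;	/* nothing to map, as the 'if (src_nents)' guard above */
	}

	mapped = dma_map_sg(dev, src, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -ENOMEM;

	*src_nents = nents;	/* keep for dma_unmap_sg() */
	return mapped;		/* entries actually mapped (may coalesce) */
}

Note that dma_unmap_sg() must be passed the nents that was given to
dma_map_sg(), not its return value; that is why these drivers carry src_nents
and mapped_nents (or mapped_src_nents) as separate fields.
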
/linux/drivers/crypto/qce/

aead.c
  49    dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);    in qce_aead_done()
  239   rctx->src_nents = sg_nents_for_len(req->src, totallen) + 1;    in qce_aead_ccm_prepare_buf_assoclen()
  241   rctx->src_nents = sg_nents_for_len(req->src, totallen) + 2;    in qce_aead_ccm_prepare_buf_assoclen()
  244   ret = sg_alloc_table(&rctx->src_tbl, rctx->src_nents, gfp);    in qce_aead_ccm_prepare_buf_assoclen()
  279   rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen);    in qce_aead_ccm_prepare_buf_assoclen()
  289   rctx->dst_nents = rctx->src_nents + 1;    in qce_aead_ccm_prepare_buf_assoclen()
  291   rctx->dst_nents = rctx->src_nents;    in qce_aead_ccm_prepare_buf_assoclen()
  316   rctx->src_nents = sg_nents_for_len(req->src, totallen);    in qce_aead_prepare_buf()
  317   if (rctx->src_nents < 0) {    in qce_aead_prepare_buf()
  323   rctx->src_nents = rctx->dst_nents - 1;    in qce_aead_prepare_buf()
  [all …]

skcipher.c
  50    dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);    in qce_skcipher_done()
  75    int dst_nents, src_nents, ret;    in qce_skcipher_async_req_handle() local
  85    rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);    in qce_skcipher_async_req_handle()
  89    rctx->dst_nents = rctx->src_nents;    in qce_skcipher_async_req_handle()
  90    if (rctx->src_nents < 0) {    in qce_skcipher_async_req_handle()
  92    return rctx->src_nents;    in qce_skcipher_async_req_handle()
  133   src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);    in qce_skcipher_async_req_handle()
  134   if (!src_nents) {    in qce_skcipher_async_req_handle()
  141   src_nents = dst_nents - 1;    in qce_skcipher_async_req_handle()
  144   ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents,    in qce_skcipher_async_req_handle()
  [all …]

sha.c
  53    dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);    in qce_ahash_done()
  93    rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);    in qce_ahash_async_req_handle()
  94    if (rctx->src_nents < 0) {    in qce_ahash_async_req_handle()
  96    return rctx->src_nents;    in qce_ahash_async_req_handle()
  99    ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);    in qce_ahash_async_req_handle()
  111   ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,    in qce_ahash_async_req_handle()
  129   dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);    in qce_ahash_async_req_handle()

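The sha.c hits trace one complete request setup, including the unwind when DMA
preparation fails. A hedged sketch of that shape; issue_dma() is a hypothetical
stand-in for qce_dma_prep_sgs(), whose real signature is not shown in this
listing.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Hypothetical stand-in for qce_dma_prep_sgs(); always succeeds here. */
static int issue_dma(struct device *dev, struct scatterlist *sg, int nents)
{
	return 0;
}

static int demo_hash_handle(struct device *dev, struct scatterlist *src,
			    unsigned int nbytes)
{
	int src_nents, ret;

	src_nents = sg_nents_for_len(src, nbytes);
	if (src_nents < 0)
		return src_nents;

	ret = dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE);
	if (!ret)
		return -ENOMEM;	/* dma_map_sg() returns 0 on failure */

	ret = issue_dma(dev, src, src_nents);
	if (ret)
		goto error_unmap;

	return 0;

error_unmap:
	/* same call qce_ahash_done() later makes on the success path */
	dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
	return ret;
}
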
cipher.h
  38    int src_nents;    member

sha.h
  50    int src_nents;    member

aead.h
  30    int src_nents;    member

/linux/drivers/crypto/virtio/

virtio_crypto_skcipher_algs.c
  333   int src_nents, dst_nents;    in __virtio_crypto_skcipher_do_req() local
  343   src_nents = sg_nents_for_len(req->src, req->cryptlen);    in __virtio_crypto_skcipher_do_req()
  344   if (src_nents < 0) {    in __virtio_crypto_skcipher_do_req()
  346   return src_nents;    in __virtio_crypto_skcipher_do_req()
  352   src_nents, dst_nents);    in __virtio_crypto_skcipher_do_req()
  355   sg_total = src_nents + dst_nents + 3;    in __virtio_crypto_skcipher_do_req()
  435   for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--)    in __virtio_crypto_skcipher_do_req()

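Here src_nents feeds request-size accounting rather than DMA mapping: the
submission needs one slot per data entry plus three driver-owned entries (the
listing does not show what the three hold). A small sketch of the sg walk at
line 435, collecting source entries into a flat array of the kind a virtqueue
submission takes; demo_collect_src() is a hypothetical name.

#include <linux/scatterlist.h>

static unsigned int demo_collect_src(struct scatterlist **sgs,
				     struct scatterlist *src, int src_nents)
{
	struct scatterlist *sg;
	unsigned int n = 0;

	/* same loop shape as line 435 above */
	for (sg = src; src_nents; sg = sg_next(sg), src_nents--)
		sgs[n++] = sg;

	return n;
}
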
/linux/drivers/crypto/aspeed/

aspeed-hace-crypto.c
  141   dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);    in aspeed_sk_transfer_sg()
  143   dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);    in aspeed_sk_transfer_sg()
  192   nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,    in aspeed_sk_start()
  197   "nb_in_sg", rctx->src_nents,    in aspeed_sk_start()
  243   rctx->src_nents, DMA_BIDIRECTIONAL);    in aspeed_sk_start_sg()
  252   rctx->src_nents, DMA_TO_DEVICE);    in aspeed_sk_start_sg()
  344   dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,    in aspeed_sk_start_sg()
  350   dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,    in aspeed_sk_start_sg()
  357   dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);    in aspeed_sk_start_sg()
  381   rctx->src_nents = sg_nents(req->src);    in aspeed_hace_skcipher_trigger()

aspeed-hace.h
  184   int src_nents;    member
  238   int src_nents;    member

aspeed-hace-hash.c
  181   sg_len = dma_map_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,    in aspeed_ahash_dma_prepare_sg()
  265   dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,    in aspeed_ahash_dma_prepare_sg()
  461   dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents,    in aspeed_ahash_update_resume_sg()
  593   rctx->src_nents = sg_nents(req->src);    in aspeed_sham_update()

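Unlike most drivers above, aspeed counts with sg_nents(), which walks the whole
chain and cannot fail, rather than sg_nents_for_len(), which bounds the count by
the request length and returns -EINVAL when the chain is too short. A tiny
illustrative comparison; demo_surplus() is a hypothetical name.

#include <linux/scatterlist.h>

/* How many entries trail past the first nbytes of data? */
static int demo_surplus(struct scatterlist *src, unsigned int nbytes)
{
	int n_len = sg_nents_for_len(src, nbytes);

	if (n_len < 0)
		return n_len;	/* chain shorter than nbytes: -EINVAL */

	return sg_nents(src) - n_len;	/* sg_nents() never fails */
}
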
/linux/drivers/crypto/marvell/cesa/

hash.c
  105   dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);    in mv_cesa_ahash_dma_cleanup()
  204   engine, req->src, creq->src_nents,    in mv_cesa_ahash_std_step()
  427   sg_pcopy_to_buffer(ahashreq->src, creq->src_nents,    in mv_cesa_ahash_req_cleanup()
  481   sg_pcopy_to_buffer(req->src, creq->src_nents,    in mv_cesa_ahash_cache_req()
  646   if (creq->src_nents) {    in mv_cesa_ahash_dma_req_init()
  647   ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,    in mv_cesa_ahash_dma_req_init()
  753   dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);    in mv_cesa_ahash_dma_req_init()
  765   creq->src_nents = sg_nents_for_len(req->src, req->nbytes);    in mv_cesa_ahash_req_init()
  766   if (creq->src_nents < 0) {    in mv_cesa_ahash_req_init()
  768   return creq->src_nents;    in mv_cesa_ahash_req_init()

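The two sg_pcopy_to_buffer() hits show the other common use of src_nents:
bounding a copy out of the scatterlist into a linear buffer, here to cache
bytes that do not yet fill a hash block. A sketch under assumed names;
demo_cache_tail() and the cache geometry are hypothetical.

#include <linux/scatterlist.h>

static size_t demo_cache_tail(struct scatterlist *src, int src_nents,
			      u8 *cache, size_t cache_size,
			      unsigned int offset, size_t tail_len)
{
	if (tail_len > cache_size)
		tail_len = cache_size;

	/* copy tail_len bytes starting at byte 'offset' of the list;
	 * returns the number of bytes actually copied */
	return sg_pcopy_to_buffer(src, src_nents, cache, tail_len, offset);
}
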
cesa.h
  573   int src_nents;    member
  619   int src_nents;    member

/linux/drivers/crypto/ccp/

ccp-dmaengine.c
  356   unsigned int src_nents,    in ccp_create_desc() argument
  375   if (!dst_nents || !src_nents)    in ccp_create_desc()
  392   src_nents--;    in ccp_create_desc()
  393   if (!src_nents)    in ccp_create_desc()

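Here src_nents is loop state rather than a mapping count: ccp_create_desc()
consumes the source and destination lists in lockstep, decrementing the count
as each entry drains. A simplified sketch of that walk over already-mapped
lists; descriptor creation itself is elided and demo_walk() is a hypothetical
name.

#include <linux/scatterlist.h>

static void demo_walk(struct scatterlist *src, int src_nents,
		      struct scatterlist *dst, int dst_nents)
{
	unsigned int src_len = sg_dma_len(src);
	unsigned int dst_len = sg_dma_len(dst);

	while (src_nents && dst_nents) {
		unsigned int len = src_len < dst_len ? src_len : dst_len;

		/* a real driver would emit one DMA descriptor for 'len'
		 * bytes here, using sg_dma_address() of both entries */

		src_len -= len;
		if (!src_len) {
			src = sg_next(src);
			src_nents--;	/* as at line 392 above */
			if (!src_nents || !src)
				break;
			src_len = sg_dma_len(src);
		}

		dst_len -= len;
		if (!dst_len) {
			dst = sg_next(dst);
			dst_nents--;
			if (!dst_nents || !dst)
				break;
			dst_len = sg_dma_len(dst);
		}
	}
}
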
/linux/drivers/crypto/starfive/

jh7110-aes.c
  506   unsigned int len, src_nents, dst_nents;    in starfive_aes_map_sg() local
  512   src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL);    in starfive_aes_map_sg()
  513   if (src_nents == 0)    in starfive_aes_map_sg()
  517   dst_nents = src_nents;    in starfive_aes_map_sg()
  529   src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE);    in starfive_aes_map_sg()
  530   if (src_nents == 0)    in starfive_aes_map_sg()

jh7110-hash.c
  203   int ret, src_nents, i;    in starfive_hash_one_request() local
  230   src_nents = dma_map_sg(cryp->dev, tsg, 1, DMA_TO_DEVICE);    in starfive_hash_one_request()
  231   if (src_nents == 0)    in starfive_hash_one_request()

/linux/crypto/

adiantum.c
  345   const unsigned int src_nents = sg_nents(src);    in adiantum_crypt() local
  362   if (src_nents == 1 && src->offset + req->cryptlen <= PAGE_SIZE) {    in adiantum_crypt()
  372   err = adiantum_hash_message(req, src, src_nents, &digest);    in adiantum_crypt()

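adiantum uses src_nents only to spot a fast path: when the whole message sits
in a single scatterlist entry and does not cross a page boundary, it can be
read through one flat pointer instead of walking the list. A sketch of that
test; demo_flat_ptr() is a hypothetical name, and sg_virt() assumes the
underlying page is directly addressable.

#include <linux/mm.h>
#include <linux/scatterlist.h>

static const u8 *demo_flat_ptr(struct scatterlist *src, unsigned int len)
{
	/* same condition as line 362 above */
	if (sg_nents(src) == 1 && src->offset + len <= PAGE_SIZE)
		return sg_virt(src);	/* linear view of the data */

	return NULL;			/* caller must walk the list */
}
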
/linux/drivers/crypto/

talitos.h
  61    int src_nents;    member

/linux/drivers/crypto/bcm/

cipher.h
  287   int src_nents;  /* Number of src entries with data */    member