Searched refs: src_nents (Results 1 – 24 of 24) sorted by relevance

/linux/drivers/crypto/caam/
caamhash.c
544 int src_nents; member
557 if (edesc->src_nents) in ahash_unmap()
558 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
832 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
854 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
855 if (src_nents < 0) { in ahash_update_ctx()
857 return src_nents; in ahash_update_ctx()
860 if (src_nents) { in ahash_update_ctx()
861 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
882 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
[all …]
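Taken together, the caamhash.c hits show the standard scatterlist setup/teardown sequence: sg_nents_for_len() counts the entries needed to cover the request length (a negative return means the list is too short), dma_map_sg() is called only when that count is nonzero and returns 0 on failure, and dma_unmap_sg() is later passed the original (pre-mapping) count. A minimal sketch of that pattern, with hypothetical helper names (my_map_src/my_unmap_src are not CAAM functions):

	#include <linux/dma-mapping.h>
	#include <linux/errno.h>
	#include <linux/scatterlist.h>

	/* Hypothetical helper mirroring the count/map error checks above. */
	static int my_map_src(struct device *dev, struct scatterlist *src,
			      unsigned int len, int *src_nents, int *mapped_nents)
	{
		*src_nents = sg_nents_for_len(src, len); /* < 0: list shorter than len */
		if (*src_nents < 0)
			return *src_nents;

		*mapped_nents = 0;
		if (*src_nents) {
			*mapped_nents = dma_map_sg(dev, src, *src_nents,
						   DMA_TO_DEVICE);
			if (!*mapped_nents)	/* 0 means the mapping failed */
				return -ENOMEM;
		}
		return 0;
	}

	/* Teardown, as in ahash_unmap(): unmap with the original count. */
	static void my_unmap_src(struct device *dev, struct scatterlist *src,
				 int src_nents)
	{
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
	}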
caamalg_qi2.c
151 struct scatterlist *dst, int src_nents, in caam_unmap() argument
157 if (src_nents) in caam_unmap()
158 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
162 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
364 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
385 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
386 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
390 return ERR_PTR(src_nents); in aead_edesc_alloc()
401 if (src_nents) { in aead_edesc_alloc()
402 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
[all …]
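The caam_unmap() hits in this file and the other CAAM backends encode the in-place versus out-of-place convention: when req->dst is the same list as req->src it was mapped once as DMA_BIDIRECTIONAL, otherwise the source was mapped DMA_TO_DEVICE and the destination DMA_FROM_DEVICE. A hedged sketch of just that direction logic (the real caam_unmap() also releases IV and S/G-table DMA buffers):

	/* Sketch of the caam_unmap() direction choice; other cleanup omitted. */
	static void my_caam_style_unmap(struct device *dev,
					struct scatterlist *src,
					struct scatterlist *dst,
					int src_nents, int dst_nents)
	{
		if (dst != src) {
			if (src_nents)
				dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
			if (dst_nents)
				dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
		} else if (src_nents) {
			/* In-place: one bidirectional mapping covered both. */
			dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
		}
	}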
caamalg_qi.c
803 int src_nents; member
825 int src_nents; member
873 struct scatterlist *dst, int src_nents, in caam_unmap() argument
879 if (src_nents) in caam_unmap()
880 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
884 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
900 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
912 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
951 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
977 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
[all …]
caamalg_qi2.h
109 int src_nents; member
129 int src_nents; member
146 int src_nents; member
caamalg.c
898 int src_nents; member
924 int src_nents; member
937 struct scatterlist *dst, int src_nents, in caam_unmap() argument
943 if (src_nents) in caam_unmap()
944 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
948 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
963 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
974 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1266 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1270 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
[all …]
caampkc.h
132 int src_nents; member
/linux/drivers/crypto/qce/
aead.c
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_aead_done()
239 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 1; in qce_aead_ccm_prepare_buf_assoclen()
241 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 2; in qce_aead_ccm_prepare_buf_assoclen()
244 ret = sg_alloc_table(&rctx->src_tbl, rctx->src_nents, gfp); in qce_aead_ccm_prepare_buf_assoclen()
279 rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen); in qce_aead_ccm_prepare_buf_assoclen()
289 rctx->dst_nents = rctx->src_nents + 1; in qce_aead_ccm_prepare_buf_assoclen()
291 rctx->dst_nents = rctx->src_nents; in qce_aead_ccm_prepare_buf_assoclen()
316 rctx->src_nents = sg_nents_for_len(req->src, totallen); in qce_aead_prepare_buf()
317 if (rctx->src_nents < 0) { in qce_aead_prepare_buf()
323 rctx->src_nents = rctx->dst_nents - 1; in qce_aead_prepare_buf()
[all …]
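The CCM hits above size the source table one or two entries beyond what sg_nents_for_len() reports so the driver can prepend its own buffers, and make dst_nents one larger than src_nents when an authentication tag is appended. A hypothetical illustration of reserving an extra slot with sg_alloc_table() (my_build_src_table is not a qce function):

	#include <linux/scatterlist.h>

	/* Hypothetical: build a table with one driver-owned header buffer
	 * prepended to the caller's payload entries (the "+ 1" sizing above). */
	static int my_build_src_table(struct sg_table *tbl, void *hdr,
				      unsigned int hdr_len,
				      struct scatterlist *payload,
				      int payload_nents)
	{
		struct scatterlist *sg;
		int ret;

		ret = sg_alloc_table(tbl, payload_nents + 1, GFP_KERNEL);
		if (ret)
			return ret;

		sg_set_buf(tbl->sgl, hdr, hdr_len);	/* slot 0: prepended header */

		sg = sg_next(tbl->sgl);
		for (; payload && payload_nents;
		     payload = sg_next(payload), payload_nents--) {
			sg_set_page(sg, sg_page(payload), payload->length,
				    payload->offset);
			sg = sg_next(sg);
		}
		return 0;
	}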
skcipher.c
50 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_skcipher_done()
75 int dst_nents, src_nents, ret; in qce_skcipher_async_req_handle() local
85 rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen); in qce_skcipher_async_req_handle()
89 rctx->dst_nents = rctx->src_nents; in qce_skcipher_async_req_handle()
90 if (rctx->src_nents < 0) { in qce_skcipher_async_req_handle()
92 return rctx->src_nents; in qce_skcipher_async_req_handle()
133 src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_skcipher_async_req_handle()
134 if (!src_nents) { in qce_skcipher_async_req_handle()
141 src_nents = dst_nents - 1; in qce_skcipher_async_req_handle()
144 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents, in qce_skcipher_async_req_handle()
[all …]
sha.c
53 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
93 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
94 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
96 return rctx->src_nents; in qce_ahash_async_req_handle()
99 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
111 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
129 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
cipher.h
38 int src_nents; member
sha.h
50 int src_nents; member
aead.h
30 int src_nents; member
/linux/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c
333 int src_nents, dst_nents; in __virtio_crypto_skcipher_do_req() local
343 src_nents = sg_nents_for_len(req->src, req->cryptlen); in __virtio_crypto_skcipher_do_req()
344 if (src_nents < 0) { in __virtio_crypto_skcipher_do_req()
346 return src_nents; in __virtio_crypto_skcipher_do_req()
352 src_nents, dst_nents); in __virtio_crypto_skcipher_do_req()
355 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_skcipher_do_req()
435 for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--) in __virtio_crypto_skcipher_do_req()
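Here src_nents feeds two things: the total descriptor count (src_nents + dst_nents + 3, the three extras being the request header, IV, and status slots the driver places around the data) and the loop bound for walking req->src with sg_next(). A small sketch of that gather loop (sgs and num stand in for the driver's pointer array and index):

	/* Gather every source entry into the virtqueue pointer array,
	 * using src_nents as the countdown exactly as the loop above does. */
	static unsigned int my_collect_entries(struct scatterlist **sgs,
					       unsigned int num,
					       struct scatterlist *src,
					       int src_nents)
	{
		struct scatterlist *sg;

		for (sg = src; src_nents; sg = sg_next(sg), src_nents--)
			sgs[num++] = sg;
		return num;
	}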
/linux/drivers/crypto/aspeed/
aspeed-hace-crypto.c
141 dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL); in aspeed_sk_transfer_sg()
143 dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in aspeed_sk_transfer_sg()
192 nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents, in aspeed_sk_start()
197 "nb_in_sg", rctx->src_nents, in aspeed_sk_start()
243 rctx->src_nents, DMA_BIDIRECTIONAL); in aspeed_sk_start_sg()
252 rctx->src_nents, DMA_TO_DEVICE); in aspeed_sk_start_sg()
344 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, in aspeed_sk_start_sg()
350 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, in aspeed_sk_start_sg()
357 dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in aspeed_sk_start_sg()
381 rctx->src_nents = sg_nents(req->src); in aspeed_hace_skcipher_trigger()
aspeed-hace.h
184 int src_nents; member
238 int src_nents; member
aspeed-hace-hash.c
181 sg_len = dma_map_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_dma_prepare_sg()
265 dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_dma_prepare_sg()
461 dma_unmap_sg(hace_dev->dev, rctx->src_sg, rctx->src_nents, in aspeed_ahash_update_resume_sg()
593 rctx->src_nents = sg_nents(req->src); in aspeed_sham_update()
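Note that the aspeed driver counts with sg_nents(req->src), which walks the whole list, whereas most hits in this search use sg_nents_for_len(), which counts only the entries needed to cover a given length. The difference, side by side (req here is assumed to be an ahash request):

	/* Count every entry in the list, regardless of how much data is used. */
	int all_entries = sg_nents(req->src);

	/* Count only the entries needed to cover nbytes; returns -EINVAL if
	 * the list holds less than nbytes. */
	int needed = sg_nents_for_len(req->src, req->nbytes);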
/linux/drivers/crypto/marvell/cesa/
hash.c
105 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
204 engine, req->src, creq->src_nents, in mv_cesa_ahash_std_step()
427 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
481 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
646 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
647 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
753 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
765 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
766 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
768 return creq->src_nents; in mv_cesa_ahash_req_init()
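Alongside the usual map/unmap calls, the CESA hash code passes src_nents to sg_pcopy_to_buffer() to pull message bytes out of the scatterlist into its cache buffer. The call shape, under the assumption that cache, len, and offset are the driver's bounce buffer, copy length, and starting offset:

	/* Copy 'len' bytes out of the list, starting 'offset' bytes in;
	 * returns the number of bytes actually copied. */
	size_t copied = sg_pcopy_to_buffer(req->src, creq->src_nents,
					   cache, len, offset);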
cesa.h
573 int src_nents; member
619 int src_nents; member
/linux/drivers/crypto/ccp/
ccp-dmaengine.c
356 unsigned int src_nents, in ccp_create_desc() argument
375 if (!dst_nents || !src_nents) in ccp_create_desc()
392 src_nents--; in ccp_create_desc()
393 if (!src_nents) in ccp_create_desc()
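In ccp_create_desc() src_nents is a countdown: the loop slices work units out of the current entry and, once an entry is exhausted, decrements the count and stops when it reaches zero. A simplified sketch of that consume-and-advance shape (the real function also tracks intra-entry offsets and walks the destination list in tandem):

	/* Simplified: emit one work item per mapped entry, stopping when
	 * src_nents runs out, as the src_nents--/break pattern above does. */
	static void my_walk_mapped_src(struct scatterlist *sg,
				       unsigned int src_nents)
	{
		while (sg && src_nents) {
			/* ... build a descriptor from sg_dma_address(sg)
			 * and sg_dma_len(sg) here ... */
			src_nents--;
			if (!src_nents)
				break;
			sg = sg_next(sg);
		}
	}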
/linux/drivers/crypto/starfive/
jh7110-aes.c
506 unsigned int len, src_nents, dst_nents; in starfive_aes_map_sg() local
512 src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_BIDIRECTIONAL); in starfive_aes_map_sg()
513 if (src_nents == 0) in starfive_aes_map_sg()
517 dst_nents = src_nents; in starfive_aes_map_sg()
529 src_nents = dma_map_sg(cryp->dev, stsg, 1, DMA_TO_DEVICE); in starfive_aes_map_sg()
530 if (src_nents == 0) in starfive_aes_map_sg()
jh7110-hash.c
203 int ret, src_nents, i; in starfive_hash_one_request() local
230 src_nents = dma_map_sg(cryp->dev, tsg, 1, DMA_TO_DEVICE); in starfive_hash_one_request()
231 if (src_nents == 0) in starfive_hash_one_request()
/linux/crypto/
adiantum.c
345 const unsigned int src_nents = sg_nents(src); in adiantum_crypt() local
362 if (src_nents == 1 && src->offset + req->cryptlen <= PAGE_SIZE) { in adiantum_crypt()
372 err = adiantum_hash_message(req, src, src_nents, &digest); in adiantum_crypt()
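adiantum_crypt() uses sg_nents() to detect the common fast path: a source that is a single entry lying entirely within one page, which can be processed through a direct mapping instead of entry-by-entry walking. A sketch of that test (assumption: sg_virt() stands in for the mapping step and only works for lowmem pages; the real code goes through the hash helper shown above and must handle highmem):

	const unsigned int src_nents = sg_nents(src);

	if (src_nents == 1 && src->offset + req->cryptlen <= PAGE_SIZE) {
		/* Whole message contiguous in one lowmem page: touch it
		 * directly. */
		u8 *buf = sg_virt(src);
		/* ... process buf[0 .. req->cryptlen) in one go ... */
	} else {
		/* Fall back to walking the scatterlist entry by entry. */
	}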
/linux/drivers/crypto/
talitos.h
61 int src_nents; member
/linux/drivers/crypto/bcm/
cipher.h
287 int src_nents; /* Number of src entries with data */ member