Lines matching refs: src_nents

151 		       struct scatterlist *dst, int src_nents,  in caam_unmap()  argument
157 		if (src_nents) in caam_unmap()
158 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
162 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
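
The split above is the in-place/out-of-place distinction: when the destination aliases the source, one bidirectional mapping covers both directions. A minimal sketch of the surrounding logic, reconstructed from these hits alone (the dst branch and the iv/qm_sg arguments are elided in the listing, so names beyond src and src_nents are assumptions):

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static void caam_unmap_sketch(struct device *dev, struct scatterlist *src,
				      struct scatterlist *dst, int src_nents,
				      int dst_nents)
	{
		if (dst != src) {
			/* out-of-place: src was mapped TO_DEVICE, dst FROM_DEVICE */
			if (src_nents)
				dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
			if (dst_nents)
				dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
		} else {
			/* in-place: a single bidirectional mapping covers both */
			dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
		}
	}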
364 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
385 		src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
386 		if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
390 			return ERR_PTR(src_nents); in aead_edesc_alloc()
401 		if (src_nents) { in aead_edesc_alloc()
402 			mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
418 				dma_unmap_sg(dev, req->src, src_nents, in aead_edesc_alloc()
430 		src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
431 		if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
435 			return ERR_PTR(src_nents); in aead_edesc_alloc()
438 		mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
478 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
493 			caam_unmap(dev, req->src, req->dst, src_nents, in aead_edesc_alloc()
500 	edesc->src_nents = src_nents; in aead_edesc_alloc()
517 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
539 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
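
The aead_edesc_alloc() hits show a count-then-map idiom: sg_nents_for_len() returns the number of S/G entries covering src_len (negative if the list is too short), dma_map_sg() maps them (returning 0 on failure), and the count is recorded in the edesc so the completion path can unmap. A condensed sketch of just these steps; the helper name is hypothetical, and the real function folds this into a larger allocation routine:

	#include <linux/dma-mapping.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>

	/* hypothetical helper condensing the pattern visible above */
	static int map_src_sketch(struct device *dev, struct scatterlist *src,
				  int src_len, enum dma_data_direction dir,
				  int *mapped_nents)
	{
		int src_nents = sg_nents_for_len(src, src_len);

		if (unlikely(src_nents < 0))
			return src_nents;	/* caller wraps this in ERR_PTR() */

		*mapped_nents = 0;
		if (src_nents) {
			/*
			 * dir is DMA_TO_DEVICE out-of-place (line 402),
			 * DMA_BIDIRECTIONAL in-place (line 438)
			 */
			*mapped_nents = dma_map_sg(dev, src, src_nents, dir);
			if (unlikely(!*mapped_nents))
				return -ENOMEM;	/* nothing mapped, nothing to undo */
		}

		return src_nents;	/* stored as edesc->src_nents (line 500) */
	}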
1122 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in skcipher_edesc_alloc() local
1130 	src_nents = sg_nents_for_len(req->src, req->cryptlen); in skcipher_edesc_alloc()
1131 	if (unlikely(src_nents < 0)) { in skcipher_edesc_alloc()
1134 		return ERR_PTR(src_nents); in skcipher_edesc_alloc()
1145 		mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in skcipher_edesc_alloc()
1156 			dma_unmap_sg(dev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1160 		mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in skcipher_edesc_alloc()
1189 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1198 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1211 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1217 	edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1235 		caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in skcipher_edesc_alloc()
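
skcipher_edesc_alloc() repeats the same idiom with req->cryptlen as the length. The visible differences are only in direction and cleanup: DMA_TO_DEVICE at line 1145 when req->src != req->dst, DMA_BIDIRECTIONAL at line 1160 when they alias, an explicit dma_unmap_sg() of the source at line 1156 (from the ordering, apparently the leg where mapping the destination fails), and caam_unmap() on every later error leg so partial mappings are undone.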
1267 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1279 	caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
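
These completion-side wrappers simply feed the counts recorded at allocation time (edesc->src_nents, edesc->dst_nents) back into caam_unmap(), closing the map/unmap pairing established above.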
1410 			     edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1448 			     edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
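
In the two completion callbacks these matched lines are continuations of a debug hex dump: with a multi-entry source S/G list a fixed 100 bytes are dumped, otherwise just ivsize bytes. They are the only src_nents hits in those functions.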
3372 	if (edesc->src_nents) in ahash_unmap()
3373 		dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3543 	int src_nents, mapped_nents, qm_sg_bytes, qm_sg_src_index; in ahash_update_ctx() local
3554 		src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
3555 		if (src_nents < 0) { in ahash_update_ctx()
3557 			return src_nents; in ahash_update_ctx()
3560 		if (src_nents) { in ahash_update_ctx()
3561 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_ctx()
3574 			dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_ctx()
3579 		edesc->src_nents = src_nents; in ahash_update_ctx()
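
Every ahash path here (ahash_update_ctx() above, then ahash_finup_ctx(), ahash_digest(), ahash_update_no_ctx(), ahash_finup_no_ctx() and ahash_update_first() below) repeats the same three steps, always with DMA_TO_DEVICE since hashing only reads the source. The dma_unmap_sg() hits at 3574, 4026 and 4244 sit between the map and the store, consistent with a cleanup when the edesc allocation fails; that reading is an inference from ordering, not shown in the listing. A condensed sketch, with the elided parts reduced to comments:

	src_nents = sg_nents_for_len(req->src, src_len);
	if (src_nents < 0)
		return src_nents;

	mapped_nents = 0;
	if (src_nents) {
		/* hash input is read-only for the device */
		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
					  DMA_TO_DEVICE);
		if (!mapped_nents)
			return -ENOMEM;
	}

	/* ... allocate edesc (elided in the listing) ... */
	if (!edesc) {
		/* no edesc to record the count in, so undo the mapping here */
		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
		return -ENOMEM;
	}

	edesc->src_nents = src_nents;	/* consumed by ahash_unmap() at 3372-3373 */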
3731 	int src_nents, mapped_nents; in ahash_finup_ctx() local
3737 	src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_finup_ctx()
3738 	if (src_nents < 0) { in ahash_finup_ctx()
3740 		return src_nents; in ahash_finup_ctx()
3743 	if (src_nents) { in ahash_finup_ctx()
3744 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_finup_ctx()
3757 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_ctx()
3761 	edesc->src_nents = src_nents; in ahash_finup_ctx()
3824 	int src_nents, mapped_nents; in ahash_digest() local
3830 	src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_digest()
3831 	if (src_nents < 0) { in ahash_digest()
3833 		return src_nents; in ahash_digest()
3836 	if (src_nents) { in ahash_digest()
3837 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_digest()
3850 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_digest()
3854 	edesc->src_nents = src_nents; in ahash_digest()
3995 	int qm_sg_bytes, src_nents, mapped_nents; in ahash_update_no_ctx() local
4006 		src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_no_ctx()
4007 		if (src_nents < 0) { in ahash_update_no_ctx()
4009 			return src_nents; in ahash_update_no_ctx()
4012 		if (src_nents) { in ahash_update_no_ctx()
4013 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_no_ctx()
4026 			dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_no_ctx()
4031 		edesc->src_nents = src_nents; in ahash_update_no_ctx()
4113 	int qm_sg_bytes, src_nents, mapped_nents; in ahash_finup_no_ctx() local
4119 	src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_finup_no_ctx()
4120 	if (src_nents < 0) { in ahash_finup_no_ctx()
4122 		return src_nents; in ahash_finup_no_ctx()
4125 	if (src_nents) { in ahash_finup_no_ctx()
4126 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_finup_no_ctx()
4139 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_no_ctx()
4143 	edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4212 	int src_nents, mapped_nents; in ahash_update_first() local
4224 		src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_first()
4225 		if (src_nents < 0) { in ahash_update_first()
4227 			return src_nents; in ahash_update_first()
4230 		if (src_nents) { in ahash_update_first()
4231 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_first()
4244 			dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_first()
4249 		edesc->src_nents = src_nents; in ahash_update_first()