
Searched refs:sg_nents (Results 1 – 25 of 47) sorted by relevance
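For reference, sg_nents() (lib/scatterlist.c) simply walks the chain with sg_next() and counts entries; it pays no attention to byte lengths, which is why some hits below pair it with sg_nents_for_len() or a DIV_ROUND_UP() estimate instead. A minimal sketch of what the helper does:

    #include <linux/scatterlist.h>

    /* Minimal sketch: count every entry in a (possibly chained) scatterlist. */
    int sg_nents(struct scatterlist *sg)
    {
        int nents;

        for (nents = 0; sg; sg = sg_next(sg))
            nents++;
        return nents;
    }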

/linux/drivers/crypto/amlogic/
amlogic-gxl-cipher.c
33 if (sg_nents(src_sg) != sg_nents(dst_sg)) in meson_cipher_need_fallback()
37 if (sg_nents(src_sg) > MAXDESC - 3 || sg_nents(dst_sg) > MAXDESC - 3) in meson_cipher_need_fallback()
178 nr_sgs = dma_map_sg(mc->dev, areq->src, sg_nents(areq->src), in meson_cipher()
187 nr_sgs = dma_map_sg(mc->dev, areq->src, sg_nents(areq->src), in meson_cipher()
194 nr_sgd = dma_map_sg(mc->dev, areq->dst, sg_nents(areq->dst), in meson_cipher()
239 dma_unmap_sg(mc->dev, areq->src, sg_nents(areq->src), DMA_BIDIRECTIONAL); in meson_cipher()
241 dma_unmap_sg(mc->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE); in meson_cipher()
242 dma_unmap_sg(mc->dev, areq->dst, sg_nents(areq->dst), DMA_FROM_DEVICE); in meson_cipher()
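The amlogic hits show the pattern repeated by most crypto drivers in this list: sg_nents() supplies the nents argument to dma_map_sg()/dma_unmap_sg(), while the (possibly smaller) count returned by dma_map_sg() is what gets programmed into descriptors. A hedged sketch of that pairing; the helper name and direction here are illustrative, not taken from any driver:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Illustrative only: map a source scatterlist, use it, unmap it. */
    static int example_map_src(struct device *dev, struct scatterlist *src)
    {
        int nr_sgs;

        /* Map every entry; sg_nents() counts entries, not bytes. */
        nr_sgs = dma_map_sg(dev, src, sg_nents(src), DMA_TO_DEVICE);
        if (!nr_sgs)
            return -EINVAL;

        /* ... program nr_sgs descriptors (may be fewer than sg_nents()) ... */

        /* Unmap with the same nents that was passed to dma_map_sg(). */
        dma_unmap_sg(dev, src, sg_nents(src), DMA_TO_DEVICE);
        return 0;
    }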
/linux/drivers/infiniband/sw/rdmavt/
trace_mr.h
144 TP_PROTO(struct ib_mr *ibmr, int sg_nents, unsigned int *sg_offset),
145 TP_ARGS(ibmr, sg_nents, sg_offset),
152 __field(int, sg_nents)
161 __entry->sg_nents = sg_nents;
171 __entry->sg_nents,
/linux/drivers/crypto/gemini/
sl3516-ce-cipher.c
43 if (sg_nents(areq->src) > MAXDESC / 2) { in sl3516_ce_need_fallback()
48 if (sg_nents(areq->dst) > MAXDESC) { in sl3516_ce_need_fallback()
154 nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src), in sl3516_ce_cipher()
163 nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src), in sl3516_ce_cipher()
170 nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst), in sl3516_ce_cipher()
251 dma_unmap_sg(ce->dev, areq->src, sg_nents(areq->src), in sl3516_ce_cipher()
254 dma_unmap_sg(ce->dev, areq->src, sg_nents(areq->src), in sl3516_ce_cipher()
256 dma_unmap_sg(ce->dev, areq->dst, sg_nents(areq->dst), in sl3516_ce_cipher()
/linux/drivers/crypto/intel/iaa/
iaa_crypto_main.c
1098 dma_unmap_sg(dev, ctx->req->dst, sg_nents(ctx->req->dst), DMA_TO_DEVICE); in iaa_desc_complete()
1099 dma_unmap_sg(dev, ctx->req->src, sg_nents(ctx->req->src), DMA_FROM_DEVICE); in iaa_desc_complete()
1104 dma_unmap_sg(dev, ctx->req->dst, sg_nents(ctx->req->dst), DMA_FROM_DEVICE); in iaa_desc_complete()
1105 dma_unmap_sg(dev, ctx->req->src, sg_nents(ctx->req->src), DMA_TO_DEVICE); in iaa_desc_complete()
1236 dma_unmap_sg(dev, req->dst, sg_nents(req->dst), DMA_FROM_DEVICE); in iaa_remap_for_verify()
1237 dma_unmap_sg(dev, req->src, sg_nents(req->src), DMA_TO_DEVICE); in iaa_remap_for_verify()
1239 nr_sgs = dma_map_sg(dev, req->src, sg_nents(req->src), DMA_FROM_DEVICE); in iaa_remap_for_verify()
1252 nr_sgs = dma_map_sg(dev, req->dst, sg_nents(req->dst), DMA_TO_DEVICE); in iaa_remap_for_verify()
1258 dma_unmap_sg(dev, req->src, sg_nents(req->src), DMA_FROM_DEVICE); in iaa_remap_for_verify()
1517 nr_sgs = dma_map_sg(dev, req->src, sg_nents(req->src), DMA_TO_DEVICE); in iaa_comp_acompress()
[all …]
/linux/drivers/crypto/bcm/
util.c
54 unsigned int nents = sg_nents(src); in sg_copy_part_to_buf()
73 unsigned int nents = sg_nents(dest); in sg_copy_part_from_buf()
96 int sg_nents = 0; in spu_sg_count() local
106 sg_nents++; in spu_sg_count()
111 return sg_nents; in spu_sg_count()
/linux/drivers/crypto/cavium/nitrox/
nitrox_reqmgr.c
61 dma_unmap_sg(dev, sr->in.sg, sg_nents(sr->in.sg), in softreq_unmap_sgbufs()
69 dma_unmap_sg(dev, sr->out.sg, sg_nents(sr->out.sg), in softreq_unmap_sgbufs()
165 nents = dma_map_sg(dev, req->src, sg_nents(req->src), in dma_map_inbufs()
182 dma_unmap_sg(dev, req->src, sg_nents(req->src), DMA_BIDIRECTIONAL); in dma_map_inbufs()
193 nents = dma_map_sg(dev, req->dst, sg_nents(req->dst), in dma_map_outbufs()
207 dma_unmap_sg(dev, req->dst, sg_nents(req->dst), DMA_BIDIRECTIONAL); in dma_map_outbufs()
nitrox_skcipher.c
211 int nents = sg_nents(skreq->src) + 1; in alloc_src_sglist()
229 int nents = sg_nents(skreq->dst) + 3; in alloc_dst_sglist()
/linux/drivers/crypto/hisilicon/
sgl.c
233 sg_n = sg_nents(sgl); in hisi_acc_sg_buf_map_to_hw_sgl()
284 dma_unmap_sg(dev, sgl, sg_nents(sgl), dir); in hisi_acc_sg_buf_unmap()
/linux/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-cipher.c
80 sg_miter_start(&mi, areq->src, sg_nents(areq->src), in sun4i_ss_opti_poll()
108 sg_miter_start(&mo, areq->dst, sg_nents(areq->dst), in sun4i_ss_opti_poll()
272 sg_miter_start(&mi, areq->src, sg_nents(areq->src), in sun4i_ss_cipher_poll()
326 sg_miter_start(&mo, areq->dst, sg_nents(areq->dst), in sun4i_ss_cipher_poll()
sun4i-ss-hash.c
220 copied = sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), in sun4i_hash()
272 sg_miter_start(&mi, areq->src, sg_nents(areq->src), in sun4i_hash()
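The sun4i-ss hits pass sg_nents() to sg_miter_start() so the mapping iterator covers the whole chain. A hedged sketch of that idiom (the function name and the FROM_SG flag choice are illustrative):

    #include <linux/scatterlist.h>

    /* Illustrative: walk and measure every mapped chunk of a scatterlist. */
    static size_t example_total_bytes(struct scatterlist *sgl)
    {
        struct sg_mapping_iter mi;
        size_t total = 0;

        sg_miter_start(&mi, sgl, sg_nents(sgl), SG_MITER_FROM_SG);
        while (sg_miter_next(&mi))
            total += mi.length;    /* mi.addr is the mapped address */
        sg_miter_end(&mi);

        return total;
    }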
/linux/drivers/mailbox/
bcm-pdc-mailbox.c
608 sg_nents(pdcs->src_sg[pdcs->txin]), DMA_TO_DEVICE); in pdc_receive_one()
623 dma_unmap_sg(dev, rx_ctx->dst_sg, sg_nents(rx_ctx->dst_sg), in pdc_receive_one()
717 num_desc = (u32)sg_nents(sg); in pdc_tx_list_sg_add()
884 num_desc = (u32)sg_nents(sg); in pdc_rx_list_sg_add()
1205 src_nent = sg_nents(mssg->spu.src); in pdc_send_data()
1212 dst_nent = sg_nents(mssg->spu.dst); in pdc_send_data()
bcm-flexrm-mailbox.c
623 rc = dma_map_sg(dev, msg->spu.src, sg_nents(msg->spu.src), in flexrm_spu_dma_map()
628 rc = dma_map_sg(dev, msg->spu.dst, sg_nents(msg->spu.dst), in flexrm_spu_dma_map()
631 dma_unmap_sg(dev, msg->spu.src, sg_nents(msg->spu.src), in flexrm_spu_dma_map()
641 dma_unmap_sg(dev, msg->spu.dst, sg_nents(msg->spu.dst), in flexrm_spu_dma_unmap()
643 dma_unmap_sg(dev, msg->spu.src, sg_nents(msg->spu.src), in flexrm_spu_dma_unmap()
/linux/include/linux/
devcoredump.h
34 for_each_sg(table, iter, sg_nents(table), i) { in _devcd_free_sgtable()
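devcoredump iterates with for_each_sg() over sg_nents(table) entries. A hedged sketch of that loop shape (the function name and loop body are illustrative):

    #include <linux/scatterlist.h>

    /* Illustrative: visit each entry of a (possibly chained) table. */
    static void example_walk_table(struct scatterlist *table)
    {
        struct scatterlist *iter;
        int i;

        for_each_sg(table, iter, sg_nents(table), i) {
            /* sg_page(iter), iter->offset and iter->length describe the entry */
        }
    }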
/linux/drivers/usb/storage/
protocol.c
131 nents = sg_nents(sg); in usb_stor_access_xfer_buf()
/linux/lib/crypto/
chacha20poly1305.c
258 sg_miter_start(&miter, src, sg_nents(src), flags); in chacha20poly1305_crypt_sg_inplace()
323 sg_copy_buffer(src, sg_nents(src), b.mac[encrypt], in chacha20poly1305_crypt_sg_inplace()
/linux/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-hash.c
365 if (sg_nents(areq->src) > MAX_SG - 1) { in sun8i_ss_hash_need_fallback()
526 nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE); in sun8i_ss_hash_run()
691 dma_unmap_sg(ss->dev, areq->src, sg_nents(areq->src), in sun8i_ss_hash_run()
/linux/drivers/crypto/hisilicon/sec/
sec_algs.c
460 sg_nents(sec_req_el->sgl_out), in sec_skcipher_alg_callback()
467 sg_nents(sec_req_el->sgl_in), in sec_skcipher_alg_callback()
730 sec_req->len_in = sg_nents(skreq->src); in sec_alg_skcipher_crypto()
744 sec_req->len_out = sg_nents(skreq->dst); in sec_alg_skcipher_crypto()
/linux/drivers/crypto/
omap-crypto.c
20 int n = sg_nents(*sg); in omap_crypto_copy_sg_lists()
/linux/lib/
sg_split.c
159 ret = sg_calculate_split(in, sg_nents(in), nb_splits, skip, split_sizes, in sg_split()
/linux/drivers/crypto/ccree/
cc_buffer_mgr.c
1238 int sg_nents; in cc_map_hash_request_update() local
1252 sg_nents = sg_nents_for_len(src, nbytes); in cc_map_hash_request_update()
1253 if (sg_nents < 0) in cc_map_hash_request_update()
1254 return sg_nents; in cc_map_hash_request_update()
1255 areq_ctx->in_nents = sg_nents; in cc_map_hash_request_update()
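Unlike the plain sg_nents() calls above, the ccree code uses sg_nents_for_len(), which counts only the entries needed to cover nbytes and returns -EINVAL if the list is too short. A hedged sketch of that check (the wrapper name is hypothetical):

    #include <linux/scatterlist.h>

    /* Illustrative: validate that src covers nbytes and record the count. */
    static int example_nents_for_len(struct scatterlist *src, unsigned int nbytes,
                                     unsigned int *out_nents)
    {
        int nents;

        nents = sg_nents_for_len(src, nbytes);
        if (nents < 0)
            return nents;    /* list shorter than nbytes */

        *out_nents = nents;
        return 0;
    }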
/linux/drivers/infiniband/ulp/isert/
ib_isert.c
1051 int imm_data, imm_data_len, unsol_data, sg_nents, rc; in isert_handle_scsi_cmd() local
1078 sg_nents = max(1UL, DIV_ROUND_UP(imm_data_len, PAGE_SIZE)); in isert_handle_scsi_cmd()
1079 sg_copy_from_buffer(cmd->se_cmd.t_data_sg, sg_nents, in isert_handle_scsi_cmd()
1082 sg_nents, imm_data_len); in isert_handle_scsi_cmd()
1122 int rc, sg_nents, sg_off, page_off; in isert_handle_iscsi_dataout() local
1145 sg_nents = max(1UL, DIV_ROUND_UP(unsol_data_len, PAGE_SIZE)); in isert_handle_iscsi_dataout()
1157 sg_nents, isert_get_data(rx_desc), unsol_data_len); in isert_handle_iscsi_dataout()
1159 sg_copy_from_buffer(sg_start, sg_nents, isert_get_data(rx_desc), in isert_handle_iscsi_dataout()
/linux/drivers/target/iscsi/cxgbit/
cxgbit_target.c
895 u32 sg_nents = max(1UL, DIV_ROUND_UP(pdu_cb->dlen, PAGE_SIZE)); in cxgbit_handle_immediate_data() local
897 cxgbit_skb_copy_to_sg(csk->skb, sg, sg_nents, 0); in cxgbit_handle_immediate_data()
1006 int rc, sg_nents, sg_off; in cxgbit_handle_iscsi_dataout() local
1059 sg_nents = max(1UL, DIV_ROUND_UP(skip + data_len, PAGE_SIZE)); in cxgbit_handle_iscsi_dataout()
1061 cxgbit_skb_copy_to_sg(csk->skb, sg_start, sg_nents, skip); in cxgbit_handle_iscsi_dataout()
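Both the isert and cxgbit hits estimate the entry count from a byte length rather than calling sg_nents(), assuming one page per entry, before handing it to an sg copy helper. A hedged sketch of that estimate (the function name is hypothetical):

    #include <linux/scatterlist.h>
    #include <linux/mm.h>
    #include <linux/minmax.h>

    /* Illustrative: copy a received payload into a page-backed scatterlist. */
    static void example_copy_payload(struct scatterlist *sgl, void *buf, u32 len)
    {
        unsigned int nents = max(1UL, DIV_ROUND_UP(len, PAGE_SIZE));

        sg_copy_from_buffer(sgl, nents, buf, len);
    }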
/linux/drivers/infiniband/sw/rxe/
rxe_mr.c
232 int sg_nents, unsigned int *sg_offset) in rxe_map_mr_sg() argument
242 return ib_sg_to_pages(ibmr, sgl, sg_nents, sg_offset, rxe_set_page); in rxe_map_mr_sg()
/linux/drivers/crypto/caam/
desc_constr.h
79 static inline int pad_sg_nents(int sg_nents) in pad_sg_nents() argument
81 return ALIGN(sg_nents, 4); in pad_sg_nents()
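The caam helper above only rounds the entry count up to the next multiple of 4; a few worked values as an illustration, checked at compile time:

    #include <linux/align.h>
    #include <linux/build_bug.h>

    /* ALIGN(n, 4) rounds up, so pad_sg_nents() maps 1 -> 4, 4 -> 4, 5 -> 8. */
    static_assert(ALIGN(1, 4) == 4);
    static_assert(ALIGN(4, 4) == 4);
    static_assert(ALIGN(5, 4) == 8);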
/linux/drivers/base/
devcoredump.c
315 return sg_pcopy_to_buffer(table, sg_nents(table), buffer, buf_len, in devcd_read_from_sgtable()
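Here devcoredump reads back from the table with sg_pcopy_to_buffer(), which takes an extra skip offset compared to sg_copy_to_buffer(). A hedged sketch of that call (the wrapper name is hypothetical):

    #include <linux/scatterlist.h>

    /* Illustrative: copy buf_len bytes out of the table, starting skip bytes in. */
    static size_t example_read_from_table(struct scatterlist *table, void *buffer,
                                          size_t buf_len, off_t skip)
    {
        return sg_pcopy_to_buffer(table, sg_nents(table), buffer, buf_len, skip);
    }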
