
Searched refs: num_sge (Results 1 – 25 of 71), sorted by relevance


/linux/drivers/vfio/pci/pds/
cmds.c
221 lm_file->num_sge = lm_file->sg_table.nents; in pds_vfio_dma_map_lm_file()
224 sgl_size = lm_file->num_sge * sizeof(struct pds_lm_sg_elem); in pds_vfio_dma_map_lm_file()
254 lm_file->num_sge = 0; in pds_vfio_dma_map_lm_file()
269 lm_file->num_sge * sizeof(*lm_file->sgl), in pds_vfio_dma_unmap_lm_file()
274 lm_file->num_sge = 0; in pds_vfio_dma_unmap_lm_file()
307 cmd.lm_save.num_sge = cpu_to_le32(lm_file->num_sge); in pds_vfio_get_lm_state_cmd()
346 cmd.lm_restore.num_sge = cpu_to_le32(lm_file->num_sge); in pds_vfio_set_lm_state_cmd()
480 u64 sgl_dma, u16 num_sge, u32 offset, in pds_vfio_dirty_seq_ack_cmd() argument
490 .num_sge = cpu_to_le16(num_sge), in pds_vfio_dirty_seq_ack_cmd()
dirty.c
119 region->num_sge * sizeof(struct pds_lm_sg_elem), in __pds_vfio_dirty_free_sgl()
123 region->num_sge = 0; in __pds_vfio_dirty_free_sgl()
168 region->num_sge = max_sge; in pds_vfio_dirty_alloc_sgl()
387 u16 num_sge; in pds_vfio_dirty_seq_ack() local
433 num_sge = sg_table.nents; in pds_vfio_dirty_seq_ack()
434 size = num_sge * sizeof(struct pds_lm_sg_elem); in pds_vfio_dirty_seq_ack()
437 err = pds_vfio_dirty_seq_ack_cmd(pds_vfio, region->sgl_addr, num_sge, in pds_vfio_dirty_seq_ack()
443 num_sge, region->sgl_addr, ERR_PTR(err)); in pds_vfio_dirty_seq_ack()
lm.h
26 u16 num_sge; member
dirty.h
17 u16 num_sge; member
cmds.h
23 u64 sgl_dma, u16 num_sge, u32 offset,
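
Across cmds.c and dirty.c the driver keeps num_sge as the count of mapped scatter-gather entries, sizes its SGL buffer as num_sge * sizeof(element), and byte-swaps the count into little-endian device commands. A minimal userspace C sketch of that bookkeeping, using hypothetical stand-in types rather than the real struct pds_lm_sg_elem and adminq command layouts:

    #include <endian.h>
    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical stand-in for struct pds_lm_sg_elem. */
    struct sg_elem {
        uint64_t addr;
        uint32_t len;
        uint32_t rsvd;
    };

    int main(void)
    {
        uint32_t num_sge = 16;   /* e.g. sg_table.nents after DMA mapping */
        size_t sgl_size = num_sge * sizeof(struct sg_elem);
        struct sg_elem *sgl = calloc(num_sge, sizeof(*sgl));

        if (!sgl)
            return 1;

        /* Device commands carry the count little-endian: the kernel uses
         * cpu_to_le32(); the userspace equivalent is htole32(). */
        uint32_t wire_count = htole32(num_sge);

        (void)wire_count;
        (void)sgl_size;
        free(sgl);   /* the driver also resets num_sge to 0 on unmap */
        return 0;
    }
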
/linux/drivers/infiniband/sw/rxe/
rxe_odp.c
415 u32 num_sge; member
434 WARN_ON(!work->num_sge); in rxe_ib_prefetch_mr_work()
435 for (i = 0; i < work->num_sge; ++i) { in rxe_ib_prefetch_mr_work()
461 u32 num_sge) in rxe_ib_prefetch_sg_list() argument
467 for (i = 0; i < num_sge; ++i) { in rxe_ib_prefetch_sg_list()
507 u32 num_sge) in rxe_ib_advise_mr_prefetch() argument
524 num_sge); in rxe_ib_advise_mr_prefetch()
527 work = kvzalloc(struct_size(work, frags, num_sge), GFP_KERNEL); in rxe_ib_advise_mr_prefetch()
533 work->num_sge = num_sge; in rxe_ib_advise_mr_prefetch()
535 for (i = 0; i < num_sge; ++i) { in rxe_ib_advise_mr_prefetch()
[all …]
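
rxe_ib_advise_mr_prefetch() sizes one allocation for the work item plus num_sge trailing entries with kvzalloc(struct_size(work, frags, num_sge)). A sketch of the same flexible-array-member idiom in standalone C; the names here are hypothetical and the explicit overflow test stands in for struct_size()'s built-in check:

    #include <stdint.h>
    #include <stdlib.h>

    /* Hypothetical per-SGE bookkeeping entry. */
    struct frag {
        uint64_t io_virt;
        uint32_t length;
    };

    struct prefetch_work {
        uint32_t num_sge;
        struct frag frags[];   /* flexible array, sized at allocation */
    };

    static struct prefetch_work *alloc_work(uint32_t num_sge)
    {
        struct prefetch_work *work;

        /* struct_size() in the kernel also guards this multiply/add
         * against overflow; do the same test explicitly here. */
        if (num_sge > (SIZE_MAX - sizeof(*work)) / sizeof(struct frag))
            return NULL;

        work = calloc(1, sizeof(*work) + num_sge * sizeof(struct frag));
        if (work)
            work->num_sge = num_sge;
        return work;
    }
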
/linux/include/rdma/
rdmavt_mr.h
80 u8 num_sge; member
95 while (ss->num_sge) { in rvt_put_ss()
97 if (--ss->num_sge) in rvt_put_ss()
125 if (--ss->num_sge) in rvt_update_sge()
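
rvt_put_ss() drains an SGE state: it releases the current element's memory-region reference, and while --ss->num_sge is still nonzero, loads the next element from the list. A simplified sketch of that loop with hypothetical types:

    #include <stdint.h>

    struct sge { void *mr; };

    struct sge_state {
        struct sge *sg_list;   /* elements after the current one */
        struct sge sge;        /* current element */
        uint8_t num_sge;       /* elements left, current included */
    };

    static void put_mr(void *mr) { (void)mr; /* drop one MR reference */ }

    static void put_ss(struct sge_state *ss)
    {
        while (ss->num_sge) {
            put_mr(ss->sge.mr);
            if (--ss->num_sge)           /* more left: load the next */
                ss->sge = *ss->sg_list++;
        }
    }
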
/linux/drivers/infiniband/sw/siw/
siw_mem.h
32 static inline void siw_unref_mem_sgl(struct siw_mem **mem, unsigned int num_sge) in siw_unref_mem_sgl() argument
34 while (num_sge) { in siw_unref_mem_sgl()
41 num_sge--; in siw_unref_mem_sgl()
siw_qp_rx.c
352 int num_sge = rqe->num_sge; in siw_rqe_get() local
354 if (likely(num_sge <= SIW_MAX_SGE)) { in siw_rqe_get()
364 wqe->rqe.num_sge = num_sge; in siw_rqe_get()
366 while (i < num_sge) { in siw_rqe_get()
377 siw_dbg_qp(qp, "too many sge's: %d\n", rqe->num_sge); in siw_rqe_get()
578 wqe->rqe.num_sge = 1; in siw_proc_write()
710 resp->num_sge = length ? 1 : 0; in siw_init_rresp()
760 wqe->sqe.num_sge = 1; in siw_orqe_start_rx()
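
siw_rqe_get() accepts a receive queue element only when its num_sge is within SIW_MAX_SGE, then copies exactly that many SGEs into the in-flight WQE. A sketch of the validate-then-copy step; the limit and types are illustrative:

    #include <stdint.h>

    #define MAX_SGE 6   /* illustrative; siw uses SIW_MAX_SGE */

    struct sge { uint64_t addr; uint32_t length; uint32_t lkey; };

    struct rqe {
        uint8_t num_sge;
        struct sge sge[MAX_SGE];
    };

    /* Returns 0 on success, -1 for a malformed posted element
     * ("too many sge's" in siw's debug output). */
    static int rqe_get(const struct rqe *src, struct rqe *wqe)
    {
        if (src->num_sge > MAX_SGE)
            return -1;

        wqe->num_sge = src->num_sge;
        for (int i = 0; i < src->num_sge; i++)
            wqe->sge[i] = src->sge[i];
        return 0;
    }
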
/linux/net/rds/
ib_send.c
630 send->s_wr.num_sge = 1; in rds_ib_xmit()
653 send->s_wr.num_sge = 2; in rds_ib_xmit()
683 &send->s_wr, send->s_wr.num_sge, send->s_wr.next); in rds_ib_xmit()
802 send->s_atomic_wr.wr.num_sge = 1; in rds_ib_xmit_atomic()
869 int num_sge; in rds_ib_xmit_rdma() local
915 num_sge = op->op_count; in rds_ib_xmit_rdma()
930 if (num_sge > max_sge) { in rds_ib_xmit_rdma()
931 send->s_rdma_wr.wr.num_sge = max_sge; in rds_ib_xmit_rdma()
932 num_sge -= max_sge; in rds_ib_xmit_rdma()
934 send->s_rdma_wr.wr.num_sge = num_sge; in rds_ib_xmit_rdma()
[all …]
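
rds_ib_xmit_rdma() fragments one RDMA operation across several work requests, giving each WR at most max_sge elements and carrying the remainder into the next. A small self-contained sketch of that chunking arithmetic:

    #include <stdio.h>

    /* Split total_sge elements into work requests of at most max_sge
     * each, mirroring the num_sge > max_sge branch in rds_ib_xmit_rdma(). */
    static int count_wrs(int total_sge, int max_sge)
    {
        int num_sge = total_sge;
        int wrs = 0;

        while (num_sge > 0) {
            int this_wr = num_sge > max_sge ? max_sge : num_sge;

            num_sge -= this_wr;
            wrs++;
        }
        return wrs;
    }

    int main(void)
    {
        /* 100 elements against a device limit of 30 -> 4 WRs */
        printf("%d\n", count_wrs(100, 30));
        return 0;
    }
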
/linux/drivers/infiniband/sw/rdmavt/
trace_tx.h
68 __field(int, num_sge)
88 __entry->num_sge = wqe->wr.num_sge;
109 __entry->num_sge,
rc.c
166 ss->num_sge = wqe->wr.num_sge; in rvt_restart_sge()
srq.c
202 p->num_sge = wqe->num_sge; in rvt_modify_srq()
203 for (i = 0; i < wqe->num_sge; i++) in rvt_modify_srq()
/linux/drivers/infiniband/hw/mlx5/
wr.c
272 for (i = 0; i < wr->num_sge; i++) { in set_data_inl_seg()
569 if (unlikely(send_wr->num_sge != 0) || in set_pi_umr_wr()
935 u8 next_fence, int *num_sge) in handle_qpt_rc() argument
955 *num_sge = 0; in handle_qpt_rc()
962 *num_sge = 0; in handle_qpt_rc()
971 *num_sge = 0; in handle_qpt_rc()
1064 int num_sge; in mlx5_ib_post_send() local
1090 num_sge = wr->num_sge; in mlx5_ib_post_send()
1091 if (unlikely(num_sge > qp->sq.max_gs)) { in mlx5_ib_post_send()
1131 next_fence, &num_sge); in mlx5_ib_post_send()
[all …]
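
set_data_inl_seg() walks all wr->num_sge scatter entries and copies their bytes straight into the work queue entry rather than posting addresses, after mlx5_ib_post_send() has rejected any WR whose num_sge exceeds the queue's max_gs. A simplified inline-copy sketch with illustrative types:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    struct sge { const void *addr; uint32_t length; };

    /* Copy each SGE's payload into the WQE's inline buffer; returns the
     * byte count, or -1 when the reserved inline space would overflow. */
    static ptrdiff_t inline_data(uint8_t *dst, size_t cap,
                                 const struct sge *sg_list, int num_sge)
    {
        size_t off = 0;

        for (int i = 0; i < num_sge; i++) {
            if (sg_list[i].length > cap - off)
                return -1;
            memcpy(dst + off, sg_list[i].addr, sg_list[i].length);
            off += sg_list[i].length;
        }
        return (ptrdiff_t)off;
    }
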
odp.c
1949 u32 num_sge; member
1961 for (i = 0; i < work->num_sge; ++i) in destroy_prefetch_work()
2015 WARN_ON(!work->num_sge); in mlx5_ib_prefetch_mr_work()
2016 for (i = 0; i < work->num_sge; ++i) { in mlx5_ib_prefetch_mr_work()
2031 struct ib_sge *sg_list, u32 num_sge) in init_prefetch_work() argument
2038 for (i = 0; i < num_sge; ++i) { in init_prefetch_work()
2043 work->num_sge = i; in init_prefetch_work()
2050 work->num_sge = num_sge; in init_prefetch_work()
2057 u32 num_sge) in mlx5_ib_prefetch_sg_list() argument
2063 for (i = 0; i < num_sge; ++i) { in mlx5_ib_prefetch_sg_list()
[all …]
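
init_prefetch_work() uses a careful partial-failure idiom: if taking a reference for entry i fails, it stores work->num_sge = i so that destroy_prefetch_work() releases exactly the references already taken, and only on full success does it store the requested count. A sketch of that idiom with hypothetical helpers:

    #include <stdint.h>

    struct frag { void *mr; };

    struct prefetch_work {
        uint32_t num_sge;
        struct frag frags[8];
    };

    /* Stubs for MR lookup; the real code takes/drops refcounts. */
    static void *get_mr(uint32_t i) { return i < 6 ? (void *)(uintptr_t)(i + 1) : NULL; }
    static void put_mr(void *mr) { (void)mr; }

    static int init_work(struct prefetch_work *work, uint32_t num_sge)
    {
        if (num_sge > sizeof(work->frags) / sizeof(work->frags[0]))
            return -1;

        for (uint32_t i = 0; i < num_sge; i++) {
            work->frags[i].mr = get_mr(i);
            if (!work->frags[i].mr) {
                work->num_sge = i;   /* only i entries hold references */
                return -1;           /* caller runs the destroy path */
            }
        }
        work->num_sge = num_sge;     /* fully initialized */
        return 0;
    }

    static void destroy_work(struct prefetch_work *work)
    {
        for (uint32_t i = 0; i < work->num_sge; i++)
            put_mr(work->frags[i].mr);
    }
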
/linux/include/uapi/rdma/
siw-abi.h
110 __u8 num_sge; member
128 __u8 num_sge; member
vmw_pvrdma-abi.h
231 __u32 num_sge; /* size of s/g array */ member
239 __u32 num_sge; /* size of s/g array */ member
rvt-abi.h
45 __u8 num_sge; member
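
These uapi headers pin num_sge's width and offset in work-request descriptors shared across the user/kernel boundary; siw and rvt spend a __u8 on it while pvrdma reserves a __u32. An illustrative fixed-layout struct in that style (not any of the real ABIs):

    #include <stdint.h>

    /* Illustrative only: a fixed-layout receive WQE in the style of the
     * siw/rvt uapi structs, where every field width is part of the ABI. */
    struct abi_sge {
        uint64_t addr;
        uint32_t length;
        uint32_t lkey;
    };

    struct abi_recv_wqe {
        uint64_t wr_id;
        uint8_t num_sge;       /* a __u8, as in siw-abi.h and rvt-abi.h */
        uint8_t pad[7];        /* keep the SGE array 8-byte aligned */
        struct abi_sge sge[];  /* num_sge entries follow */
    };
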
/linux/drivers/infiniband/hw/hfi1/
uc.c
114 qp->s_sge.num_sge = wqe->wr.num_sge; in hfi1_make_uc_req()
293 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
458 qp->r_sge.num_sge = 1; in hfi1_uc_rcv()
460 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
535 qp->r_sge.num_sge = 0; in hfi1_uc_rcv()
/linux/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c
704 if (unlikely(wr->num_sge > qp->sq.max_sg || wr->num_sge < 0)) { in pvrdma_post_send()
753 wqe_hdr->num_sge = wr->num_sge; in pvrdma_post_send()
836 for (i = 0; i < wr->num_sge; i++) { in pvrdma_post_send()
904 if (unlikely(wr->num_sge > qp->rq.max_sg || in pvrdma_post_recv()
905 wr->num_sge < 0)) { in pvrdma_post_recv()
924 wqe_hdr->num_sge = wr->num_sge; in pvrdma_post_recv()
928 for (i = 0; i < wr->num_sge; i++) { in pvrdma_post_recv()
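
pvrdma_post_send() and pvrdma_post_recv() refuse a WR whose num_sge is negative or larger than the queue's max_sg before touching the shared ring, and qedr's GSI path below applies the same kind of limit. A sketch of that guard:

    #include <errno.h>

    struct wr { int num_sge; /* ... */ };

    /* Validate before copying anything into the shared ring, as
     * pvrdma_post_send() does against qp->sq.max_sg. */
    static int check_wr(const struct wr *wr, int max_sg)
    {
        if (wr->num_sge < 0 || wr->num_sge > max_sg)
            return -EINVAL;
        return 0;
    }
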
/linux/drivers/infiniband/hw/qedr/
qedr_roce_cm.c
406 for (i = 0; i < swr->num_sge; ++i) in qedr_gsi_build_header()
530 packet->n_seg = swr->num_sge; in qedr_gsi_build_packet()
558 if (wr->num_sge > RDMA_MAX_SGE_PER_SQ_WQE) { in qedr_gsi_post_send()
560 wr->num_sge, RDMA_MAX_SGE_PER_SQ_WQE); in qedr_gsi_post_send()
631 if (wr->num_sge > QEDR_GSI_MAX_RECV_SGE) { in qedr_gsi_post_recv()
634 wr->num_sge, QEDR_GSI_MAX_RECV_SGE); in qedr_gsi_post_recv()
/linux/net/sunrpc/xprtrdma/
frwr_ops.c
400 mr->mr_regwr.wr.num_sge = 0; in frwr_send()
520 last->num_sge = 0; in frwr_unmap_sync()
623 last->num_sge = 0; in frwr_unmap_async()
692 mr->mr_regwr.wr.num_sge = 0; in frwr_wp_create()
svc_rdma_sendto.c
219 ctxt->sc_send_wr.num_sge = 0; in svc_rdma_send_ctxt_get()
248 for (i = 1; i < ctxt->sc_send_wr.num_sge; i++) { in svc_rdma_send_ctxt_release()
590 ctxt->sc_send_wr.num_sge++;
826 * The Send WR's num_sge field is set in all cases. in svc_rdma_map_reply_msg()
840 sctxt->sc_send_wr.num_sge = 1; in svc_rdma_map_reply_msg()
976 sctxt->sc_send_wr.num_sge = 1;
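
The svc_rdma send path builds its Send WR incrementally: a fresh context starts at num_sge = 0, each mapped buffer appends one SGE and bumps the count, and header-only replies leave it at 1. A sketch of the append pattern with hypothetical types:

    #include <stdint.h>

    struct sge { uint64_t addr; uint32_t length; };

    struct send_ctxt {
        int num_sge;            /* starts at 0 when the context is taken */
        int max_sge;
        struct sge sge[16];
    };

    /* Append one mapped buffer; num_sge always reflects the live entry
     * count, so the release path can walk exactly that many. */
    static int ctxt_add_sge(struct send_ctxt *c, uint64_t addr, uint32_t len)
    {
        if (c->num_sge >= c->max_sge)
            return -1;
        c->sge[c->num_sge].addr = addr;
        c->sge[c->num_sge].length = len;
        c->num_sge++;
        return 0;
    }
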
/linux/drivers/infiniband/core/
rw.c
160 reg->wr.wr.num_sge = 1; in rdma_rw_init_mr_wrs()
224 rdma_wr->wr.num_sge = nr_sge; in rdma_rw_init_map_wrs()
269 rdma_wr->wr.num_sge = 1; in rdma_rw_init_single_wr()
428 ctx->reg->reg_wr.wr.num_sge = 0; in rdma_rw_ctx_signature_init()
444 rdma_wr->wr.num_sge = 1; in rdma_rw_ctx_signature_init()
/linux/drivers/infiniband/ulp/iser/
iser_memory.c
243 inv_wr->num_sge = 0; in iser_inv_rkey()
284 wr->wr.num_sge = 0; in iser_reg_sig_mr()
332 wr->wr.num_sge = 0; in iser_fast_reg_mr()
