
Searched refs:wqe_size (Results 1 – 25 of 40) sorted by relevance


/linux/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.h
181 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,
185 unsigned int wqe_size, u16 *prod_idx);
187 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size);
191 unsigned int wqe_size);
195 unsigned int wqe_size, u16 *cons_idx);
199 unsigned int *wqe_size, u16 *cons_idx);
201 void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size);
207 unsigned int wqe_size, u16 *prod_idx);
213 unsigned int wqe_size,
217 unsigned int wqe_size,
[all …]
hinic_hw_qp.c
635 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size, in hinic_sq_write_db() argument
641 prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_sq_write_db()
658 unsigned int wqe_size, u16 *prod_idx) in hinic_sq_get_wqe() argument
660 struct hinic_hw_wqe *hw_wqe = hinic_get_wqe(sq->wq, wqe_size, in hinic_sq_get_wqe()
674 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size) in hinic_sq_return_wqe() argument
676 hinic_return_wqe(sq->wq, wqe_size); in hinic_sq_return_wqe()
689 struct sk_buff *skb, unsigned int wqe_size) in hinic_sq_write_wqe() argument
696 hinic_cpu_to_be32(sq_wqe, wqe_size); in hinic_sq_write_wqe()
698 hinic_write_wqe(sq->wq, hw_wqe, wqe_size); in hinic_sq_write_wqe()
713 unsigned int *wqe_size, u16 *cons_idx) in hinic_sq_read_wqebb() argument
[all …]
hinic_tx.c
498 unsigned int wqe_size; in hinic_lb_xmit_frame() local
510 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_lb_xmit_frame()
512 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
516 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
528 wqe_size = 0; in hinic_lb_xmit_frame()
534 hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size); in hinic_lb_xmit_frame()
539 hinic_sq_write_db(txq->sq, prod_idx, wqe_size, 0); in hinic_lb_xmit_frame()
559 unsigned int wqe_size; in hinic_xmit_frame() local
591 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_xmit_frame()
593 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_xmit_frame()
[all …]
hinic_hw_wq.h
96 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
99 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size);
101 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size);
103 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
109 unsigned int wqe_size);
hinic_hw_wq.c
740 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe() argument
748 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift; in hinic_get_wqe()
789 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_return_wqe() argument
791 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_return_wqe()
803 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe() argument
805 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_put_wqe()
821 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_read_wqe() argument
824 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_read_wqe()
889 unsigned int wqe_size) in hinic_write_wqe() argument
899 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_write_wqe()
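
The hinic snippets above all repeat one computation: a WQE of wqe_size bytes is rounded up to a whole number of work-queue basic blocks (WQEBBs) of wqebb_size bytes each. A minimal userspace sketch of that math, with hypothetical sizes and the kernel's ALIGN macro redefined locally:

    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1)) /* a must be a power of two */

    int main(void)
    {
        unsigned int wqebb_size = 64;  /* hypothetical WQEBB size */
        unsigned int wqe_size = 176;   /* hypothetical WQE size in bytes */
        unsigned int num_wqebbs = ALIGN(wqe_size, wqebb_size) / wqebb_size;

        printf("%u-byte WQE occupies %u WQEBBs of %u bytes\n",
               wqe_size, num_wqebbs, wqebb_size); /* -> 3 WQEBBs of 64 bytes */
        return 0;
    }

hinic_get_wqe() at line 748 does the same rounding with a shift (>> wqebb_size_shift), which is equivalent because wqebb_size is a power of two.
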
hinic_hw_cmdq.c
155 unsigned int wqe_size = 0; in cmdq_wqe_size_from_bdlen() local
159 wqe_size = WQE_LCMD_SIZE; in cmdq_wqe_size_from_bdlen()
162 wqe_size = WQE_SCMD_SIZE; in cmdq_wqe_size_from_bdlen()
166 return wqe_size; in cmdq_wqe_size_from_bdlen()
532 unsigned int bufdesc_len, wqe_size; in clear_wqe_complete_bit() local
536 wqe_size = cmdq_wqe_size_from_bdlen(bufdesc_len); in clear_wqe_complete_bit()
537 if (wqe_size == WQE_LCMD_SIZE) { in clear_wqe_complete_bit()
/linux/drivers/infiniband/hw/qedr/
qedr_hsi_rdma.h
310 u8 wqe_size; member
338 u8 wqe_size; member
374 u8 wqe_size; member
420 u8 wqe_size; member
475 u8 wqe_size; member
498 u8 wqe_size; member
548 u8 wqe_size; member
602 u8 wqe_size; member
628 u8 wqe_size; member
663 u8 wqe_size; member
[all …]
verbs.c
3291 struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_inline_data() argument
3328 (*wqe_size)++; in qedr_prepare_sq_inline_data()
3376 static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_sges() argument
3391 if (wqe_size) in qedr_prepare_sq_sges()
3392 *wqe_size += wr->num_sge; in qedr_prepare_sq_sges()
3413 return qedr_prepare_sq_inline_data(dev, qp, &rwqe->wqe_size, wr, in qedr_prepare_sq_rdma_data()
3417 return qedr_prepare_sq_sges(qp, &rwqe->wqe_size, wr); in qedr_prepare_sq_rdma_data()
3432 return qedr_prepare_sq_inline_data(dev, qp, &swqe->wqe_size, wr, in qedr_prepare_sq_send_data()
3436 return qedr_prepare_sq_sges(qp, &swqe->wqe_size, wr); in qedr_prepare_sq_send_data()
3589 swqe->wqe_size = 2; in __qedr_post_send()
[all …]
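
Unlike the byte-based drivers, the qedr snippets track wqe_size as a u8 count of WQE elements, not bytes: __qedr_post_send() starts a send WQE at 2 and qedr_prepare_sq_sges() adds wr->num_sge on top. A tiny sketch of that accounting, with a hypothetical SGE count:

    #include <stdio.h>

    int main(void)
    {
        unsigned char wqe_size = 2; /* header elements, as in __qedr_post_send() */
        unsigned int num_sge = 3;   /* hypothetical SGE count */

        wqe_size += num_sge;        /* what qedr_prepare_sq_sges() does */
        printf("send WQE spans %u elements\n", (unsigned int)wqe_size);
        return 0;
    }
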
qedr.h
434 u8 wqe_size; member
445 u8 wqe_size; member
/linux/drivers/infiniband/sw/rxe/
rxe_srq.c
49 int wqe_size; in rxe_srq_from_init() local
59 wqe_size = sizeof(struct rxe_recv_wqe) + in rxe_srq_from_init()
65 q = rxe_queue_init(rxe, &srq->rq.max_wr, wqe_size, in rxe_srq_from_init()
157 int wqe_size; in rxe_srq_from_attr() local
167 wqe_size = sizeof(struct rxe_recv_wqe) + in rxe_srq_from_attr()
170 err = rxe_queue_resize(q, &attr->max_wr, wqe_size, in rxe_srq_from_attr()
rxe_qp.c
191 int wqe_size; in rxe_init_sq() local
195 wqe_size = max_t(int, init->cap.max_send_sge * sizeof(struct ib_sge), in rxe_init_sq()
197 qp->sq.max_sge = wqe_size / sizeof(struct ib_sge); in rxe_init_sq()
198 qp->sq.max_inline = wqe_size; in rxe_init_sq()
199 wqe_size += sizeof(struct rxe_send_wqe); in rxe_init_sq()
201 qp->sq.queue = rxe_queue_init(rxe, &qp->sq.max_wr, wqe_size, in rxe_init_sq()
283 int wqe_size; in rxe_init_rq() local
288 wqe_size = sizeof(struct rxe_recv_wqe) + in rxe_init_rq()
291 qp->rq.queue = rxe_queue_init(rxe, &qp->rq.max_wr, wqe_size, in rxe_init_rq()
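
rxe_init_sq() above sizes the payload area as the larger of the SGE array and the requested inline-data size, then adds the rxe_send_wqe header on top, so a large inline request also grows max_sge. A standalone sketch, assuming a hypothetical 64-byte header (sizeof(struct ib_sge) is 16 bytes: addr, length, lkey):

    #include <stdio.h>

    int main(void)
    {
        unsigned int sge_sz = 16;  /* sizeof(struct ib_sge): addr, length, lkey */
        unsigned int hdr_sz = 64;  /* hypothetical sizeof(struct rxe_send_wqe) */
        unsigned int max_send_sge = 4, max_inline_req = 100; /* hypothetical caps */

        /* payload area: the larger of the SGE array and the inline request */
        unsigned int payload = max_send_sge * sge_sz;        /* 64 */
        if (payload < max_inline_req)
            payload = max_inline_req;                        /* 100 */

        unsigned int max_sge = payload / sge_sz;  /* 6: inline grew the SGE room */
        unsigned int max_inline = payload;
        unsigned int wqe_size = payload + hdr_sz; /* header on top of payload */

        printf("max_sge=%u max_inline=%u wqe_size=%u\n",
               max_sge, max_inline, wqe_size);
        return 0;
    }
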
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
49 #define MLX5E_KSM_MAX_ENTRIES_PER_WQE(wqe_size)\ argument
50 (((wqe_size) - sizeof(struct mlx5e_umr_wqe)) / sizeof(struct mlx5_ksm))
52 #define MLX5E_KSM_ENTRIES_PER_WQE(wqe_size)\ argument
53 ALIGN_DOWN(MLX5E_KSM_MAX_ENTRIES_PER_WQE(wqe_size), MLX5_UMR_KSM_NUM_ENTRIES_ALIGNMENT)
130 static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size) in mlx5e_fetch_wqe() argument
135 memset(wqe, 0, wqe_size); in mlx5e_fetch_wqe()
521 static inline u16 mlx5e_stop_room_for_wqe(struct mlx5_core_dev *mdev, u16 wqe_size) in mlx5e_stop_room_for_wqe() argument
533 WARN_ONCE(wqe_size > mlx5e_get_max_sq_wqebbs(mdev), in mlx5e_stop_room_for_wqe()
535 wqe_size, mlx5e_get_max_sq_wqebbs(mdev)); in mlx5e_stop_room_for_wqe()
537 return MLX5E_STOP_ROOM(wqe_size); in mlx5e_stop_room_for_wqe()
[all …]
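
MLX5E_KSM_ENTRIES_PER_WQE() above computes how many KSM entries fit in a UMR WQE after the mlx5e_umr_wqe header, rounded down to the required alignment. A worked example with hypothetical header, WQE, and alignment values (the 16-byte KSM entry is an assumption):

    #include <stdio.h>

    #define ALIGN_DOWN(x, a) ((x) / (a) * (a))

    int main(void)
    {
        size_t wqe_size = 512; /* hypothetical UMR WQE size */
        size_t umr_hdr = 48;   /* hypothetical sizeof(struct mlx5e_umr_wqe) */
        size_t ksm_sz = 16;    /* assumed sizeof(struct mlx5_ksm) */
        size_t align = 4;      /* hypothetical MLX5_UMR_KSM_NUM_ENTRIES_ALIGNMENT */

        size_t max_entries = (wqe_size - umr_hdr) / ksm_sz; /* 29 */
        size_t entries = ALIGN_DOWN(max_entries, align);    /* 28 */

        printf("max %zu KSM entries, %zu after alignment\n", max_entries, entries);
        return 0;
    }
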
params.c
835 int wqe_size = BIT(log_stride_sz) * num_strides; in mlx5e_shampo_get_log_cq_size() local
840 return order_base_2((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1)); in mlx5e_shampo_get_log_cq_size()
1043 int wqe_size = BIT(log_stride_sz) * num_strides; in mlx5e_shampo_hd_per_wqe() local
1047 hd_per_wqe = (wqe_size / resv_size) * pkt_per_resv; in mlx5e_shampo_hd_per_wqe()
1049 __func__, hd_per_wqe, resv_size, wqe_size, pkt_per_resv); in mlx5e_shampo_hd_per_wqe()
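
mlx5e_shampo_hd_per_wqe() above derives the WQE size from the stride size and count, then multiplies the reservations per WQE by the packets per reservation. A worked example with hypothetical values:

    #include <stdio.h>

    int main(void)
    {
        int log_stride_sz = 6, num_strides = 512; /* hypothetical */
        int rsrv_size = 4096, pkt_per_rsrv = 2;   /* hypothetical */

        int wqe_size = (1 << log_stride_sz) * num_strides;      /* 64 * 512 */
        int hd_per_wqe = (wqe_size / rsrv_size) * pkt_per_rsrv; /* 8 * 2 = 16 */

        printf("wqe_size=%d hd_per_wqe=%d\n", wqe_size, hd_per_wqe);
        return 0;
    }
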
/linux/drivers/infiniband/hw/erdma/
erdma_qp.c
288 u32 wqe_size, wqebb_cnt, hw_op, flags, sgl_offset; in erdma_push_one_sqe() local
341 wqe_size = sizeof(struct erdma_write_sqe); in erdma_push_one_sqe()
342 sgl_offset = wqe_size; in erdma_push_one_sqe()
370 wqe_size = sizeof(struct erdma_readreq_sqe) + in erdma_push_one_sqe()
389 wqe_size = sizeof(struct erdma_send_sqe); in erdma_push_one_sqe()
390 sgl_offset = wqe_size; in erdma_push_one_sqe()
414 wqe_size = sizeof(struct erdma_reg_mr_sqe) + in erdma_push_one_sqe()
418 wqe_size = sizeof(struct erdma_reg_mr_sqe); in erdma_push_one_sqe()
428 wqe_size = sizeof(struct erdma_reg_mr_sqe); in erdma_push_one_sqe()
457 wqe_size = sizeof(*atomic_sqe); in erdma_push_one_sqe()
[all …]
/linux/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
368 dev->attr.wqe_size) : 0; in _ocrdma_alloc_pd()
500 resp.wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
502 resp.dpp_wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
1922 const struct ib_send_wr *wr, u32 wqe_size) in ocrdma_build_inline_sges() argument
1943 wqe_size += roundup(hdr->total_len, OCRDMA_WQE_ALIGN_BYTES); in ocrdma_build_inline_sges()
1945 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1950 wqe_size += (wr->num_sge * sizeof(struct ocrdma_sge)); in ocrdma_build_inline_sges()
1952 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1955 hdr->cw |= ((wqe_size / OCRDMA_WQE_STRIDE) << OCRDMA_WQE_SIZE_SHIFT); in ocrdma_build_inline_sges()
1963 u32 wqe_size = sizeof(*hdr); in ocrdma_build_send() local
[all …]
/linux/drivers/infiniband/hw/bnxt_re/
qplib_fp.h
95 u16 wqe_size; member
254 u16 wqe_size; member
591 return (que->wqe_size * que->max_wqe) / sizeof(struct sq_sge); in bnxt_qplib_get_depth()
606 static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size) in bnxt_qplib_set_rq_max_slot() argument
608 return (wqe_size / sizeof(struct sq_sge)); in bnxt_qplib_set_rq_max_slot()
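
bnxt_re measures queues in SGE-sized slots: bnxt_qplib_get_depth() above converts (wqe_size * max_wqe) bytes into slots, and bnxt_qplib_set_rq_max_slot() converts a single WQE. A sketch with hypothetical queue dimensions; the struct here is a stand-in with the same 16-byte footprint as the driver's sq_sge:

    #include <stdio.h>

    /* stand-in with the same 16-byte layout as bnxt_re's SGE slot */
    struct sq_sge {
        unsigned long long va;
        unsigned int lkey;
        unsigned int size;
    };

    int main(void)
    {
        unsigned int wqe_size = 128; /* hypothetical per-WQE size in bytes */
        unsigned int max_wqe = 256;  /* hypothetical queue length in WQEs */

        unsigned int depth = (wqe_size * max_wqe) / sizeof(struct sq_sge);
        unsigned int rq_slots = wqe_size / sizeof(struct sq_sge);

        printf("queue depth %u slots, %u slots per RQ WQE\n", depth, rq_slots);
        return 0;
    }
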
ib_verbs.c
969 u16 wqe_size, calc_ils; in bnxt_re_get_wqe_size() local
971 wqe_size = bnxt_re_get_swqe_size(nsge); in bnxt_re_get_wqe_size()
974 wqe_size = max_t(u16, calc_ils, wqe_size); in bnxt_re_get_wqe_size()
975 wqe_size = ALIGN(wqe_size, sizeof(struct sq_send_hdr)); in bnxt_re_get_wqe_size()
977 return wqe_size; in bnxt_re_get_wqe_size()
997 sq->wqe_size = bnxt_re_get_wqe_size(ilsize, sq->max_sge); in bnxt_re_setup_swqe_size()
998 if (sq->wqe_size > bnxt_re_get_swqe_size(dev_attr->max_qp_sges)) in bnxt_re_setup_swqe_size()
1003 if (sq->wqe_size < bnxt_re_get_swqe_size(dev_attr->max_qp_sges) && in bnxt_re_setup_swqe_size()
1005 sq->wqe_size = bnxt_re_get_swqe_size(dev_attr->max_qp_sges); in bnxt_re_setup_swqe_size()
1008 qplqp->max_inline_data = sq->wqe_size - in bnxt_re_setup_swqe_size()
[all …]
/linux/drivers/infiniband/hw/mlx5/
qp.c
274 size_t wqe_size = 1 << wq->wqe_shift; in mlx5_ib_read_wqe_rq() local
276 if (buflen < wqe_size) in mlx5_ib_read_wqe_rq()
306 size_t wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_read_wqe_srq() local
308 if (buflen < wqe_size) in mlx5_ib_read_wqe_srq()
436 int wqe_size; in set_rq_size() local
467 wqe_size = in set_rq_size()
470 wqe_size += cap->max_recv_sge * sizeof(struct mlx5_wqe_data_seg); in set_rq_size()
471 wqe_size = roundup_pow_of_two(wqe_size); in set_rq_size()
472 wq_size = roundup_pow_of_two(cap->max_recv_wr) * wqe_size; in set_rq_size()
474 qp->rq.wqe_cnt = wq_size / wqe_size; in set_rq_size()
[all …]
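
set_rq_size() above rounds both the per-WQE size (max_recv_sge data segments) and the WR count up to powers of two before deriving wqe_cnt, so the effective RQ can be deeper than requested. A userspace sketch with a local roundup_pow_of_two() and an assumed 16-byte data segment:

    #include <stdio.h>

    static unsigned int roundup_pow_of_two(unsigned int x)
    {
        unsigned int p = 1;

        while (p < x)
            p <<= 1;
        return p;
    }

    int main(void)
    {
        unsigned int max_recv_sge = 3, max_recv_wr = 100; /* hypothetical caps */
        unsigned int data_seg = 16; /* assumed sizeof(struct mlx5_wqe_data_seg) */

        unsigned int wqe_size = roundup_pow_of_two(max_recv_sge * data_seg); /* 48 -> 64 */
        unsigned int wq_size = roundup_pow_of_two(max_recv_wr) * wqe_size;   /* 128 * 64 */
        unsigned int wqe_cnt = wq_size / wqe_size;                           /* 128 */

        printf("wqe_size=%u wq_size=%u wqe_cnt=%u\n", wqe_size, wq_size, wqe_cnt);
        return 0;
    }
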
odp.c
1116 int wqe_size = 1 << srq->msrq.wqe_shift; in mlx5_ib_mr_responder_pfault_handler_srq() local
1118 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler_srq()
1123 *wqe_end = *wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler_srq()
1135 int wqe_size = 1 << wq->wqe_shift; in mlx5_ib_mr_responder_pfault_handler_rq() local
1142 if (wqe_size > wqe_length) { in mlx5_ib_mr_responder_pfault_handler_rq()
1147 *wqe_end = wqe + wqe_size; in mlx5_ib_mr_responder_pfault_handler_rq()
/linux/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_qp.c
147 qp->rq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_rq_wqe_hdr) + in pvrdma_set_rq_size()
150 qp->npages_recv = (qp->rq.wqe_cnt * qp->rq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_rq_size()
172 qp->sq.wqe_size = roundup_pow_of_two(sizeof(struct pvrdma_sq_wqe_hdr) + in pvrdma_set_sq_size()
177 (qp->sq.wqe_cnt * qp->sq.wqe_size + PAGE_SIZE - 1) / in pvrdma_set_sq_size()
637 qp->sq.offset + n * qp->sq.wqe_size); in get_sq_wqe()
643 qp->rq.offset + n * qp->rq.wqe_size); in get_rq_wqe()
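
pvrdma rounds wqe_size up to a power of two, computes how many pages the ring needs, and locates WQE n at byte offset n * wqe_size (as get_sq_wqe()/get_rq_wqe() do above). A sketch of both computations with hypothetical dimensions:

    #include <stdio.h>

    #define PAGE_SIZE 4096u

    int main(void)
    {
        unsigned int wqe_size = 256; /* hypothetical, already a power of two */
        unsigned int wqe_cnt = 100;  /* hypothetical ring length */
        unsigned int n = 7;          /* index of some WQE in the ring */

        /* open-coded DIV_ROUND_UP, as in pvrdma_set_rq_size() above */
        unsigned int npages = (wqe_cnt * wqe_size + PAGE_SIZE - 1) / PAGE_SIZE;
        unsigned int off = n * wqe_size; /* offset used by get_sq_wqe()/get_rq_wqe() */

        printf("%u WQEs of %u bytes -> %u pages; WQE %u at offset %u\n",
               wqe_cnt, wqe_size, npages, n, off);
        return 0;
    }
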
pvrdma.h
155 int wqe_size; member
170 int wqe_size; member
/linux/include/uapi/rdma/
ocrdma-abi.h
55 __u32 wqe_size; member
/linux/drivers/scsi/elx/libefc_sli/
sli4.c
317 n_wqe = qmem->size / sli4->wqe_size; in sli_cmd_wq_create()
352 if (sli4->wqe_size == SLI4_WQE_EXT_BYTES) in sli_cmd_wq_create()
750 size = sli4->wqe_size; in sli_get_queue_entry_size()
1377 memset(buf, 0, sli->wqe_size); in sli_abort_wqe()
1426 memset(buf, 0, sli->wqe_size); in sli_els_request64_wqe()
1571 memset(buf, 0, sli->wqe_size); in sli_fcp_icmnd64_wqe()
1638 memset(buf, 0, sli->wqe_size); in sli_fcp_iread64_wqe()
1741 memset(buf, 0, sli->wqe_size); in sli_fcp_iwrite64_wqe()
1832 memset(buf, 0, sli->wqe_size); in sli_fcp_treceive64_wqe()
1931 if (params->app_id && sli->wqe_size == SLI4_WQE_EXT_BYTES && in sli_fcp_treceive64_wqe()
[all …]
/linux/drivers/net/ethernet/microsoft/mana/
gdma_main.c
1100 u32 wqe_size; in mana_gd_post_work_request() local
1122 wqe_size = ALIGN(sizeof(struct gdma_wqe) + client_oob_size + in mana_gd_post_work_request()
1124 if (wqe_size > max_wqe_size) in mana_gd_post_work_request()
1127 if (wq->monitor_avl_buf && wqe_size > mana_gd_wq_avail_space(wq)) { in mana_gd_post_work_request()
1134 wqe_info->wqe_size_in_bu = wqe_size / GDMA_WQE_BU_SIZE; in mana_gd_post_work_request()
1144 wq->head += wqe_size / GDMA_WQE_BU_SIZE; in mana_gd_post_work_request()
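
mana_gd_post_work_request() above pads the WQE (header + client OOB + SGL) to a GDMA basic unit and advances the queue head in basic units rather than bytes. A sketch assuming a 32-byte basic unit and hypothetical component sizes:

    #include <stdio.h>

    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))
    #define GDMA_WQE_BU_SIZE 32u /* assumed basic-unit size */

    int main(void)
    {
        unsigned int hdr = 8, oob = 24, sgl = 48; /* hypothetical component sizes */
        unsigned int wqe_size = ALIGN(hdr + oob + sgl, GDMA_WQE_BU_SIZE); /* 80 -> 96 */
        unsigned int head = 0;

        head += wqe_size / GDMA_WQE_BU_SIZE; /* head moves 3 basic units */
        printf("wqe_size=%u bytes, head advanced to %u BUs\n", wqe_size, head);
        return 0;
    }
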
/linux/drivers/infiniband/hw/irdma/
uk.c
1615 int irdma_fragcnt_to_wqesize_rq(u32 frag_cnt, u16 *wqe_size) in irdma_fragcnt_to_wqesize_rq() argument
1620 *wqe_size = 32; in irdma_fragcnt_to_wqesize_rq()
1624 *wqe_size = 64; in irdma_fragcnt_to_wqesize_rq()
1630 *wqe_size = 128; in irdma_fragcnt_to_wqesize_rq()
1639 *wqe_size = 256; in irdma_fragcnt_to_wqesize_rq()
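
irdma_fragcnt_to_wqesize_rq() above buckets the fragment count into fixed RQ WQE sizes of 32, 64, 128, or 256 bytes. A sketch of that shape; only the size ladder is visible in the snippet, so the exact thresholds here are assumptions:

    #include <stdio.h>

    static int fragcnt_to_wqesize_rq(unsigned int frag_cnt, unsigned short *wqe_size)
    {
        if (frag_cnt <= 1)       /* threshold assumed */
            *wqe_size = 32;
        else if (frag_cnt <= 3)  /* threshold assumed */
            *wqe_size = 64;
        else if (frag_cnt <= 7)  /* threshold assumed */
            *wqe_size = 128;
        else if (frag_cnt <= 14) /* threshold assumed */
            *wqe_size = 256;
        else
            return -1;           /* too many fragments for one RQ WQE */
        return 0;
    }

    int main(void)
    {
        unsigned short sz;
        unsigned int frags;

        for (frags = 1; frags <= 15; frags++)
            if (!fragcnt_to_wqesize_rq(frags, &sz))
                printf("%2u fragments -> %3u-byte RQ WQE\n",
                       frags, (unsigned int)sz);
        return 0;
    }
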
