| /linux/drivers/infiniband/sw/rdmavt/ |
| H A D | trace_tx.h |
|    53  __field(u64, wr_id)
|    74  __entry->wr_id = wqe->wr.wr_id;
|    95  __entry->wr_id,
|   121  __field(u64, wr_id)
|   133  __entry->wr_id = wqe->wr.wr_id;
|   149  __entry->wr_id,
|
| H A D | trace_cq.h | 74 __field(u64, wr_id) 85 __entry->wr_id = wc->wr_id; 98 __entry->wr_id,
|
| H A D | srq.c | 201 p->wr_id = wqe->wr_id; in rvt_modify_srq()
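Throughout these hits, wr_id is the opaque 64-bit cookie of the RDMA verbs API: the consumer sets it on each posted work request and the provider hands it back untouched in the matching struct ib_wc, which is why rdmavt both traces it and preserves it when migrating SRQ entries in rvt_modify_srq(). Below is a minimal kernel-context sketch of that round trip, assuming a QP, CQ and SGE prepared elsewhere; the ring array and helper names are hypothetical and not taken from rdmavt.

```c
#include <rdma/ib_verbs.h>

/*
 * Hedged sketch: post a receive whose wr_id indexes into a driver-private
 * ring, then recover that index from the completion.  The ring and its
 * depth are made-up names for illustration only.
 */
static int demo_post_recv(struct ib_qp *qp, struct ib_sge *sge, u64 slot)
{
	struct ib_recv_wr wr = {
		.wr_id   = slot,	/* opaque cookie, returned in ib_wc */
		.sg_list = sge,
		.num_sge = 1,
	};
	const struct ib_recv_wr *bad_wr;

	return ib_post_recv(qp, &wr, &bad_wr);
}

static void demo_poll(struct ib_cq *cq, void *ring[], unsigned int ring_size)
{
	struct ib_wc wc;

	while (ib_poll_cq(cq, 1, &wc) > 0) {
		if (wc.wr_id < ring_size)
			pr_debug("completion for slot %llu buf %p status %d\n",
				 (unsigned long long)wc.wr_id,
				 ring[wc.wr_id], wc.status);
	}
}
```

Nothing in the core stack interprets the value; drivers are free to store an index, a pointer, or packed flag bits in it, which is exactly what the remaining hits in this listing show.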
|
| /linux/net/rds/ |
| H A D | ib_ring.c |
|   156  u32 rds_ib_ring_completed(struct rds_ib_work_ring *ring, u32 wr_id, u32 oldest)  in rds_ib_ring_completed() argument
|   160  if (oldest <= (unsigned long long)wr_id)  in rds_ib_ring_completed()
|   161  ret = (unsigned long long)wr_id - oldest + 1;  in rds_ib_ring_completed()
|   163  ret = ring->w_nr - oldest + (unsigned long long)wr_id + 1;  in rds_ib_ring_completed()
|   166  wr_id, oldest);  in rds_ib_ring_completed()
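rds_ib_ring_completed() turns the wr_id reported by a completion into a count of retired ring slots, handling the case where the range wraps past the end of the ring. A standalone model of just that arithmetic (illustrative only; the real function lives in net/rds/ib_ring.c):

```c
#include <stdint.h>
#include <stdio.h>
#include <assert.h>

/*
 * Given the oldest outstanding slot and the slot reported in wc->wr_id,
 * how many posted entries does this completion retire?  w_nr is the
 * ring size.
 */
static uint32_t ring_completed(uint32_t w_nr, uint32_t wr_id, uint32_t oldest)
{
	if (oldest <= wr_id)
		return wr_id - oldest + 1;	/* no wrap */
	return w_nr - oldest + wr_id + 1;	/* wrapped past slot 0 */
}

int main(void)
{
	assert(ring_completed(256, 10, 5) == 6);	/* slots 5..10 */
	assert(ring_completed(256, 2, 250) == 9);	/* 250..255 then 0..2 */
	printf("ring model ok\n");
	return 0;
}
```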
|
| H A D | ib_frmr.c | 157 reg_wr.wr.wr_id = (unsigned long)(void *)ibmr; in rds_ib_post_reg_frmr() 292 s_wr->wr_id = (unsigned long)(void *)ibmr; in rds_ib_post_inv() 328 struct rds_ib_mr *ibmr = (void *)(unsigned long)wc->wr_id; in rds_ib_mr_cqe_handler()
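ib_frmr.c uses the cookie differently: it stores the struct rds_ib_mr pointer itself in wr_id when posting the registration/invalidate work requests, and casts it back in rds_ib_mr_cqe_handler(). A hedged userspace model of that pointer round trip follows; the kernel code casts through unsigned long (pointer-sized there), and uintptr_t plays the same role below.

```c
#include <stdint.h>
#include <stdio.h>

/* Toy stand-in for struct rds_ib_mr; only the round trip of the pointer
 * through the 64-bit wr_id cookie is being demonstrated. */
struct demo_mr {
	int key;
};

int main(void)
{
	struct demo_mr mr = { .key = 42 };

	/* Post side: stash the MR pointer in the work request cookie,
	 * as ib_frmr.c does with reg_wr.wr.wr_id. */
	uint64_t wr_id = (uintptr_t)&mr;

	/* Completion side: recover the pointer from wc->wr_id. */
	struct demo_mr *completed = (struct demo_mr *)(uintptr_t)wr_id;

	printf("completed MR key = %d\n", completed->key);
	return 0;
}
```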
|
| /linux/net/smc/ |
| H A D | smc_wr.h |
|    92  u64 wr_id, temp_wr_id;  in smc_wr_rx_post()
|    95  wr_id = ++link->wr_rx_id; /* tasklet context, thus not atomic */  in smc_wr_rx_post()
|    96  temp_wr_id = wr_id;  in smc_wr_rx_post()
|    98  link->wr_rx_ibs[index].wr_id = wr_id;  in smc_wr_rx_post()
|    94  u64 wr_id, temp_wr_id;  smc_wr_rx_post() local
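smc_wr_rx_post() draws wr_id from a per-link counter, incremented without atomics because posting happens in tasklet context, and uses it to pick which pre-built receive WR to repost. A simplified standalone model is sketched below; the queue depth is hypothetical and the modulo step is an assumption about how the index is derived from the counter.

```c
#include <stdint.h>
#include <stdio.h>

#define DEMO_RX_CNT 16	/* hypothetical number of pre-allocated recv WRs */

/* Each post gets a new, monotonically increasing wr_id, and the low bits
 * pick which of the pre-built receive work requests to reuse. */
struct demo_link {
	uint64_t wr_rx_id;			/* last used wr_id */
	uint64_t wr_rx_ibs[DEMO_RX_CNT];	/* stands in for the ib_recv_wr array */
};

static uint64_t demo_rx_post(struct demo_link *link)
{
	uint64_t wr_id = ++link->wr_rx_id;
	uint32_t index = wr_id % DEMO_RX_CNT;

	link->wr_rx_ibs[index] = wr_id;	/* real code sets .wr_id of that WR */
	return wr_id;
}

int main(void)
{
	struct demo_link link = { 0 };

	for (int i = 0; i < 5; i++)
		printf("posted wr_id %llu\n",
		       (unsigned long long)demo_rx_post(&link));
	return 0;
}
```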
|
| /linux/drivers/infiniband/ulp/ipoib/ |
| H A D | ipoib_cm.c |
|    99  priv->cm.rx_wr.wr_id = id | IPOIB_OP_CM | IPOIB_OP_RECV;  in ipoib_cm_post_receive_srq()
|   124  wr->wr_id = id | IPOIB_OP_CM | IPOIB_OP_RECV;  in ipoib_cm_post_receive_nonsrq()
|   226  ipoib_cm_rx_drain_wr.wr_id = IPOIB_CM_RX_DRAIN_WRID;  in ipoib_cm_start_rx_drain()
|   564  unsigned int wr_id = wc->wr_id & ~(IPOIB_OP_CM | IPOIB_OP_RECV);  in ipoib_cm_handle_rx_wc() local
|   574  wr_id, wc->status);  in ipoib_cm_handle_rx_wc()
|   576  if (unlikely(wr_id >= ipoib_recvq_size)) {  in ipoib_cm_handle_rx_wc()
|   577  if (wr_id == (IPOIB_CM_RX_DRAIN_WRID & ~(IPOIB_OP_CM | IPOIB_OP_RECV))) {  in ipoib_cm_handle_rx_wc()
|   585  wr_id, ipoib_recvq_size);  in ipoib_cm_handle_rx_wc()
|   594  skb = rx_ring[wr_id].skb;  in ipoib_cm_handle_rx_wc()
|   599  wc->status, wr_id, wc->vendor_err);  in ipoib_cm_handle_rx_wc()
|   [all …]
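ipoib_cm packs two things into the cookie: the receive-ring index in the low bits and the IPOIB_OP_CM | IPOIB_OP_RECV opcode bits on top, plus a reserved sentinel (IPOIB_CM_RX_DRAIN_WRID) used to recognise that the drain WR has completed. The completion handler masks the flags off and range-checks the remaining index. A standalone sketch of that encode/decode, using made-up flag values and queue size (the real constants live in ipoib.h):

```c
#include <stdint.h>
#include <stdio.h>

/* Illustrative flag bits only: low bits carry the receive-ring index,
 * high bits tag the completion as a connected-mode receive. */
#define DEMO_OP_RECV	(1ULL << 31)
#define DEMO_OP_CM	(1ULL << 30)
#define DEMO_RECVQ_SIZE	256
#define DEMO_DRAIN_WRID	0xffffffffULL	/* sentinel, like IPOIB_CM_RX_DRAIN_WRID */

static uint64_t encode_rx_wr_id(unsigned int ring_index)
{
	return ring_index | DEMO_OP_CM | DEMO_OP_RECV;
}

static void handle_rx_wc(uint64_t wc_wr_id)
{
	unsigned int wr_id = wc_wr_id & ~(DEMO_OP_CM | DEMO_OP_RECV);

	if (wr_id >= DEMO_RECVQ_SIZE) {
		if (wr_id == (DEMO_DRAIN_WRID & ~(DEMO_OP_CM | DEMO_OP_RECV)))
			printf("drain completion, rx queue now empty\n");
		else
			printf("bogus wr_id %u >= queue size\n", wr_id);
		return;
	}
	printf("packet arrived in rx_ring[%u]\n", wr_id);
}

int main(void)
{
	handle_rx_wc(encode_rx_wr_id(17));
	handle_rx_wc(DEMO_DRAIN_WRID);
	return 0;
}
```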
|
| /linux/drivers/infiniband/hw/cxgb4/ |
| H A D | cq.c |
|   469  srq->sw_rq[srq->pidx].wr_id = pwr->wr_id;  in post_pending_srq_wrs()
|   476  (unsigned long long)pwr->wr_id);  in post_pending_srq_wrs()
|   494  u64 wr_id;  in reap_srq_cqe() local
|   497  wr_id = srq->sw_rq[rel_idx].wr_id;  in reap_srq_cqe()
|   503  (unsigned long long)srq->sw_rq[rel_idx].wr_id);  in reap_srq_cqe()
|   511  srq->sw_rq[srq->cidx].wr_id);  in reap_srq_cqe()
|   522  (unsigned long long)srq->sw_rq[rel_idx].wr_id);  in reap_srq_cqe()
|   525  return wr_id;  in reap_srq_cqe()
|   717  *cookie = wq->sq.sw_sq[wq->sq.cidx].wr_id;  in poll_cq()
|   724  *cookie = wq->rq.sw_rq[wq->rq.cidx].wr_id;  in poll_cq()
|   [all …]
|
| /linux/drivers/infiniband/sw/rxe/ |
| H A D | rxe_comp.c | 402 wc->wr_id = wqe->wr.wr_id; in make_send_cqe() 406 uwc->wr_id = wqe->wr.wr_id; in make_send_cqe() 565 uwc->wr_id = wqe->wr.wr_id; in flush_send_wqe() 569 wc->wr_id = wqe->wr.wr_id; in flush_send_wqe()
|
| H A D | rxe_resp.c | 1087 uwc->wr_id = wqe->wr_id; in do_complete() 1091 wc->wr_id = wqe->wr_id; in do_complete() 1452 uwc->wr_id = wqe->wr_id; in flush_recv_wqe() 1456 wc->wr_id = wqe->wr_id; in flush_recv_wqe()
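Both rxe files fill two completion layouts with the same cookie: wc->wr_id for a kernel consumer and uwc->wr_id for the user-space-visible CQE format. A toy model of that branch, with simplified stand-in structs (the real CQE carries status, opcode, length and more):

```c
#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

struct demo_kernel_wc { uint64_t wr_id; };
struct demo_user_wc   { uint64_t wr_id; };

union demo_cqe {
	struct demo_kernel_wc wc;
	struct demo_user_wc   uwc;
};

/* Mirrors the branch seen in make_send_cqe()/do_complete(): the same
 * cookie from the WQE lands in whichever layout the CQ's owner reads. */
static void demo_fill_cqe(union demo_cqe *cqe, bool qp_is_user, uint64_t wqe_wr_id)
{
	if (qp_is_user)
		cqe->uwc.wr_id = wqe_wr_id;
	else
		cqe->wc.wr_id = wqe_wr_id;
}

int main(void)
{
	union demo_cqe cqe;

	demo_fill_cqe(&cqe, true, 0xabcd);
	printf("user-format cookie: 0x%llx\n",
	       (unsigned long long)cqe.uwc.wr_id);
	return 0;
}
```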
|
| /linux/include/uapi/rdma/ |
| H A D | vmw_pvrdma-abi.h | 230 __aligned_u64 wr_id; /* wr id */ member 238 __aligned_u64 wr_id; /* wr id */ member 291 __aligned_u64 wr_id; member
|
| H A D | rdma_user_rxe.h | 76 __aligned_u64 wr_id; member 176 __aligned_u64 wr_id; member
|
| H A D | rvt-abi.h | 44 __u64 wr_id; member
|
| H A D | mlx5_user_ioctl_verbs.h | 64 __aligned_u64 wr_id; 62 __aligned_u64 wr_id; global() member
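The uapi headers above mostly declare the cookie as __aligned_u64 rather than a plain __u64: the explicit alignment keeps structures shared between user space and the kernel identically laid out even for 32-bit callers, where a bare 64-bit integer may only be 4-byte aligned. A small userspace demonstration of the difference (struct names are made up):

```c
#include <stdint.h>
#include <stdio.h>

/* __aligned_u64 in the kernel uapi is essentially this typedef. */
typedef uint64_t aligned_u64 __attribute__((aligned(8)));

struct demo_cqe_loose {
	uint32_t    flags;
	uint64_t    wr_id;	/* alignment (and padding) is ABI-dependent */
};

struct demo_cqe_fixed {
	uint32_t    flags;
	aligned_u64 wr_id;	/* always 8-byte aligned: 16-byte struct */
};

int main(void)
{
	/* On classic i386 the first struct is 12 bytes, the second 16;
	 * on x86-64 both are 16, so the fixed form matches everywhere. */
	printf("loose: %zu bytes, fixed: %zu bytes\n",
	       sizeof(struct demo_cqe_loose), sizeof(struct demo_cqe_fixed));
	return 0;
}
```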
|
| /linux/drivers/infiniband/hw/qedr/ |
| H A D | qedr_roce_cm.c |
|   584  qp->wqe_wr_id[qp->sq.prod].wr_id = wr->wr_id;  in qedr_gsi_post_send()
|   588  wr->wr_id);  in qedr_gsi_post_send()
|   654  qp->rqe_wr_id[qp->rq.prod].wr_id = wr->wr_id;  in qedr_gsi_post_recv()
|   685  wc[i].wr_id = qp->rqe_wr_id[qp->rq.cons].wr_id;  in qedr_gsi_poll_cq()
|   713  wc[i].wr_id = qp->wqe_wr_id[qp->sq.cons].wr_id;  in qedr_gsi_poll_cq()
|
| /linux/drivers/infiniband/hw/bnxt_re/ |
| H A D | qplib_fp.c |
|   785  srqe->wr_id[0] = cpu_to_le32((u32)next);  in bnxt_qplib_post_srq_recv()
|   786  srq->swq[next].wr_id = wqe->wr_id;  in bnxt_qplib_post_srq_recv()
|  1892  swq->wr_id = wqe->wr_id;  in bnxt_qplib_post_send()
|  2142  swq->wr_id = wqe->wr_id;  in bnxt_qplib_post_recv()
|  2170  base_hdr->wr_id[0] = cpu_to_le32(wqe_idx);  in bnxt_qplib_post_recv()
|  2376  if (sq->swq[last].wr_id == BNXT_QPLIB_FENCE_WRID) {  in __flush_sq()
|  2384  cqe->wr_id = sq->swq[last].wr_id;  in __flush_sq()
|  2435  cqe->wr_id = rq->swq[last].wr_id;  in __flush_rq()
|  2529  sq->swq[peek_sq_cons_idx].wr_id ==  in do_wa9060()
|  2645  cqe->wr_id = swq->wr_id;  in bnxt_qplib_cq_process_req()
|   [all …]
|
| H A D | qplib_fp.h | 118 u64 wr_id; member 133 u64 wr_id; member 418 u64 wr_id; member
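bnxt_re keeps the consumer's wr_id in the software queue element (the qplib_fp.h members above) when a WQE is posted and copies it into the CQE on completion or flush; __flush_sq() additionally skips entries whose wr_id equals BNXT_QPLIB_FENCE_WRID, a sentinel marking driver-internal fence WQEs that must not surface as user completions. A hedged standalone model of that flush loop (the sentinel value and queue layout are stand-ins):

```c
#include <stdint.h>
#include <stdio.h>

#define DEMO_FENCE_WRID	((uint64_t)-1)	/* stand-in for BNXT_QPLIB_FENCE_WRID */

struct demo_swq_ent { uint64_t wr_id; };

/* Walk the pending software queue from consumer to producer, emitting a
 * flush completion (carrying the original wr_id) for every entry except
 * driver-internal fence WQEs, which are silently consumed. */
static void demo_flush_sq(struct demo_swq_ent *swq, unsigned int cons,
			  unsigned int prod, unsigned int depth)
{
	for (unsigned int i = cons; i != prod; i = (i + 1) % depth) {
		if (swq[i].wr_id == DEMO_FENCE_WRID)
			continue;	/* internal fence WQE, no user completion */
		printf("flush completion, wr_id=%llu\n",
		       (unsigned long long)swq[i].wr_id);
	}
}

int main(void)
{
	struct demo_swq_ent swq[8] = {
		[2] = { .wr_id = 100 },
		[3] = { .wr_id = DEMO_FENCE_WRID },
		[4] = { .wr_id = 101 },
	};

	demo_flush_sq(swq, 2, 5, 8);	/* reports 100 and 101, skips the fence */
	return 0;
}
```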
|
| /linux/drivers/infiniband/hw/irdma/ |
| H A D | user.h |
|   293  u64 wr_id;  member
|   344  u64 wr_id;  member
|   370  u64 wr_id;  member
|   407  int irdma_uk_post_nop(struct irdma_qp_uk *qp, u64 wr_id, bool signaled,
|   586  int irdma_nop(struct irdma_qp_uk *qp, u64 wr_id, bool signaled, bool post_sq);
|
| H A D | uk.c |
|   169  qp->sq_wrtrk_array[*wqe_idx].wrid = info->wr_id;  in irdma_qp_get_next_send_wqe()
|   457  set_64bit_val(wqe, 16, (u64)info->wr_id);  in irdma_uk_srq_post_receive()
|  1006  qp->rq_wrid_array[wqe_idx] = info->wr_id;  in irdma_uk_post_receive()
|  1291  get_64bit_val(cqe, 8, &info->wr_id);  in irdma_uk_cq_poll_cmpl()
|  1318  info->wr_id = qp->rq_wrid_array[qp->rq_ring.tail];  in irdma_uk_cq_poll_cmpl()
|  1321  info->wr_id = qp->rq_wrid_array[array_idx];  in irdma_uk_cq_poll_cmpl()
|  1357  info->wr_id = qp->sq_wrtrk_array[wqe_idx].wrid;  in irdma_uk_cq_poll_cmpl()
|  1387  info->wr_id = qp->sq_wrtrk_array[tail].wrid;  in irdma_uk_cq_poll_cmpl()
|  1815  int irdma_nop(struct irdma_qp_uk *qp, u64 wr_id, bool signaled, bool post_sq)  in irdma_nop() argument
|  1822  info.wr_id = wr_id;  in irdma_nop()
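irdma follows the common "parallel wrid array" pattern: the hardware descriptor only identifies the WQE slot, so post stores info->wr_id in sq_wrtrk_array/rq_wrid_array at that index and poll reads it back. A minimal standalone model of the receive side (names and depth are hypothetical):

```c
#include <stdint.h>
#include <stdio.h>

#define DEMO_RQ_DEPTH 8

/* The driver keeps a parallel array mapping a WQE index back to the
 * consumer's 64-bit wr_id, because the hardware CQE only reports the index. */
struct demo_rq {
	uint64_t wrid[DEMO_RQ_DEPTH];
	unsigned int head;	/* next slot to post */
	unsigned int tail;	/* next slot to complete */
};

static unsigned int demo_post_recv(struct demo_rq *rq, uint64_t wr_id)
{
	unsigned int idx = rq->head++ % DEMO_RQ_DEPTH;

	rq->wrid[idx] = wr_id;	/* what uk.c stores in rq_wrid_array */
	return idx;		/* this index is what the HW CQE reports */
}

static uint64_t demo_poll_recv(struct demo_rq *rq, unsigned int cqe_idx)
{
	rq->tail = (cqe_idx + 1) % DEMO_RQ_DEPTH;	/* retire the slot */
	return rq->wrid[cqe_idx];
}

int main(void)
{
	struct demo_rq rq = { 0 };
	unsigned int idx = demo_post_recv(&rq, 0x1234);

	printf("completed wr_id 0x%llx\n",
	       (unsigned long long)demo_poll_recv(&rq, idx));
	return 0;
}
```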
|
| /linux/include/rdma/ |
| H A D | rdmavt_qp.h | 837 u64 wr_id; in rvt_qp_complete_swqe() local 850 wr_id = wqe->wr.wr_id; in rvt_qp_complete_swqe() 859 .wr_id = wr_id, in rvt_qp_complete_swqe()
|
| /linux/drivers/net/ethernet/ibm/ehea/ |
| H A D | ehea_qmr.h | 87 u64 wr_id; member 126 u64 wr_id; /* work request ID */ member 149 u64 wr_id; /* work request ID from WQE */ member
|
| /linux/drivers/net/ethernet/cisco/enic/ |
| H A D | vnic_rq.h | 63 uint64_t wr_id; member 119 buf->wr_id = wrid; in vnic_rq_post()
|
| H A D | vnic_wq.h | 48 uint64_t wr_id; /* Cookie */ member 133 buf->wr_id = wrid; in vnic_wq_post()
|
| /linux/drivers/infiniband/hw/mlx5/ |
| H A D | wr.c |
|   760  u64 wr_id, int nreq, u8 fence, u32 mlx5_opcode)  in mlx5r_finish_wqe() argument
|   771  qp->sq.wrid[idx] = wr_id;  in mlx5r_finish_wqe()
|   836  mlx5r_finish_wqe(qp, *ctrl, *seg, *size, *cur_edge, *idx, wr->wr_id,  in handle_psv()
|   878  wr->wr_id, nreq, fence, MLX5_OPCODE_UMR);  in handle_reg_mr_integrity()
|   909  mlx5r_finish_wqe(qp, *ctrl, *seg, *size, *cur_edge, *idx, wr->wr_id,  in handle_reg_mr_integrity()
|  1185  mlx5r_finish_wqe(qp, ctrl, seg, size, cur_edge, idx, wr->wr_id,  in mlx5_ib_post_send()
|  1264  qp->rq.wrid[ind] = wr->wr_id;  in mlx5_ib_post_recv()
|
| H A D | cq.c |
|   193  wc->wr_id = srq->wrid[wqe_ctr];  in handle_responder()
|   200  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];  in handle_responder()
|   417  wc->wr_id = wq->wrid[idx];  in sw_comp()
|   510  wc->wr_id = wq->wrid[idx];  in mlx5_poll_one()
|   540  wc->wr_id = wq->wrid[idx];  in mlx5_poll_one()
|   548  wc->wr_id = srq->wrid[wqe_ctr];  in mlx5_poll_one()
|   552  wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];  in mlx5_poll_one()
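In mlx5's poll path the cookie comes either from srq->wrid[wqe_ctr] (SRQ completions carry an explicit WQE counter) or from wq->wrid[wq->tail & (wq->wqe_cnt - 1)] for in-order receive queues, where a free-running tail counter is mapped onto the array with a power-of-two mask. A tiny standalone illustration of that masking (depth and values are arbitrary):

```c
#include <stdint.h>
#include <stdio.h>

#define DEMO_WQE_CNT 8		/* must be a power of two for the mask trick */

int main(void)
{
	uint64_t wrid[DEMO_WQE_CNT];
	uint32_t tail = 13;	/* free-running counter, never reset */

	/* Masking with (size - 1) is equivalent to tail % size only because
	 * the queue depth is a power of two: 13 & 7 == 13 % 8 == 5. */
	wrid[tail & (DEMO_WQE_CNT - 1)] = 0xbeef;

	printf("wr_id for tail %u is 0x%llx\n", tail,
	       (unsigned long long)wrid[tail & (DEMO_WQE_CNT - 1)]);
	return 0;
}
```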
|