Searched refs: wr_cqe (Results 1 – 25 of 28) sorted by relevance

/linux/net/sunrpc/xprtrdma/
frwr_ops.c
361 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_fastreg()
399 mr->mr_regwr.wr.wr_cqe = &mr->mr_cqe; in frwr_send()
456 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv()
475 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv_wake()
518 last->wr_cqe = &mr->mr_cqe; in frwr_unmap_sync()
525 last->wr_cqe->done = frwr_wc_localinv; in frwr_unmap_sync()
537 last->wr_cqe->done = frwr_wc_localinv_wake; in frwr_unmap_sync()
572 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv_done()
621 last->wr_cqe = &mr->mr_cqe; in frwr_unmap_async()
628 last->wr_cqe->done = frwr_wc_localinv; in frwr_unmap_async()
[all …]
svc_rdma_rw.c
257 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_reply_done()
286 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_write_done()
320 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_wc_read_done()
svc_rdma_recvfrom.c
144 ctxt->rc_recv_wr.wr_cqe = &ctxt->rc_cqe; in svc_rdma_recv_ctxt_alloc()
331 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_wc_receive()
verbs.c
144 struct ib_cqe *cqe = wc->wr_cqe; in rpcrdma_wc_send()
163 struct ib_cqe *cqe = wc->wr_cqe; in rpcrdma_wc_receive()
973 rep->rr_recv_wr.wr_cqe = &rep->rr_cqe; in rpcrdma_rep_create()
svc_rdma_sendto.c
141 ctxt->sc_send_wr.wr_cqe = &ctxt->sc_cqe; in svc_rdma_send_ctxt_alloc()
298 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_wc_send()
/linux/drivers/infiniband/ulp/rtrs/
rtrs.c
88 .wr_cqe = &iu->cqe, in rtrs_iu_post_recv()
102 .wr_cqe = cqe, in rtrs_post_recv_empty()
143 .wr_cqe = &iu->cqe, in rtrs_iu_post_send()
165 .wr.wr_cqe = &iu->cqe, in rtrs_iu_post_rdma_write_imm()
201 .wr.wr_cqe = cqe, in rtrs_post_rdma_write_imm_empty()
rtrs-srv.c
250 wr->wr.wr_cqe = &io_comp_cqe; in rdma_write_sg()
278 inv_wr.wr_cqe = &io_comp_cqe; in rdma_write_sg()
289 rwr.wr.wr_cqe = &local_reg_cqe; in rdma_write_sg()
319 imm_wr.wr.wr_cqe = &io_comp_cqe; in rdma_write_sg()
365 inv_wr.wr_cqe = &io_comp_cqe; in send_io_resp_imm()
410 rwr.wr.wr_cqe = &local_reg_cqe; in send_io_resp_imm()
437 imm_wr.wr.wr_cqe = &io_comp_cqe; in send_io_resp_imm()
711 iu = container_of(wc->wr_cqe, struct rtrs_iu, cqe); in rtrs_srv_info_rsp_done()
853 rwr[mri].wr.wr_cqe = &local_reg_cqe; in process_info_req()
916 iu = container_of(wc->wr_cqe, struct rtrs_iu, cqe); in rtrs_srv_info_req_done()
[all …]
rtrs-clt.c
350 container_of(wc->wr_cqe, typeof(*req), inv_cqe); in rtrs_clt_inv_rkey_done()
371 .wr_cqe = &req->inv_cqe, in rtrs_inv_rkey()
513 iu = container_of(wc->wr_cqe, struct rtrs_iu, in rtrs_clt_recv_done()
534 iu = container_of(wc->wr_cqe, struct rtrs_iu, cqe); in rtrs_clt_rkey_rsp_done()
592 wr->wr_cqe = cqe; in rtrs_post_recv_empty_x2()
625 if (WARN_ON(wc->wr_cqe->done != rtrs_clt_rdma_done)) in rtrs_clt_rdma_done()
674 WARN_ON(wc->wr_cqe->done != rtrs_clt_rdma_done); in rtrs_clt_rdma_done()
1133 .wr.wr_cqe = &fast_reg_cqe, in rtrs_clt_write_req()
1219 .wr.wr_cqe = &fast_reg_cqe, in rtrs_clt_read_req()
2392 iu = container_of(wc->wr_cqe, struct rtrs_iu, cqe); in rtrs_clt_info_req_done()
[all …]
/linux/net/9p/
trans_rdma.c
295 container_of(wc->wr_cqe, struct p9_rdma_context, cqe); in recv_done()
346 container_of(wc->wr_cqe, struct p9_rdma_context, cqe); in send_done()
403 wr.wr_cqe = &c->cqe; in post_recv()
500 wr.wr_cqe = &c->cqe; in rdma_request()
/linux/drivers/infiniband/ulp/iser/
iser_memory.c
240 inv_wr->wr_cqe = cqe; in iser_inv_rkey()
283 wr->wr.wr_cqe = cqe; in iser_reg_sig_mr()
330 wr->wr.wr_cqe = cqe; in iser_fast_reg_mr()
iser_initiator.c
537 struct iser_login_desc *desc = iser_login(wc->wr_cqe); in iser_login_rsp()
638 struct iser_rx_desc *desc = iser_rx(wc->wr_cqe); in iser_task_rsp()
682 struct iser_tx_desc *desc = iser_tx(wc->wr_cqe); in iser_ctrl_comp()
698 struct iser_tx_desc *desc = iser_tx(wc->wr_cqe); in iser_dataout_comp()
iser_verbs.c
818 wr.wr_cqe = &desc->cqe; in iser_post_recvl()
837 wr.wr_cqe = &rx_desc->cqe; in iser_post_recvm()
868 wr->wr_cqe = &tx_desc->cqe; in iser_post_send()
/linux/drivers/nvme/target/
rdma.c
337 c->wr.wr_cqe = &c->cqe; in nvmet_rdma_alloc_cmd()
420 r->send_wr.wr_cqe = &r->send_cqe; in nvmet_rdma_alloc_rsp()
696 container_of(wc->wr_cqe, struct nvmet_rdma_rsp, send_cqe); in nvmet_rdma_send_done()
704 wc->wr_cqe, ib_wc_status_msg(wc->status), wc->status); in nvmet_rdma_send_done()
749 container_of(wc->wr_cqe, struct nvmet_rdma_rsp, read_cqe); in nvmet_rdma_read_data_done()
763 wc->wr_cqe, ib_wc_status_msg(wc->status), wc->status); in nvmet_rdma_read_data_done()
782 container_of(wc->wr_cqe, struct nvmet_rdma_rsp, write_cqe); in nvmet_rdma_write_data_done()
1002 container_of(wc->wr_cqe, struct nvmet_rdma_cmd, cqe); in nvmet_rdma_recv_done()
1009 wc->wr_cqe, ib_wc_status_msg(wc->status), in nvmet_rdma_recv_done()
/linux/drivers/infiniband/core/
cq.c
109 if (wc->wr_cqe) in __ib_process_cq()
110 wc->wr_cqe->done(cq, wc); in __ib_process_cq()
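The two cq.c hits above are the consumer side of the interface: the generic CQ polling loop dispatches each work completion through the ib_cqe that was attached to its work request. A minimal sketch of that dispatch, simplified from __ib_process_cq() (the real function also handles polling budgets and CQ re-arming; process_cq_sketch is an illustrative name, not a kernel symbol):

#include <rdma/ib_verbs.h>

/* Simplified sketch of the dispatch in __ib_process_cq(): each polled
 * work completion carries the ib_cqe that was attached to the work
 * request, and its ->done callback is invoked directly.
 */
static int process_cq_sketch(struct ib_cq *cq, struct ib_wc *wcs, int budget)
{
	int completed = 0, n, i;

	while ((n = ib_poll_cq(cq, budget - completed, wcs)) > 0) {
		for (i = 0; i < n; i++) {
			struct ib_wc *wc = &wcs[i];

			if (wc->wr_cqe)                   /* line 109 above */
				wc->wr_cqe->done(cq, wc); /* line 110 above */
		}
		completed += n;
		if (completed >= budget)
			break;
	}
	return completed;
}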
mad.c
549 wc->wr_cqe = cqe; in build_smp_wc()
689 send_wr->wr.wr_cqe, drslid, in handle_outgoing_dr_smp()
893 mad_send_wr->send_wr.wr.wr_cqe = &mad_send_wr->mad_list.cqe; in ib_create_send_mad()
1006 mad_send_wr->send_wr.wr.wr_cqe = &mad_send_wr->mad_list.cqe; in ib_send_mad()
2021 container_of(wc->wr_cqe, struct ib_mad_list_head, cqe); in ib_mad_recv_done()
2270 container_of(wc->wr_cqe, struct ib_mad_list_head, cqe); in ib_mad_send_done()
2355 container_of(wc->wr_cqe, struct ib_mad_list_head, cqe); in ib_mad_send_error()
2533 local->mad_send_wr->send_wr.wr.wr_cqe, in local_completions()
2715 recv_wr.wr_cqe = &mad_priv->header.mad_list.cqe; in ib_mad_post_receive_mads()
rw.c
427 ctx->reg->reg_wr.wr.wr_cqe = NULL; in rdma_rw_ctx_signature_init()
533 last_wr->wr_cqe = cqe; in rdma_rw_ctx_wrs()
verbs.c
2820 struct ib_drain_cqe *cqe = container_of(wc->wr_cqe, struct ib_drain_cqe, in ib_drain_qp_done()
2837 { .wr_cqe = &sdrain.cqe, }, in __ib_drain_sq()
2882 rwr.wr_cqe = &rdrain.cqe; in __ib_drain_rq()
/linux/drivers/nvme/host/
rdma.c
1182 op, wc->wr_cqe, in nvme_rdma_wr_error()
1196 container_of(wc->wr_cqe, struct nvme_rdma_request, reg_cqe); in nvme_rdma_inv_rkey_done()
1216 wr.wr_cqe = &req->reg_cqe; in nvme_rdma_inv_rkey()
1338 req->reg_wr.wr.wr_cqe = &req->reg_cqe; in nvme_rdma_map_sg_fr()
1444 wr->wr.wr_cqe = &req->reg_cqe; in nvme_rdma_map_sg_pi()
1588 container_of(wc->wr_cqe, struct nvme_rdma_qe, cqe); in nvme_rdma_send_done()
1610 wr.wr_cqe = &qe->cqe; in nvme_rdma_post_send()
1643 wr.wr_cqe = &qe->cqe; in nvme_rdma_post_recv()
1744 container_of(wc->wr_cqe, struct nvme_rdma_qe, cqe); in nvme_rdma_recv_done()
/linux/drivers/infiniband/ulp/isert/
ib_isert.c
736 rx_wr->wr_cqe = &rx_desc->rx_cqe; in isert_post_recvm()
767 rx_wr.wr_cqe = &rx_desc->rx_cqe; in isert_post_recv()
792 send_wr.wr_cqe = &tx_desc->tx_cqe; in isert_login_post_send()
869 send_wr->wr_cqe = &tx_desc->tx_cqe; in isert_init_send_wr()
902 rx_wr.wr_cqe = &isert_conn->login_desc->rx_cqe; in isert_login_post_recv()
1320 struct iser_rx_desc *rx_desc = cqe_to_rx_desc(wc->wr_cqe); in isert_recv_done()
1577 struct iser_tx_desc *desc = cqe_to_tx_desc(wc->wr_cqe); in isert_rdma_write_done()
1619 struct iser_tx_desc *desc = cqe_to_tx_desc(wc->wr_cqe); in isert_rdma_read_done()
1695 struct iser_tx_desc *tx_desc = cqe_to_tx_desc(wc->wr_cqe); in isert_login_send_done()
1711 struct iser_tx_desc *tx_desc = cqe_to_tx_desc(wc->wr_cqe); in isert_send_done()
/linux/drivers/infiniband/hw/mlx5/
gsi.c
72 container_of(wc->wr_cqe, struct mlx5_ib_gsi_wr, cqe); in handle_single_completion()
385 wr->wr.wr_cqe = &gsi_wr->cqe; in mlx5_ib_add_outstanding_wr()
umr.c
308 container_of(wc->wr_cqe, struct mlx5_ib_umr_context, cqe); in mlx5r_umr_done()
/linux/fs/smb/server/
transport_rdma.c
540 recvmsg = container_of(wc->wr_cqe, struct smb_direct_recvmsg, cqe); in recv_done()
655 wr.wr_cqe = &recvmsg->cqe; in smb_direct_post_recv()
860 sendmsg = container_of(wc->wr_cqe, struct smb_direct_sendmsg, cqe); in send_done()
947 last->wr.wr_cqe = &last->cqe; in smb_direct_flush_send_list()
1141 msg->wr.wr_cqe = NULL; in post_sendmsg()
1156 msg->wr.wr_cqe = &msg->cqe; in post_sendmsg()
1329 struct smb_direct_rdma_rw_msg *msg = container_of(wc->wr_cqe, in read_write_done()
/linux/drivers/infiniband/ulp/srp/
ib_srp.c
1171 wr.wr_cqe = &req->reg_cqe; in srp_inv_rkey()
1475 wr.wr.wr_cqe = &req->reg_cqe; in srp_map_finish_fr()
1870 struct srp_iu *iu = container_of(wc->wr_cqe, struct srp_iu, cqe); in srp_send_done()
1904 wr.wr_cqe = &iu->cqe; in srp_post_send()
1926 wr.wr_cqe = &iu->cqe; in srp_post_recv()
2061 struct srp_iu *iu = container_of(wc->wr_cqe, struct srp_iu, cqe); in srp_recv_done()
2145 wc->wr_cqe); in srp_handle_qp_err()
/linux/drivers/infiniband/ulp/srpt/
ib_srpt.c
899 wr.wr_cqe = &ioctx->ioctx.cqe; in srpt_post_recv()
924 { .wr_cqe = &ch->zw_cqe, }, in srpt_zerolength_write()
1393 container_of(wc->wr_cqe, struct srpt_send_ioctx, rdma_cqe); in srpt_rdma_read_done()
1758 container_of(wc->wr_cqe, struct srpt_recv_ioctx, ioctx.cqe); in srpt_recv_done()
1819 container_of(wc->wr_cqe, struct srpt_send_ioctx, ioctx.cqe); in srpt_send_done()
2939 send_wr.wr_cqe = &ioctx->ioctx.cqe; in srpt_queue_response()
/linux/include/rdma/
ib_verbs.h
1017 struct ib_cqe *wr_cqe; member
1384 struct ib_cqe *wr_cqe; member
1455 struct ib_cqe *wr_cqe; member
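The three ib_verbs.h hits are the declarations that tie all of the results above together: struct ib_wc, struct ib_send_wr, and struct ib_recv_wr each carry a struct ib_cqe pointer named wr_cqe. The recurring idiom in the files listed is to embed an ib_cqe in a per-request context, point wr_cqe at it before posting, and recover the context with container_of() in the completion handler. A hedged sketch of that idiom follows; my_request, my_send_done, and my_post_send are illustrative names, not kernel symbols:

#include <rdma/ib_verbs.h>

/* Hypothetical per-request context embedding an ib_cqe, mirroring the
 * pattern in the results above (compare rtrs_iu or svc_rdma_send_ctxt).
 */
struct my_request {
	struct ib_cqe cqe;	/* wr_cqe points here */
	/* ... driver-private state ... */
};

/* Completion handler: recover the posting context from wc->wr_cqe. */
static void my_send_done(struct ib_cq *cq, struct ib_wc *wc)
{
	struct my_request *req =
		container_of(wc->wr_cqe, struct my_request, cqe);

	if (wc->status != IB_WC_SUCCESS)
		pr_err("send for req %p failed: %s\n",
		       req, ib_wc_status_msg(wc->status));
	/* ... complete or release req ... */
}

/* Posting side: attach the handler via the embedded ib_cqe before
 * handing the work request to the HCA.
 */
static int my_post_send(struct ib_qp *qp, struct my_request *req,
			struct ib_sge *sge)
{
	struct ib_send_wr wr = {
		.wr_cqe     = &req->cqe,
		.sg_list    = sge,
		.num_sge    = 1,
		.opcode     = IB_WR_SEND,
		.send_flags = IB_SEND_SIGNALED,
	};

	req->cqe.done = my_send_done;
	return ib_post_send(qp, &wr, NULL);
}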
