Home
last modified time | relevance | path

Searched defs:cqe (Results 1 – 25 of 142) sorted by relevance

1 2 3 4 5 6

/linux/drivers/infiniband/sw/rxe/
H A D rxe_cq.c 12 int cqe, int comp_vector) in rxe_cq_chk_attr()
42 int rxe_cq_from_init(struct rxe_dev *rxe, struct rxe_cq *cq, int cqe, in rxe_cq_from_init()
69 int rxe_cq_resize_queue(struct rxe_cq *cq, int cqe, in rxe_cq_resize_queue()
85 int rxe_cq_post(struct rxe_cq *cq, struct rxe_cqe *cqe, int solicited) in rxe_cq_post()
H A D rxe_comp.c 394 struct rxe_cqe *cqe) in make_send_cqe()
443 struct rxe_cqe cqe; in do_complete() local
559 struct rxe_cqe cqe = {}; in flush_send_wqe() local
/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
H A D macsec.h 32 static inline bool mlx5e_macsec_is_rx_flow(struct mlx5_cqe64 *cqe) in mlx5e_macsec_is_rx_flow()
46 static inline bool mlx5e_macsec_is_rx_flow(struct mlx5_cqe64 *cqe) { return false; } in mlx5e_macsec_is_rx_flow()
49 struct mlx5_cqe64 *cqe) in mlx5e_macsec_offload_handle_rx_skb()
H A D ipsec_rxtx.h 75 static inline bool mlx5_ipsec_is_rx_flow(struct mlx5_cqe64 *cqe) in mlx5_ipsec_is_rx_flow()
153 static inline bool mlx5_ipsec_is_rx_flow(struct mlx5_cqe64 *cqe) { return false; } in mlx5_ipsec_is_rx_flow()
/linux/drivers/scsi/qedi/
H A D qedi_fw.c 31 union iscsi_cqe *cqe, in qedi_process_logout_resp()
82 union iscsi_cqe *cqe, in qedi_process_text_resp()
178 union iscsi_cqe *cqe, in qedi_process_tmf_resp()
258 union iscsi_cqe *cqe, in qedi_process_login_resp()
320 struct iscsi_cqe_unsolicited *cqe, in qedi_get_rq_bdq_buf()
359 struct iscsi_cqe_unsolicited *cqe, in qedi_put_rq_bdq_buf()
397 struct iscsi_cqe_unsolicited *cqe, in qedi_unsol_pdu_adjust_bdq()
409 union iscsi_cqe *cqe, in qedi_process_nopin_mesg()
480 union iscsi_cqe *cqe, in qedi_process_async_mesg()
534 union iscsi_cqe *cqe, in qedi_process_reject_mesg()
[all …]
/linux/drivers/infiniband/hw/cxgb4/
H A D cq.c 186 struct t4_cqe cqe; in insert_recv_cqe() local
220 struct t4_cqe cqe; in insert_sq_cqe() local
422 static int cqe_completes_wr(struct t4_cqe *cqe, struct t4_wq *wq) in cqe_completes_wr()
445 struct t4_cqe *cqe; in c4iw_count_rcqes() local
544 static int poll_cq(struct t4_wq *wq, struct t4_cq *cq, struct t4_cqe *cqe, in poll_cq()
757 struct t4_cqe cqe; in __c4iw_poll_cq_one() local
/linux/drivers/scsi/bnx2i/
H A D bnx2i_hwi.c 1338 struct cqe *cqe) in bnx2i_process_scsi_cmd_resp()
1435 struct cqe *cqe) in bnx2i_process_login_resp()
1503 struct cqe *cqe) in bnx2i_process_text_resp()
1564 struct cqe *cqe) in bnx2i_process_tmf_resp()
1603 struct cqe *cqe) in bnx2i_process_logout_resp()
1649 struct cqe *cqe) in bnx2i_process_nopin_local_cmpl()
1690 struct cqe *cqe) in bnx2i_process_nopin_mesg()
1742 struct cqe *cqe) in bnx2i_process_async_mesg()
1792 struct cqe *cqe) in bnx2i_process_reject_mesg()
1829 struct cqe *cqe) in bnx2i_process_cmd_cleanup_resp()
[all …]
/linux/drivers/net/ethernet/mellanox/mlxsw/
H A D pci_hw.h 286 static inline u16 mlxsw_pci_cqe2_mirror_cong_get(const char *cqe) in mlxsw_pci_cqe2_mirror_cong_get()
343 static inline u64 mlxsw_pci_cqe2_time_stamp_get(const char *cqe) in mlxsw_pci_cqe2_time_stamp_get()
351 static inline u8 mlxsw_pci_cqe2_time_stamp_sec_get(const char *cqe) in mlxsw_pci_cqe2_time_stamp_sec_get()
358 static inline u32 mlxsw_pci_cqe2_time_stamp_nsec_get(const char *cqe) in mlxsw_pci_cqe2_time_stamp_nsec_get()
/linux/drivers/infiniband/hw/mthca/
H A D mthca_cq.c 174 static inline struct mthca_cqe *cqe_sw(struct mthca_cqe *cqe) in cqe_sw()
184 static inline void set_cqe_hw(struct mthca_cqe *cqe) in set_cqe_hw()
191 __be32 *cqe = cqe_ptr; in dump_cqe() local
264 static inline int is_recv_cqe(struct mthca_cqe *cqe) in is_recv_cqe()
276 struct mthca_cqe *cqe; in mthca_cq_clean() local
366 void mthca_free_cq_buf(struct mthca_dev *dev, struct mthca_cq_buf *buf, int cqe) in mthca_free_cq_buf()
374 struct mthca_err_cqe *cqe, in handle_error_cqe()
485 struct mthca_cqe *cqe; in mthca_poll_one() local
/linux/net/sunrpc/xprtrdma/
H A D frwr_ops.c 361 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_fastreg() local
456 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv() local
475 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv_wake() local
572 struct ib_cqe *cqe = wc->wr_cqe; in frwr_wc_localinv_done() local
H A D svc_rdma_rw.c 257 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_reply_done() local
286 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_write_done() local
320 struct ib_cqe *cqe = wc->wr_cqe; in svc_rdma_wc_read_done() local
368 struct ib_cqe *cqe; in svc_rdma_post_chunk_ctxt() local
688 struct ib_cqe *cqe; svc_rdma_prepare_reply_chunk() local
[all …]
/linux/drivers/infiniband/hw/ocrdma/
H A D ocrdma_verbs.c 1035 struct ocrdma_cqe *cqe = NULL; in ocrdma_flush_cq() local
1594 struct ocrdma_cqe *cqe; in ocrdma_discard_cqes() local
2439 struct ocrdma_cqe *cqe) in ocrdma_set_cqe_status_flushed()
2470 static bool ocrdma_update_err_cqe(struct ib_wc *ibwc, struct ocrdma_cqe *cqe, in ocrdma_update_err_cqe()
2492 static int ocrdma_update_err_rcqe(struct ib_wc *ibwc, struct ocrdma_cqe *cqe, in ocrdma_update_err_rcqe()
2502 static int ocrdma_update_err_scqe(struct ib_wc *ibwc, struct ocrdma_cqe *cqe, in ocrdma_update_err_scqe()
2513 struct ocrdma_cqe *cqe, struct ib_wc *ibwc, in ocrdma_poll_err_scqe()
2557 struct ocrdma_cqe *cqe, in ocrdma_poll_success_scqe()
2582 static bool ocrdma_poll_scqe(struct ocrdma_qp *qp, struct ocrdma_cqe *cqe, in ocrdma_poll_scqe()
2599 struct ocrdma_cqe *cqe) in ocrdma_update_ud_rcqe()
[all …]
/linux/drivers/net/ethernet/qlogic/qede/
H A D qede_fp.c 650 struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_set_gro_params()
833 struct eth_fast_path_rx_tpa_start_cqe *cqe) in qede_tpa_start()
959 struct eth_fast_path_rx_tpa_cont_cqe *cqe) in qede_tpa_cont()
974 struct eth_fast_path_rx_tpa_end_cqe *cqe) in qede_tpa_end()
1064 static bool qede_pkt_is_ip_fragmented(struct eth_fast_path_rx_reg_cqe *cqe, in qede_pkt_is_ip_fragmented()
1084 struct eth_fast_path_rx_reg_cqe *cqe, in qede_rx_xdp()
1172 struct eth_fast_path_rx_reg_cqe *cqe, in qede_rx_build_jumbo()
1226 union eth_rx_cqe *cqe, in qede_rx_process_tpa_cqe()
1251 union eth_rx_cqe *cqe; in qede_rx_process_cqe() local
H A D qede_ptp.h 23 union eth_rx_cqe *cqe, in qede_ptp_record_rx_ts()
/linux/drivers/infiniband/hw/mana/
H A D cq.c 187 static inline void handle_ud_sq_cqe(struct mana_ib_qp *qp, struct gdma_comp *cqe) in handle_ud_sq_cqe()
203 static inline void handle_ud_rq_cqe(struct mana_ib_qp *qp, struct gdma_comp *cqe) in handle_ud_rq_cqe()
221 static void mana_handle_cqe(struct mana_ib_dev *mdev, struct gdma_comp *cqe) in mana_handle_cqe()
/linux/drivers/net/ethernet/mellanox/mlx5/core/fpga/
H A D conn.c 251 struct mlx5_cqe64 *cqe, u8 status) in mlx5_fpga_conn_rq_cqe()
291 struct mlx5_cqe64 *cqe, u8 status) in mlx5_fpga_conn_sq_cqe()
333 struct mlx5_cqe64 *cqe) in mlx5_fpga_conn_handle_cqe()
368 struct mlx5_cqe64 *cqe; in mlx5_fpga_conn_cqes() local
418 struct mlx5_cqe64 *cqe; in mlx5_fpga_conn_create_cq() local
/linux/drivers/infiniband/ulp/iser/
H A D iser_memory.c 237 struct ib_cqe *cqe, struct ib_send_wr *next_wr) in iser_inv_rkey()
254 struct ib_cqe *cqe = &iser_task->iser_conn->ib_conn.reg_cqe; in iser_reg_sig_mr() local
311 struct ib_cqe *cqe = &iser_task->iser_conn->ib_conn.reg_cqe; in iser_fast_reg_mr() local
H A D iscsi_iser.h 244 struct ib_cqe cqe; member
271 struct ib_cqe cqe; member
291 struct ib_cqe cqe; member
552 iser_rx(struct ib_cqe *cqe) in iser_rx()
558 iser_tx(struct ib_cqe *cqe) in iser_tx()
564 iser_login(struct ib_cqe *cqe) in iser_login()
/linux/drivers/scsi/qedf/
H A D qedf_io.c 1121 void qedf_scsi_completion(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_scsi_completion()
1432 void qedf_process_warning_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_warning_compl()
1502 void qedf_process_error_detect(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_error_detect()
1957 void qedf_process_abts_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_abts_compl()
2278 void qedf_process_cleanup_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_cleanup_compl()
2476 void qedf_process_tmf_compl(struct qedf_ctx *qedf, struct fcoe_cqe *cqe, in qedf_process_tmf_compl()
2490 struct fcoe_cqe *cqe) in qedf_process_unsol_compl()
/linux/drivers/infiniband/hw/bnxt_re/
H A D qplib_fp.c 1569 struct cq_req *cqe = (struct cq_req *)hw_cqe; in __clean_cq() local
1579 struct cq_res_rc *cqe = (struct cq_res_rc *)hw_cqe; in __clean_cq() local
2384 struct bnxt_qplib_cqe *cqe; in __flush_sq() local
2425 struct bnxt_qplib_cqe *cqe; in __flush_rq() local
2614 struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_req() local
2740 struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_res_rc() local
2820 struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_res_ud() local
2920 struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_res_raweth_qp1() local
3012 struct bnxt_qplib_cqe *cqe; in bnxt_qplib_cq_process_terminal() local
3124 struct bnxt_qplib_cqe *cqe, in bnxt_qplib_process_flush_list()
[all …]
/linux/io_uring/
H A D fdinfo.c 131 struct io_uring_cqe *cqe = &r->cqes[(entry & cq_mask) << cq_shift]; in __io_uring_show_fdinfo() local
208 struct io_uring_cqe *cqe = &ocqe->cqe; in __io_uring_show_fdinfo() local
/linux/tools/testing/vsock/
H A D vsock_uring_test.c 63 struct io_uring_cqe *cqe; in vsock_io_uring_client() local
144 struct io_uring_cqe *cqe; in vsock_io_uring_server() local
/linux/drivers/infiniband/hw/vmw_pvrdma/
H A D pvrdma_cq.c 295 struct pvrdma_cqe *cqe; in _pvrdma_flush_cqe() local
330 struct pvrdma_cqe *cqe; in pvrdma_poll_one() local
/linux/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
H A D rx.c 249 struct mlx5_cqe64 *cqe, in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
306 struct mlx5_cqe64 *cqe, in mlx5e_xsk_skb_from_cqe_linear()
/linux/drivers/net/ethernet/mellanox/mlx4/
H A D en_rx.c 592 static int check_csum(struct mlx4_cqe *cqe, struct sk_buff *skb, void *va, in check_csum()
633 struct mlx4_cqe *cqe; member
655 struct mlx4_cqe *cqe = _ctx->cqe; in mlx4_en_xdp_rx_hash() local
690 struct mlx4_cqe *cqe; in mlx4_en_process_rx_cq() local

1 2 3 4 5 6