Lines Matching refs:cqe

206 struct t4_cqe cqe; in insert_recv_cqe() local
210 memset(&cqe, 0, sizeof(cqe)); in insert_recv_cqe()
211 cqe.header = cpu_to_be32(V_CQE_STATUS(T4_ERR_SWFLUSH) | in insert_recv_cqe()
216 cqe.bits_type_ts = cpu_to_be64(V_CQE_GENBIT((u64)cq->gen)); in insert_recv_cqe()
217 cq->sw_queue[cq->sw_pidx] = cqe; in insert_recv_cqe()
239 struct t4_cqe cqe; in insert_sq_cqe() local
243 memset(&cqe, 0, sizeof(cqe)); in insert_sq_cqe()
244 cqe.header = cpu_to_be32(V_CQE_STATUS(T4_ERR_SWFLUSH) | in insert_sq_cqe()
249 CQE_WRID_SQ_IDX(&cqe) = swcqe->idx; in insert_sq_cqe()
250 cqe.bits_type_ts = cpu_to_be64(V_CQE_GENBIT((u64)cq->gen)); in insert_sq_cqe()
251 cq->sw_queue[cq->sw_pidx] = cqe; in insert_sq_cqe()
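
The two insert paths above, insert_recv_cqe() and insert_sq_cqe(), follow the same pattern: build a zeroed CQE carrying only the T4_ERR_SWFLUSH status and the current generation bit, record the SQ WR index in the SQ case, and publish it at the software producer index. Below is a minimal, self-contained sketch of that pattern; struct sw_cqe, struct sw_cq, STATUS_SWFLUSH, the bit positions and the wrap-around arithmetic are illustrative stand-ins, not the driver's t4_cqe/t4_cq definitions.

    #include <endian.h>     /* htobe32()/htobe64() stand in for cpu_to_be32()/cpu_to_be64() */
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define SW_CQ_DEPTH     64
    #define STATUS_SWFLUSH  5           /* stand-in for T4_ERR_SWFLUSH */

    struct sw_cqe {
        uint32_t header;                /* big-endian: status (hw also packs opcode/type/qpid here) */
        uint32_t wrid_idx;              /* SQ WR index, used by the SQ flush path only */
        uint64_t bits_type_ts;          /* big-endian: generation bit lives here */
    };

    struct sw_cq {
        struct sw_cqe sw_queue[SW_CQ_DEPTH];
        uint16_t      sw_pidx;          /* software producer index */
        uint32_t      gen;              /* current generation (ownership) bit */
    };

    /* Publish a software "flush" CQE for a work request that will never
     * complete in hardware (the QP is being torn down). */
    static void insert_flush_cqe(struct sw_cq *cq, int is_sq, uint16_t sq_idx)
    {
        struct sw_cqe cqe;

        memset(&cqe, 0, sizeof(cqe));                        /* as in both excerpts */
        cqe.header = htobe32(STATUS_SWFLUSH);                /* V_CQE_STATUS(T4_ERR_SWFLUSH) | ... */
        if (is_sq)
            cqe.wrid_idx = sq_idx;                           /* CQE_WRID_SQ_IDX(&cqe) = swcqe->idx */
        cqe.bits_type_ts = htobe64((uint64_t)cq->gen << 63); /* V_CQE_GENBIT(cq->gen), bit position assumed */

        cq->sw_queue[cq->sw_pidx] = cqe;
        cq->sw_pidx = (uint16_t)((cq->sw_pidx + 1) % SW_CQ_DEPTH);
    }

    int main(void)
    {
        struct sw_cq cq = { .gen = 1 };

        insert_flush_cqe(&cq, 0, 0);    /* flush an outstanding RQ WR */
        insert_flush_cqe(&cq, 1, 3);    /* flush the SQ WR at index 3 */
        printf("software CQEs queued: %u\n", (unsigned)cq.sw_pidx);
        return 0;
    }
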
314 swsqe->cqe.header |= htonl(V_CQE_SWCQE(1)); in flush_completed_wrs()
315 cq->sw_queue[cq->sw_pidx] = swsqe->cqe; in flush_completed_wrs()
425 swsqe->cqe = *hw_cqe; in c4iw_flush_hw_cq()
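
flush_completed_wrs() and c4iw_flush_hw_cq() above show the complementary half of the software queue: a hardware CQE that cannot be reported yet is parked in the per-WR swsqe->cqe slot, and once its WR may complete the CQE is flagged as a software CQE and copied to the software queue. A short sketch of that step; SWCQE_FLAG, the types and the helper name are illustrative assumptions, not driver symbols.

    #include <endian.h>
    #include <stdint.h>

    #define SW_CQ_DEPTH 64
    #define SWCQE_FLAG  (1u << 31)      /* assumed position of the V_CQE_SWCQE(1) bit */

    struct sw_cqe { uint32_t header; uint64_t bits_type_ts; };

    struct sw_cq {
        struct sw_cqe sw_queue[SW_CQ_DEPTH];
        uint16_t      sw_pidx;
    };

    /* Re-publish a CQE that was saved in the per-WR slot: the header is kept
     * big-endian, so the software-CQE flag is byte-swapped before the OR,
     * exactly as the |= htonl(V_CQE_SWCQE(1)) line does. */
    static void publish_sw_cqe(struct sw_cq *cq, struct sw_cqe *saved)
    {
        saved->header |= htobe32(SWCQE_FLAG);
        cq->sw_queue[cq->sw_pidx] = *saved;
        cq->sw_pidx = (uint16_t)((cq->sw_pidx + 1) % SW_CQ_DEPTH);
    }
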
440 static int cqe_completes_wr(struct t4_cqe *cqe, struct t4_wq *wq) in cqe_completes_wr() argument
442 if (CQE_OPCODE(cqe) == FW_RI_TERMINATE) in cqe_completes_wr()
445 if ((CQE_OPCODE(cqe) == FW_RI_RDMA_WRITE) && RQ_TYPE(cqe)) in cqe_completes_wr()
448 if ((CQE_OPCODE(cqe) == FW_RI_READ_RESP) && SQ_TYPE(cqe)) in cqe_completes_wr()
451 if (CQE_SEND_OPCODE(cqe) && RQ_TYPE(cqe) && t4_rq_empty(wq)) in cqe_completes_wr()
458 struct t4_cqe *cqe; in c4iw_count_rcqes() local
465 cqe = &cq->sw_queue[ptr]; in c4iw_count_rcqes()
466 if (RQ_TYPE(cqe) && (CQE_OPCODE(cqe) != FW_RI_READ_RESP) && in c4iw_count_rcqes()
467 (CQE_QPID(cqe) == wq->sq.qid) && cqe_completes_wr(cqe, wq)) in c4iw_count_rcqes()
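
cqe_completes_wr() and c4iw_count_rcqes() above decide which queued CQEs actually consume a posted receive WR and then count them for one QP. The excerpts show only the tests, so the return values and the loop bounds below are assumed from context (a terminate, a peer's RDMA write, a read response on the SQ side, or a send arriving with an empty RQ does not complete a local WR); the types and names are simplified stand-ins for the driver's macros and structures.

    #include <stdbool.h>
    #include <stdint.h>

    /* Already-decoded view of a CQE; the driver extracts these fields with the
     * CQE_OPCODE()/RQ_TYPE()/SQ_TYPE()/CQE_QPID() macros. */
    enum wr_op { OP_SEND, OP_SEND_INV, OP_RDMA_WRITE, OP_READ_RESP, OP_TERMINATE };

    struct dec_cqe {
        enum wr_op op;
        bool       rq_type;     /* completion belongs to the receive queue */
        uint32_t   qpid;
    };

    struct wq_state {
        uint32_t sq_qid;        /* QP id to match, as in the CQE_QPID() == wq->sq.qid test */
        unsigned rq_in_use;     /* 0 means t4_rq_empty() would be true */
    };

    /* Mirrors the tests visible in cqe_completes_wr(). */
    static bool completes_wr(const struct dec_cqe *c, const struct wq_state *wq)
    {
        if (c->op == OP_TERMINATE)                      /* FW_RI_TERMINATE */
            return false;
        if (c->op == OP_RDMA_WRITE && c->rq_type)       /* peer's write, no local WR consumed */
            return false;
        if (c->op == OP_READ_RESP && !c->rq_type)       /* read response reported on the SQ */
            return false;
        if ((c->op == OP_SEND || c->op == OP_SEND_INV) &&
            c->rq_type && wq->rq_in_use == 0)           /* send, but nothing posted to the RQ */
            return false;
        return true;
    }

    /* Count software-queued RQ completions for one QP, as c4iw_count_rcqes()
     * does; walking from consumer to producer index is assumed from context. */
    static int count_rcqes(const struct dec_cqe *sw_queue, unsigned cidx, unsigned pidx,
                           unsigned depth, const struct wq_state *wq)
    {
        int count = 0;

        for (unsigned ptr = cidx; ptr != pidx; ptr = (ptr + 1) % depth) {
            const struct dec_cqe *c = &sw_queue[ptr];

            if (c->rq_type && c->op != OP_READ_RESP &&
                c->qpid == wq->sq_qid && completes_wr(c, wq))
                count++;
        }
        return count;
    }
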
491 static int poll_cq(struct t4_wq *wq, struct t4_cq *cq, struct t4_cqe *cqe, in poll_cq() argument
541 *cqe = *hw_cqe; in poll_cq()
642 swsqe->cqe = *hw_cqe; in poll_cq()
649 *cqe = *hw_cqe; in poll_cq()
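
poll_cq() itself is mostly elided by this listing; the visible lines show only its signature and that the hardware CQE is either copied into the caller's buffer (*cqe = *hw_cqe) or parked in the SQ's swsqe slot (line 642) for later reporting. The sketch below models only the generic consume step of such a ring, using the generation bit to detect a valid entry; the gen-bit position, wrap handling and names are assumptions and not the driver's poll_cq() logic.

    #include <endian.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define CQ_DEPTH 64

    struct hw_cqe { uint32_t header; uint64_t bits_type_ts; };  /* gen bit in bits_type_ts */

    struct hw_cq {
        struct hw_cqe queue[CQ_DEPTH];  /* ring written by hardware */
        uint16_t      cidx;             /* consumer index */
        uint32_t      gen;              /* expected generation for valid entries */
    };

    /* Generic consume step: a CQE is valid when its generation bit matches the
     * CQ's expected generation; copy it out to the caller (as "*cqe = *hw_cqe"
     * does) and advance, flipping the expected generation on wrap. */
    static bool poll_one(struct hw_cq *cq, struct hw_cqe *out)
    {
        const struct hw_cqe *hw = &cq->queue[cq->cidx];
        uint32_t genbit = (uint32_t)(be64toh(hw->bits_type_ts) >> 63);

        if (genbit != (cq->gen & 1))
            return false;               /* slot not yet written for this pass */

        *out = *hw;                     /* hand the raw CQE back to the caller */
        if (++cq->cidx == CQ_DEPTH) {   /* wrap: next pass expects the other genbit */
            cq->cidx = 0;
            cq->gen ^= 1;
        }
        return true;
    }
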
718 struct t4_cqe cqe = {0, 0}, *rd_cqe; in c4iw_poll_cq_one() local
737 ret = poll_cq(wq, &(chp->cq), &cqe, &cqe_flushed, &cookie, &credit); in c4iw_poll_cq_one()
743 wc->vendor_err = CQE_STATUS(&cqe); in c4iw_poll_cq_one()
747 __func__, CQE_QPID(&cqe), CQE_TYPE(&cqe), CQE_OPCODE(&cqe), in c4iw_poll_cq_one()
748 CQE_STATUS(&cqe)); in c4iw_poll_cq_one()
750 __func__, CQE_LEN(&cqe), CQE_WRID_HI(&cqe), CQE_WRID_LOW(&cqe), in c4iw_poll_cq_one()
753 if (CQE_TYPE(&cqe) == 0) { in c4iw_poll_cq_one()
754 if (!CQE_STATUS(&cqe)) in c4iw_poll_cq_one()
755 wc->byte_len = CQE_LEN(&cqe); in c4iw_poll_cq_one()
759 if (CQE_OPCODE(&cqe) == FW_RI_SEND_WITH_INV || in c4iw_poll_cq_one()
760 CQE_OPCODE(&cqe) == FW_RI_SEND_WITH_SE_INV) { in c4iw_poll_cq_one()
761 wc->ex.invalidate_rkey = CQE_WRID_STAG(&cqe); in c4iw_poll_cq_one()
766 switch (CQE_OPCODE(&cqe)) { in c4iw_poll_cq_one()
772 wc->byte_len = CQE_LEN(&cqe); in c4iw_poll_cq_one()
790 if (CQE_STATUS(&cqe) != T4_ERR_SUCCESS) in c4iw_poll_cq_one()
792 CQE_WRID_FR_STAG(&cqe)); in c4iw_poll_cq_one()
800 CQE_OPCODE(&cqe), CQE_QPID(&cqe)); in c4iw_poll_cq_one()
810 switch (CQE_STATUS(&cqe)) { in c4iw_poll_cq_one()
855 CQE_STATUS(&cqe), CQE_QPID(&cqe)); in c4iw_poll_cq_one()
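
c4iw_poll_cq_one() turns the raw CQE selected by poll_cq() into a work completion: the vendor error comes from CQE_STATUS(), receive completions (CQE_TYPE() == 0) take their byte count from CQE_LEN() when the status is clean and, for sends with invalidate, an invalidate rkey from CQE_WRID_STAG(), while send-side completions are dispatched on CQE_OPCODE() and the status switch at line 810 maps the hardware status to an IB status. A condensed sketch of that mapping with simplified stand-in types; struct dec_cqe, struct work_comp and the enum values are not the driver's or verbs' real definitions, and only the cases visible in the excerpts are modeled.

    #include <stdbool.h>
    #include <stdint.h>

    enum wr_op   { OP_SEND, OP_SEND_INV, OP_RDMA_WRITE, OP_RDMA_READ };
    enum wc_stat { WC_SUCCESS, WC_FLUSH_ERR, WC_GENERAL_ERR };

    struct dec_cqe {                    /* decoded CQE fields (CQE_*() macro results) */
        bool       sq_type;             /* CQE_TYPE(): 0 = RQ completion, 1 = SQ completion */
        enum wr_op opcode;              /* CQE_OPCODE() */
        uint32_t   status;              /* CQE_STATUS() */
        uint32_t   len;                 /* CQE_LEN() */
        uint32_t   stag;                /* CQE_WRID_STAG() */
        bool       flushed;             /* the cqe_flushed flag poll_cq() reports */
    };

    struct work_comp {                  /* the few ib_wc fields touched in the excerpts */
        enum wc_stat status;
        uint32_t     vendor_err;
        uint32_t     byte_len;
        uint32_t     invalidate_rkey;
    };

    static void fill_wc(const struct dec_cqe *c, struct work_comp *wc)
    {
        wc->vendor_err = c->status;                     /* wc->vendor_err = CQE_STATUS(&cqe) */

        if (!c->sq_type) {                              /* RQ completion (CQE_TYPE == 0) */
            if (c->status == 0)
                wc->byte_len = c->len;                  /* wc->byte_len = CQE_LEN(&cqe) */
            if (c->opcode == OP_SEND_INV)
                wc->invalidate_rkey = c->stag;          /* CQE_WRID_STAG(&cqe) */
        } else {                                        /* SQ completion: dispatch on opcode */
            if (c->opcode == OP_RDMA_READ)
                wc->byte_len = c->len;                  /* length reported back (assumed from line 772) */
        }

        /* The status switch at line 810 is not shown; coarse approximation:
         * a flushed WR becomes a flush error, success stays success,
         * anything else is reported as a general error. */
        if (c->flushed)
            wc->status = WC_FLUSH_ERR;
        else if (c->status == 0)
            wc->status = WC_SUCCESS;
        else
            wc->status = WC_GENERAL_ERR;
    }
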
908 int entries = attr->cqe; in c4iw_create_cq()
968 chp->ibcq.cqe = entries - 2; in c4iw_create_cq()
1030 int c4iw_resize_cq(struct ib_cq *cq, int cqe, struct ib_udata *udata) in c4iw_resize_cq() argument
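
On the control path, c4iw_create_cq() sizes the CQ from the requested attr->cqe but reports entries - 2 back through ibcq.cqe, so two slots are held back from the consumer; the helper below merely restates that relationship and its name is illustrative. c4iw_resize_cq() appears here only with its prototype, so nothing beyond the signature is implied.

    /* Depth visible to the verbs consumer, per the create_cq excerpt:
     * the requested attr->cqe minus the two entries the driver keeps back. */
    static int usable_cq_entries(int requested_cqe)     /* requested_cqe == attr->cqe */
    {
        return requested_cqe - 2;                       /* chp->ibcq.cqe = entries - 2 */
    }
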