Lines Matching refs: op_info
325 struct irdma_rdma_write *op_info; in irdma_uk_rdma_write() local
335 op_info = &info->op.rdma_write; in irdma_uk_rdma_write()
336 if (op_info->num_lo_sges > qp->max_sq_frag_cnt) in irdma_uk_rdma_write()
339 for (i = 0; i < op_info->num_lo_sges; i++) in irdma_uk_rdma_write()
340 total_size += op_info->lo_sg_list[i].length; in irdma_uk_rdma_write()
345 frag_cnt = op_info->num_lo_sges + 1; in irdma_uk_rdma_write()
347 frag_cnt = op_info->num_lo_sges; in irdma_uk_rdma_write()
359 FIELD_PREP(IRDMAQPSQ_FRAG_TO, op_info->rem_addr.addr)); in irdma_uk_rdma_write()
367 op_info->lo_sg_list, in irdma_uk_rdma_write()
372 for (byte_off = IRDMA_BYTE_32; i < op_info->num_lo_sges; i++) { in irdma_uk_rdma_write()
374 &op_info->lo_sg_list[i], in irdma_uk_rdma_write()
388 hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, op_info->rem_addr.lkey) | in irdma_uk_rdma_write()
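
The references above cover irdma_uk_rdma_write(): lines 339-347 sum the local SG list into total_size and pick the fragment count, with one extra slot when the write carries immediate data. A minimal stand-alone sketch of that accounting follows; sge_sketch and rdma_write_sketch are simplified stand-ins, not the driver's real struct irdma_sge / struct irdma_rdma_write.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct sge_sketch {
        uint64_t addr;
        uint32_t length;
        uint32_t lkey;
    };

    struct rdma_write_sketch {
        struct sge_sketch *lo_sg_list;   /* local source fragments */
        uint32_t num_lo_sges;
        struct sge_sketch rem_addr;      /* remote address + rkey (lines 359, 388) */
    };

    /* Mirrors lines 339-347: total_size is the sum of the local SGE
     * lengths, and a write carrying immediate data needs one extra
     * fragment slot in the WQE. */
    static uint32_t write_frag_cnt(const struct rdma_write_sketch *op_info,
                                   bool has_imm, uint32_t *total_size)
    {
        uint32_t i, total = 0;

        for (i = 0; i < op_info->num_lo_sges; i++)
            total += op_info->lo_sg_list[i].length;
        *total_size = total;

        return has_imm ? op_info->num_lo_sges + 1 : op_info->num_lo_sges;
    }

    int main(void)
    {
        struct sge_sketch sgl[] = {
            { .addr = 0x1000, .length = 512, .lkey = 0x1234 },
            { .addr = 0x2000, .length = 256, .lkey = 0x1234 },
        };
        struct rdma_write_sketch op_info = {
            .lo_sg_list = sgl,
            .num_lo_sges = 2,
            .rem_addr = { .addr = 0xd0000000, .lkey = 0xbeef },
        };
        uint32_t total;

        printf("frag_cnt=%u total_size=%u\n",
               write_frag_cnt(&op_info, true, &total), total);
        return 0;
    }
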
421 struct irdma_rdma_read *op_info; in irdma_uk_rdma_read() local
434 op_info = &info->op.rdma_read; in irdma_uk_rdma_read()
435 if (qp->max_sq_frag_cnt < op_info->num_lo_sges) in irdma_uk_rdma_read()
438 for (i = 0; i < op_info->num_lo_sges; i++) in irdma_uk_rdma_read()
439 total_size += op_info->lo_sg_list[i].length; in irdma_uk_rdma_read()
441 ret_code = irdma_fragcnt_to_quanta_sq(op_info->num_lo_sges, &quanta); in irdma_uk_rdma_read()
455 addl_frag_cnt = op_info->num_lo_sges > 1 ? in irdma_uk_rdma_read()
456 (op_info->num_lo_sges - 1) : 0; in irdma_uk_rdma_read()
459 qp->wqe_ops.iw_set_fragment(wqe, IRDMA_BYTE_0, op_info->lo_sg_list, in irdma_uk_rdma_read()
461 for (i = 1, byte_off = IRDMA_BYTE_32; i < op_info->num_lo_sges; ++i) { in irdma_uk_rdma_read()
463 &op_info->lo_sg_list[i], in irdma_uk_rdma_read()
470 !(op_info->num_lo_sges & 0x01) && op_info->num_lo_sges) { in irdma_uk_rdma_read()
477 FIELD_PREP(IRDMAQPSQ_FRAG_TO, op_info->rem_addr.addr)); in irdma_uk_rdma_read()
478 hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, op_info->rem_addr.lkey) | in irdma_uk_rdma_read()
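
In irdma_uk_rdma_read(), lines 455-456 count every SGE after the first as an additional fragment, and line 470 special-cases an even, non-zero SGE count, which leaves an unused fragment slot in the last quantum. A small sketch of just that arithmetic; the helper names are hypothetical.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Lines 455-456: fragments beyond the first are reported
     * separately in the WQE header. */
    static uint32_t addl_frag_cnt(uint32_t num_lo_sges)
    {
        return num_lo_sges > 1 ? num_lo_sges - 1 : 0;
    }

    /* Line 470: an even, non-zero SGE count leaves the last quantum
     * with an unused fragment slot, which the surrounding code then
     * fills with a null fragment carrying the current polarity. */
    static bool needs_pad_fragment(uint32_t num_lo_sges)
    {
        return num_lo_sges && !(num_lo_sges & 0x01);
    }

    int main(void)
    {
        for (uint32_t n = 0; n <= 4; n++)
            printf("num_lo_sges=%u addl=%u pad=%d\n",
                   n, addl_frag_cnt(n), needs_pad_fragment(n));
        return 0;
    }
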
512 struct irdma_post_send *op_info; in irdma_uk_send() local
522 op_info = &info->op.send; in irdma_uk_send()
523 if (qp->max_sq_frag_cnt < op_info->num_sges) in irdma_uk_send()
526 for (i = 0; i < op_info->num_sges; i++) in irdma_uk_send()
527 total_size += op_info->sg_list[i].length; in irdma_uk_send()
530 frag_cnt = op_info->num_sges + 1; in irdma_uk_send()
532 frag_cnt = op_info->num_sges; in irdma_uk_send()
549 frag_cnt ? op_info->sg_list : NULL, in irdma_uk_send()
554 for (byte_off = IRDMA_BYTE_32; i < op_info->num_sges; i++) { in irdma_uk_send()
555 qp->wqe_ops.iw_set_fragment(wqe, byte_off, &op_info->sg_list[i], in irdma_uk_send()
570 FIELD_PREP(IRDMAQPSQ_DESTQKEY, op_info->qkey) | in irdma_uk_send()
571 FIELD_PREP(IRDMAQPSQ_DESTQPN, op_info->dest_qp)); in irdma_uk_send()
573 FIELD_PREP(IRDMAQPSQ_AHID, op_info->ah_id) | in irdma_uk_send()
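
irdma_uk_send() does the same SGE accounting and additionally reads the UD-only fields qkey, dest_qp and ah_id (lines 570-573). A caller-side sketch with a simplified stand-in for struct irdma_post_send; the layout and the example cap value are illustrative only.

    #include <stdint.h>

    struct sge_sketch {
        uint64_t addr;
        uint32_t length;
        uint32_t lkey;
    };

    /* Simplified stand-in for the fields referenced at lines 522-573. */
    struct post_send_sketch {
        struct sge_sketch *sg_list;
        uint32_t num_sges;
        uint32_t qkey;      /* UD: destination qkey  (line 570) */
        uint32_t dest_qp;   /* UD: destination QPN   (line 571) */
        uint32_t ah_id;     /* UD: address handle id (line 573) */
    };

    /* Mirrors the check at line 523: reject a post whose SGE count
     * exceeds what one SQ WQE can describe. */
    static int send_sges_ok(const struct post_send_sketch *op_info,
                            uint32_t max_sq_frag_cnt)
    {
        return op_info->num_sges <= max_sq_frag_cnt;
    }

    int main(void)
    {
        struct sge_sketch sge = { .addr = 0x4000, .length = 128, .lkey = 0x77 };
        struct post_send_sketch op_info = {
            .sg_list = &sge,
            .num_sges = 1,
            .qkey = 0x11111111,
            .dest_qp = 42,
            .ah_id = 3,
        };

        /* 13: example fragment cap, not a value from the listing */
        return send_sges_ok(&op_info, 13) ? 0 : 1;
    }
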
605 struct irdma_bind_window *op_info) in irdma_set_mw_bind_wqe_gen_1() argument
607 set_64bit_val(wqe, IRDMA_BYTE_0, (uintptr_t)op_info->va); in irdma_set_mw_bind_wqe_gen_1()
609 FIELD_PREP(IRDMAQPSQ_PARENTMRSTAG, op_info->mw_stag) | in irdma_set_mw_bind_wqe_gen_1()
610 FIELD_PREP(IRDMAQPSQ_MWSTAG, op_info->mr_stag)); in irdma_set_mw_bind_wqe_gen_1()
611 set_64bit_val(wqe, IRDMA_BYTE_16, op_info->bind_len); in irdma_set_mw_bind_wqe_gen_1()
668 struct irdma_bind_window *op_info) in irdma_set_mw_bind_wqe() argument
670 set_64bit_val(wqe, IRDMA_BYTE_0, (uintptr_t)op_info->va); in irdma_set_mw_bind_wqe()
672 FIELD_PREP(IRDMAQPSQ_PARENTMRSTAG, op_info->mr_stag) | in irdma_set_mw_bind_wqe()
673 FIELD_PREP(IRDMAQPSQ_MWSTAG, op_info->mw_stag)); in irdma_set_mw_bind_wqe()
674 set_64bit_val(wqe, IRDMA_BYTE_16, op_info->bind_len); in irdma_set_mw_bind_wqe()
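
The two bind setters above write the same pair of stags into opposite WQE slots: the GEN_1 variant puts mw_stag in the parent-MR position and mr_stag in the MW position (lines 609-610), while the other setter does the reverse (lines 672-673). A sketch that makes the difference explicit, using a plain struct instead of the real FIELD_PREP packing; all type and field names here are stand-ins.

    #include <stdint.h>
    #include <stdio.h>

    struct bind_window_sketch {
        uint32_t mr_stag;   /* parent memory region stag */
        uint32_t mw_stag;   /* memory window stag being bound */
    };

    /* Stand-in for the two stag fields packed into one WQE word. */
    struct wqe_stags_sketch {
        uint32_t parent_mr_stag;
        uint32_t mw_stag;
    };

    /* Matches lines 609-610 (GEN_1 setter): the fields are swapped. */
    static struct wqe_stags_sketch pack_gen_1(const struct bind_window_sketch *op_info)
    {
        return (struct wqe_stags_sketch){
            .parent_mr_stag = op_info->mw_stag,
            .mw_stag = op_info->mr_stag,
        };
    }

    /* Matches lines 672-673 (non-GEN_1 setter): natural order. */
    static struct wqe_stags_sketch pack_later_gen(const struct bind_window_sketch *op_info)
    {
        return (struct wqe_stags_sketch){
            .parent_mr_stag = op_info->mr_stag,
            .mw_stag = op_info->mw_stag,
        };
    }

    int main(void)
    {
        struct bind_window_sketch op_info = { .mr_stag = 0x100, .mw_stag = 0x200 };
        struct wqe_stags_sketch g1 = pack_gen_1(&op_info);
        struct wqe_stags_sketch g2 = pack_later_gen(&op_info);

        printf("gen1: parent=0x%x mw=0x%x\n", g1.parent_mr_stag, g1.mw_stag);
        printf("later: parent=0x%x mw=0x%x\n", g2.parent_mr_stag, g2.mw_stag);
        return 0;
    }
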
763 struct irdma_rdma_write *op_info; in irdma_uk_inline_rdma_write() local
771 op_info = &info->op.rdma_write; in irdma_uk_inline_rdma_write()
773 if (unlikely(qp->max_sq_frag_cnt < op_info->num_lo_sges)) in irdma_uk_inline_rdma_write()
776 for (i = 0; i < op_info->num_lo_sges; i++) in irdma_uk_inline_rdma_write()
777 total_size += op_info->lo_sg_list[i].length; in irdma_uk_inline_rdma_write()
790 FIELD_PREP(IRDMAQPSQ_FRAG_TO, op_info->rem_addr.addr)); in irdma_uk_inline_rdma_write()
792 hdr = FIELD_PREP(IRDMAQPSQ_REMSTAG, op_info->rem_addr.lkey) | in irdma_uk_inline_rdma_write()
808 qp->wqe_ops.iw_copy_inline_data((u8 *)wqe, op_info->lo_sg_list, in irdma_uk_inline_rdma_write()
809 op_info->num_lo_sges, qp->swqe_polarity); in irdma_uk_inline_rdma_write()
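
The inline write path sets no fragments; lines 808-809 pass the whole lo_sg_list to iw_copy_inline_data, which copies the payload into the WQE itself. A rough stand-alone model of that gather step; the real copy routine also interleaves per-quantum valid bits, which this sketch ignores.

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    struct sge_sketch {
        void *addr;
        uint32_t length;
    };

    /* Gather an SG list into one contiguous buffer, the way an inline
     * WQE carries its payload in place instead of by reference.
     * Returns the number of bytes copied, or 0 if it would not fit. */
    static size_t gather_inline(uint8_t *dst, size_t dst_len,
                                const struct sge_sketch *sg_list, uint32_t num_sges)
    {
        size_t off = 0;

        for (uint32_t i = 0; i < num_sges; i++) {
            if (off + sg_list[i].length > dst_len)
                return 0;
            memcpy(dst + off, sg_list[i].addr, sg_list[i].length);
            off += sg_list[i].length;
        }
        return off;
    }

    int main(void)
    {
        char a[] = "hello ", b[] = "world";
        struct sge_sketch sgl[] = {
            { .addr = a, .length = sizeof(a) - 1 },
            { .addr = b, .length = sizeof(b) },
        };
        uint8_t buf[64];

        printf("copied %zu bytes: %s\n",
               gather_inline(buf, sizeof(buf), sgl, 2), (char *)buf);
        return 0;
    }
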
834 struct irdma_post_send *op_info; in irdma_uk_inline_send() local
842 op_info = &info->op.send; in irdma_uk_inline_send()
844 if (unlikely(qp->max_sq_frag_cnt < op_info->num_sges)) in irdma_uk_inline_send()
847 for (i = 0; i < op_info->num_sges; i++) in irdma_uk_inline_send()
848 total_size += op_info->sg_list[i].length; in irdma_uk_inline_send()
859 FIELD_PREP(IRDMAQPSQ_DESTQKEY, op_info->qkey) | in irdma_uk_inline_send()
860 FIELD_PREP(IRDMAQPSQ_DESTQPN, op_info->dest_qp)); in irdma_uk_inline_send()
864 FIELD_PREP(IRDMAQPSQ_AHID, op_info->ah_id) | in irdma_uk_inline_send()
882 qp->wqe_ops.iw_copy_inline_data((u8 *)wqe, op_info->sg_list, in irdma_uk_inline_send()
883 op_info->num_sges, qp->swqe_polarity); in irdma_uk_inline_send()
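
irdma_uk_inline_send() carries the payload in the WQE as well, so the summed length (lines 847-848) is bounded by the QP's inline limit rather than by fragment slots. A caller-side sketch of that decision; max_inline and the 101-byte value are placeholders, not taken from the listing.

    #include <stdbool.h>
    #include <stdint.h>

    struct sge_sketch {
        uint64_t addr;
        uint32_t length;
        uint32_t lkey;
    };

    /* Sum the payload (lines 847-848) and decide whether it is small
     * enough to go out as an inline send.  max_inline stands in for
     * whatever limit the QP advertises. */
    static bool send_fits_inline(const struct sge_sketch *sg_list,
                                 uint32_t num_sges, uint32_t max_inline,
                                 uint32_t *total_size)
    {
        uint32_t i, total = 0;

        for (i = 0; i < num_sges; i++)
            total += sg_list[i].length;
        *total_size = total;

        return total <= max_inline;
    }

    int main(void)
    {
        struct sge_sketch sge = { .addr = 0x8000, .length = 64, .lkey = 0x9 };
        uint32_t total;

        /* 101: example inline limit, not a value from the listing */
        return send_fits_inline(&sge, 1, 101, &total) ? 0 : 1;
    }
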
909 struct irdma_inv_local_stag *op_info; in irdma_uk_stag_local_invalidate() local
917 op_info = &info->op.inv_local_stag; in irdma_uk_stag_local_invalidate()
924 sge.lkey = op_info->target_stag; in irdma_uk_stag_local_invalidate()
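
For a local stag invalidate the only payload is the stag itself: line 924 puts op_info->target_stag into the lkey of an otherwise empty SGE that becomes the WQE's single fragment. A sketch with stand-in types.

    #include <stdint.h>

    struct sge_sketch {
        uint64_t addr;
        uint32_t length;
        uint32_t lkey;
    };

    struct inv_local_stag_sketch {
        uint32_t target_stag;   /* stag to invalidate */
    };

    /* Mirrors line 924: a zeroed SGE whose lkey carries the stag
     * being invalidated; address and length stay 0. */
    static struct sge_sketch stag_inv_fragment(const struct inv_local_stag_sketch *op_info)
    {
        struct sge_sketch sge = { 0 };

        sge.lkey = op_info->target_stag;
        return sge;
    }

    int main(void)
    {
        struct inv_local_stag_sketch op_info = { .target_stag = 0xabcd };

        return stag_inv_fragment(&op_info).lkey == 0xabcd ? 0 : 1;
    }
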
959 struct irdma_bind_window *op_info; in irdma_uk_mw_bind() local
966 op_info = &info->op.bind_window; in irdma_uk_mw_bind()
973 qp->wqe_ops.iw_set_mw_bind_wqe(wqe, op_info); in irdma_uk_mw_bind()
977 ((op_info->ena_reads << 2) | (op_info->ena_writes << 3))) | in irdma_uk_mw_bind()
979 (op_info->addressing_type == IRDMA_ADDR_TYPE_VA_BASED ? 1 : 0)) | in irdma_uk_mw_bind()
981 (op_info->mem_window_type_1 ? 1 : 0)) | in irdma_uk_mw_bind()
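
irdma_uk_mw_bind() builds its header from op_info: read/write enables shifted into the stag-rights value (line 977), plus flags for VA-based addressing (line 979) and a type-1 window (line 981). A sketch of just that bit selection, leaving out the hardware FIELD_PREP masks, which the listing does not show; the struct here is a simplified stand-in for struct irdma_bind_window.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct bind_window_sketch {
        bool ena_reads;
        bool ena_writes;
        bool va_based;            /* addressing_type == VA-based (line 979) */
        bool mem_window_type_1;   /* type-1 memory window        (line 981) */
    };

    /* Line 977: remote read is bit 2 and remote write is bit 3 of
     * the stag-rights value placed in the WQE header. */
    static uint32_t stag_rights(const struct bind_window_sketch *op_info)
    {
        return (op_info->ena_reads << 2) | (op_info->ena_writes << 3);
    }

    int main(void)
    {
        struct bind_window_sketch op_info = {
            .ena_reads = true, .ena_writes = true,
            .va_based = true, .mem_window_type_1 = false,
        };

        printf("stag_rights=0x%x va=%d type1=%d\n", stag_rights(&op_info),
               op_info.va_based, op_info.mem_window_type_1);
        return 0;
    }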