Searched refs:cq_base (Results 1 – 16 of 16) sorted by relevance
/freebsd/contrib/ofed/libirdma/

irdma_uverbs.c
  463   info.cq_base = irdma_alloc_hw_buf(total_size);    in ucreate_cq()
  464   if (!info.cq_base) {    in ucreate_cq()
  469   memset(info.cq_base, 0, total_size);    in ucreate_cq()
  473   ret = ibv_cmd_reg_mr(&iwvctx->iwupd->ibv_pd, info.cq_base,    in ucreate_cq()
  474   total_size, (uintptr_t)info.cq_base,    in ucreate_cq()
  507   info.shadow_area = (__le64 *) ((u8 *)info.cq_base + (cq_pages << IRDMA_HW_PAGE_SHIFT));    in ucreate_cq()
  511   cmd.user_cq_buf = (__u64) ((uintptr_t)info.cq_base);    in ucreate_cq()
  538   irdma_free_hw_buf(info.cq_base, total_size);    in ucreate_cq()
  581   irdma_free_hw_buf(cq_buf->cq.cq_base, get_cq_total_bytes(cq_buf->cq.cq_size));    in irdma_free_cq_buf()
  635   irdma_free_hw_buf(iwucq->cq.cq_base, iwucq->buf_size);    in irdma_udestroy_cq()
  [all …]

irdma_user.h
  334   void irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int size);
  406   struct irdma_cqe *cq_base;    member
  448   struct irdma_cqe *cq_base;    member

irdma_defs.h
  316   (_cqe) = (_cq)->cq_base[offset].buf; \
  320   (_cq)->cq_base[IRDMA_RING_CURRENT_HEAD((_cq)->cq_ring)].buf \
  325   ((_cq)->cq_base))[IRDMA_RING_CURRENT_HEAD((_cq)->cq_ring)].buf \

irdma_uk.c
  1059  irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int cq_size)    in irdma_uk_cq_resize() argument
  1061  cq->cq_base = cq_base;    in irdma_uk_cq_resize()
  1295  ext_cqe = cq->cq_base[peek_head].buf;    in irdma_uk_cq_poll_cmpl()
  1778  cq->cq_base = info->cq_base;    in irdma_uk_cq_init()
  1808  cqe = ((struct irdma_extended_cqe *)(cq->cq_base))[cq_head].buf;    in irdma_uk_clean_cq()
  1810  cqe = cq->cq_base[cq_head].buf;    in irdma_uk_clean_cq()
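Read together, the access macros in irdma_defs.h and the poll/clean hits in irdma_uk.c show what cq_base is: the base pointer of the CQE array, indexed by the ring's current head, with an extended-CQE variant that reinterprets the same buffer. A minimal sketch of that access pattern follows; the struct layouts are cut down and the helper name is invented here for illustration, not copied from the driver.

    #include <stdint.h>

    /* Cut-down stand-ins for the driver's CQ types (illustration only). */
    struct irdma_cqe {
        uint64_t buf[4];                /* one regular completion entry */
    };

    struct irdma_extended_cqe {
        uint64_t buf[8];                /* extended completion entry */
    };

    struct irdma_ring {
        uint32_t head;                  /* index of the next CQE to consume */
        uint32_t size;                  /* number of entries in the ring */
    };

    struct irdma_cq_uk {
        struct irdma_cqe *cq_base;      /* base of the CQE array */
        struct irdma_ring cq_ring;
        uint8_t avoid_mem_cflct;        /* nonzero when extended CQEs are in use */
    };

    /* Hypothetical helper mirroring the pattern in irdma_defs.h: pick the
     * CQE at the current ring head (the driver keeps head within bounds),
     * treating the buffer as regular or extended CQEs as appropriate. */
    static uint64_t *current_cqe(struct irdma_cq_uk *cq)
    {
        uint32_t head = cq->cq_ring.head;

        if (cq->avoid_mem_cflct)
            return ((struct irdma_extended_cqe *)cq->cq_base)[head].buf;
        return cq->cq_base[head].buf;
    }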
/freebsd/sys/dev/irdma/

irdma_user.h
  433   void irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int size);
  507   struct irdma_cqe *cq_base;    member
  549   struct irdma_cqe *cq_base;    member

irdma_uk.c
  975   irdma_uk_cq_resize(struct irdma_cq_uk *cq, void *cq_base, int cq_size)    in irdma_uk_cq_resize() argument
  977   cq->cq_base = cq_base;    in irdma_uk_cq_resize()
  1186  ext_cqe = cq->cq_base[peek_head].buf;    in irdma_uk_cq_poll_cmpl()
  1696  cq->cq_base = info->cq_base;    in irdma_uk_cq_init()
  1726  cqe = ((struct irdma_extended_cqe *)(cq->cq_base))[cq_head].buf;    in irdma_uk_clean_cq()
  1728  cqe = cq->cq_base[cq_head].buf;    in irdma_uk_clean_cq()
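The resize hits at irdma_uk.c:975-977 suggest that irdma_uk_cq_resize() (prototype at irdma_user.h:433 above) simply repoints the user-kernel CQ at a buffer the caller has already allocated; allocation itself happens in the callers (irdma_resize_cq(), irdma_sc_cq_resize(), further down). A hedged sketch of that split, with cut-down types, calloc standing in for the driver's DMA-able buffer allocators, and the caller resize_cq_example() invented for the example:

    #include <stddef.h>
    #include <stdlib.h>

    /* Cut-down stand-ins (illustration only); the real definitions live in
     * irdma_user.h. */
    struct irdma_cqe {
        unsigned long long buf[4];
    };

    struct irdma_ring {
        unsigned int head;
        unsigned int size;
    };

    struct irdma_cq_uk {
        struct irdma_cqe *cq_base;
        struct irdma_ring cq_ring;
        int cq_size;
    };

    /* Shape suggested by the hits at irdma_uk.c:975-977: repoint the CQ at
     * the new ring and record the new size.  (The real function also resets
     * ring state; that part is not shown in the search results.) */
    static void uk_cq_resize_sketch(struct irdma_cq_uk *cq, void *cq_base, int cq_size)
    {
        cq->cq_base = cq_base;
        cq->cq_size = cq_size;
        cq->cq_ring.head = 0;
        cq->cq_ring.size = (unsigned int)cq_size;
    }

    /* Caller side, modelled loosely on the resize callers: allocate a new,
     * zeroed ring and hand it to the uk layer.  The old ring generally has
     * to be kept until its completions drain (cf. irdma_free_cq_buf() in
     * irdma_uverbs.c above). */
    static int resize_cq_example(struct irdma_cq_uk *cq, int new_entries)
    {
        void *new_base = calloc((size_t)new_entries, sizeof(struct irdma_cqe));

        if (!new_base)
            return -1;
        uk_cq_resize_sketch(cq, new_base, new_entries);
        return 0;
    }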
irdma_defs.h
  1400  (_cqe) = (_cq)->cq_base[offset].buf; \
  1404  (_cq)->cq_base[IRDMA_RING_CURRENT_HEAD((_cq)->cq_ring)].buf \
  1409  ((_cq)->cq_base))[IRDMA_RING_CURRENT_HEAD((_cq)->cq_ring)].buf \

irdma_type.h
  664   struct irdma_cqe *cq_base;    member
  803   struct irdma_cqe *cq_base;    member

irdma_puda.c
  281   ext_cqe = cq_uk->cq_base[peek_head].buf;    in irdma_puda_poll_info()
  838   init_info->cq_base = mem->va;    in irdma_puda_cq_create()

irdma_kcompat.c
  1074  ukinfo->cq_base = iwcq->kmem.va;    in irdma_create_cq()

irdma_ctrl.c
  2714  irdma_uk_cq_resize(&cq->cq_uk, info->cq_base, info->cq_size);    in irdma_sc_cq_resize()
  4306  cq->cq_uk.cq_base = info->cq_base;    in irdma_sc_ccq_init()

irdma_hw.c
  1100  info.cq_base = ccq->mem_cq.va;    in irdma_create_ccq()

irdma_verbs.c
  1534  info.cq_base = kmem_buf.va;    in irdma_resize_cq()
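The kernel-side hits (irdma_puda.c:838, irdma_kcompat.c:1074, irdma_hw.c:1100, irdma_verbs.c:1534, irdma_ctrl.c:4306) all follow one pattern: an init-info structure's cq_base is pointed at the kernel virtual address of the CQ's DMA memory, and the init code then adopts that pointer without allocating anything itself. A compact sketch of that ownership split; the type and function names here are hypothetical stand-ins, not the driver's:

    /* Cut-down stand-ins (illustration only). */
    struct irdma_cqe {
        unsigned long long buf[4];
    };

    struct irdma_cq_uk {
        struct irdma_cqe *cq_base;
        unsigned int cq_size;
    };

    /* Init-info pattern seen in the hits above: the caller fills cq_base
     * with the kernel VA of the DMA-able memory backing the CQE ring. */
    struct cq_uk_init_info {
        void *cq_base;          /* e.g. kmem.va / mem_cq.va / mem->va */
        unsigned int cq_size;   /* number of CQEs */
    };

    /* Loosely modelled on the irdma_uk_cq_init()/irdma_sc_ccq_init() hits:
     * the CQ object just adopts the caller's buffer pointer. */
    static void cq_init_sketch(struct irdma_cq_uk *cq,
                               const struct cq_uk_init_info *info)
    {
        cq->cq_base = info->cq_base;
        cq->cq_size = info->cq_size;
    }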
/freebsd/sys/dev/bnxt/bnxt_re/

qplib_fp.c
  1595  struct cq_base *hw_cqe;    in __clean_cq()
  2395  hwq_attr.stride = sizeof(struct cq_base);    in bnxt_qplib_create_cq()
  2503  hwq_attr.stride = sizeof(struct cq_base);    in bnxt_qplib_resize_cq()
  2754  struct cq_base *peek_hwcqe;    in bnxt_re_legacy_do_wa9060()
  3176  struct cq_base *hw_cqe;    in bnxt_qplib_is_cq_empty()
  3449  struct cq_base *hw_cqe;    in bnxt_qplib_poll_cq()

ib_verbs.c
  3835  entries * sizeof(struct cq_base),    in bnxt_re_create_cq()
  4051  entries * sizeof(struct cq_base),    in bnxt_re_resize_cq()
  5304  resp.cqe_sz = sizeof(struct cq_base);    in bnxt_re_alloc_ucontext()
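In the bnxt_re hits, struct cq_base serves as the fixed-size slot type of the CQ ring: the hardware-queue stride and the ring byte size are both derived from sizeof(struct cq_base), and the same value is reported to userspace as resp.cqe_sz. A small sketch of that sizing, using a placeholder struct since the real layout lives in hsi_struct_def.h and is not reproduced here:

    #include <stddef.h>
    #include <stdint.h>

    /* Placeholder stand-in for the firmware interface's struct cq_base; the
     * real fields (and hence the real sizeof) come from hsi_struct_def.h,
     * and the 32-byte size here is an assumption for illustration only. */
    struct cq_base {
        uint8_t raw[32];
    };

    /* Mirrors the sizing pattern in bnxt_re_create_cq()/bnxt_re_resize_cq():
     * the CQ ring is 'entries' fixed-size cq_base slots, and this sizeof()
     * is also what bnxt_re_alloc_ucontext() reports as resp.cqe_sz. */
    static size_t cq_ring_bytes(uint32_t entries)
    {
        return (size_t)entries * sizeof(struct cq_base);
    }

Sizing every CQE variant by the common cq_base slot lets producers and consumers advance the ring in fixed strides without knowing the specific completion type in advance.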
/freebsd/sys/dev/bnxt/bnxt_en/

hsi_struct_def.h
  83077  typedef struct cq_base {    struct