Searched refs:iwcq (Results 1 – 7 of 7) sorted by relevance
/freebsd/sys/dev/irdma/
irdma_kcompat.c
    937  struct irdma_cq *iwcq = to_iwcq(ibcq);  in irdma_create_cq() local
    962  cq = &iwcq->sc_cq;  in irdma_create_cq()
    963  cq->back_cq = iwcq;  in irdma_create_cq()
    964  atomic_set(&iwcq->refcnt, 1);  in irdma_create_cq()
    965  spin_lock_init(&iwcq->lock);  in irdma_create_cq()
    966  INIT_LIST_HEAD(&iwcq->resize_list);  in irdma_create_cq()
    967  INIT_LIST_HEAD(&iwcq->cmpl_generated);  in irdma_create_cq()
    973  iwcq->ibcq.cqe = info.cq_uk_init_info.cq_size;  in irdma_create_cq()
    974  atomic_set(&iwcq->armed, 0);  in irdma_create_cq()
    990  iwcq->user_mode = true;  in irdma_create_cq()
    [all …]
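The irdma_create_cq() hits above show the kernel-side CQ wrapper being wired up: a back-pointer from the hardware CQ to the wrapper, a reference count starting at 1, a lock, the resize and generated-completion lists, and the user-mode flag. Below is a minimal standalone C sketch of that initialization order; the struct layouts, the list helper, and the pthread mutex standing in for the driver's spinlock are simplified assumptions, not the driver's definitions.

/*
 * Standalone sketch (not driver code): models the initialization order the
 * irdma_create_cq() hits above show, using simplified stand-in types.
 */
#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

static void list_init(struct list_head *h) { h->next = h->prev = h; }

struct sc_cq {                     /* stand-in for struct irdma_sc_cq */
    void *back_cq;                 /* back-pointer to the wrapper CQ */
};

struct cq {                        /* stand-in for struct irdma_cq */
    struct sc_cq sc_cq;
    atomic_int refcnt;             /* atomic_set(&iwcq->refcnt, 1) */
    atomic_int armed;              /* atomic_set(&iwcq->armed, 0) */
    pthread_mutex_t lock;          /* spin_lock_init(&iwcq->lock) */
    struct list_head resize_list;
    struct list_head cmpl_generated;
    int cqe;                       /* reported size (iwcq->ibcq.cqe) */
    bool user_mode;
};

static void cq_init(struct cq *iwcq, int cq_size, bool user_mode)
{
    iwcq->sc_cq.back_cq = iwcq;    /* cq->back_cq = iwcq */
    atomic_init(&iwcq->refcnt, 1); /* creator holds the first reference */
    atomic_init(&iwcq->armed, 0);
    pthread_mutex_init(&iwcq->lock, NULL);
    list_init(&iwcq->resize_list);
    list_init(&iwcq->cmpl_generated);
    iwcq->cqe = cq_size;
    iwcq->user_mode = user_mode;
}

int main(void)
{
    struct cq iwcq;

    cq_init(&iwcq, 256, true);
    printf("cqe=%d refcnt=%d user_mode=%d\n",
           iwcq.cqe, atomic_load(&iwcq.refcnt), (int)iwcq.user_mode);
    return 0;
}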
irdma_verbs.c
    272  irdma_clean_cqes(struct irdma_qp *iwqp, struct irdma_cq *iwcq)  in irdma_clean_cqes() argument
    274  struct irdma_cq_uk *ukcq = &iwcq->sc_cq.cq_uk;  in irdma_clean_cqes()
    277  spin_lock_irqsave(&iwcq->lock, flags);  in irdma_clean_cqes()
    279  spin_unlock_irqrestore(&iwcq->lock, flags);  in irdma_clean_cqes()
   1391  irdma_cq_free_rsrc(struct irdma_pci_f *rf, struct irdma_cq *iwcq)  in irdma_cq_free_rsrc() argument
   1393  struct irdma_sc_cq *cq = &iwcq->sc_cq;  in irdma_cq_free_rsrc()
   1395  if (!iwcq->user_mode) {  in irdma_cq_free_rsrc()
   1396  irdma_free_dma_mem(rf->sc_dev.hw, &iwcq->kmem);  in irdma_cq_free_rsrc()
   1397  irdma_free_dma_mem(rf->sc_dev.hw, &iwcq->kmem_shadow);  in irdma_cq_free_rsrc()
   1423  irdma_process_resize_list(struct irdma_cq *iwcq,  in irdma_process_resize_list() argument
    [all …]
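The irdma_cq_free_rsrc() hits above free the CQE ring and shadow area only when the CQ is not in user mode, since user-mode CQs own their buffers in userspace. The sketch below models just that branch; plain malloc()/free() stand in for the driver's DMA-coherent allocations and irdma_free_dma_mem(), and the field names are illustrative assumptions.

/*
 * Standalone sketch (not driver code): models only the user_mode branch in
 * irdma_cq_free_rsrc() above.
 */
#include <stdbool.h>
#include <stdlib.h>

struct dma_mem {                  /* simplified stand-in for the driver's DMA buffer descriptor */
    void *va;
    size_t size;
};

struct cq {                       /* stand-in for struct irdma_cq */
    bool user_mode;
    struct dma_mem kmem;          /* CQE ring, kernel-mode CQs only */
    struct dma_mem kmem_shadow;   /* shadow area, kernel-mode CQs only */
};

static void cq_free_rsrc(struct cq *iwcq)
{
    /* User-mode CQs own their buffers in userspace; nothing to free here. */
    if (!iwcq->user_mode) {
        free(iwcq->kmem.va);         /* irdma_free_dma_mem(hw, &iwcq->kmem) */
        iwcq->kmem.va = NULL;
        free(iwcq->kmem_shadow.va);  /* irdma_free_dma_mem(hw, &iwcq->kmem_shadow) */
        iwcq->kmem_shadow.va = NULL;
    }
}

int main(void)
{
    struct cq iwcq = { .user_mode = false };

    iwcq.kmem.va = malloc(4096);
    iwcq.kmem_shadow.va = malloc(64);
    cq_free_rsrc(&iwcq);
    return 0;
}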
fbsd_kcompat.h
    214  void irdma_cq_free_rsrc(struct irdma_pci_f *rf, struct irdma_cq *iwcq);
    236  int irdma_process_resize_list(struct irdma_cq *iwcq, struct irdma_device *iwdev,
    251  void irdma_clean_cqes(struct irdma_qp *iwqp, struct irdma_cq *iwcq);
irdma_utils.c
    700  struct irdma_cq *iwcq = to_iwcq(ibcq);  in irdma_cq_add_ref() local
    702  atomic_inc(&iwcq->refcnt);  in irdma_cq_add_ref()
    708  struct irdma_cq *iwcq = to_iwcq(ibcq);  in irdma_cq_rem_ref() local
    709  struct irdma_pci_f *rf = container_of(iwcq->sc_cq.dev, struct irdma_pci_f, sc_dev);  in irdma_cq_rem_ref()
    713  if (!atomic_dec_and_test(&iwcq->refcnt)) {  in irdma_cq_rem_ref()
    718  rf->cq_table[iwcq->cq_num] = NULL;  in irdma_cq_rem_ref()
    720  complete(&iwcq->free_cq);  in irdma_cq_rem_ref()
   2314  irdma_cq_empty(struct irdma_cq *iwcq)  in irdma_cq_empty() argument
   2321  ukcq = &iwcq->sc_cq.cq_uk;  in irdma_cq_empty()
   2330  irdma_remove_cmpls_list(struct irdma_cq *iwcq)  in irdma_remove_cmpls_list() argument
    [all …]
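The irdma_cq_add_ref()/irdma_cq_rem_ref() hits above show the CQ lifetime rule: event paths take a reference, and the last dropper clears the rf->cq_table slot and signals a completion so destruction can proceed. A standalone sketch of that pattern follows; the pthread mutex and condition variable are stand-ins for the driver's lock and completion, and the table layout is an assumption.

/*
 * Standalone sketch (not driver code): models the reference pattern in
 * irdma_cq_add_ref()/irdma_cq_rem_ref() above.
 */
#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>

#define MAX_CQS 64

struct cq {                                   /* stand-in for struct irdma_cq */
    unsigned int cq_num;
    atomic_int refcnt;
    bool freed;                               /* set when "free_cq" completes */
    pthread_mutex_t free_lock;
    pthread_cond_t free_cv;
};

static struct cq *cq_table[MAX_CQS];          /* models rf->cq_table[] */
static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;

static void cq_add_ref(struct cq *iwcq)
{
    atomic_fetch_add(&iwcq->refcnt, 1);       /* atomic_inc(&iwcq->refcnt) */
}

static void cq_rem_ref(struct cq *iwcq)
{
    pthread_mutex_lock(&table_lock);
    if (atomic_fetch_sub(&iwcq->refcnt, 1) != 1) {
        pthread_mutex_unlock(&table_lock);    /* not the last reference */
        return;
    }
    cq_table[iwcq->cq_num] = NULL;            /* rf->cq_table[iwcq->cq_num] = NULL */
    pthread_mutex_unlock(&table_lock);

    pthread_mutex_lock(&iwcq->free_lock);     /* complete(&iwcq->free_cq) */
    iwcq->freed = true;
    pthread_cond_signal(&iwcq->free_cv);
    pthread_mutex_unlock(&iwcq->free_lock);
}

int main(void)
{
    struct cq iwcq = { .cq_num = 3 };

    atomic_init(&iwcq.refcnt, 1);             /* initial reference from create */
    pthread_mutex_init(&iwcq.free_lock, NULL);
    pthread_cond_init(&iwcq.free_cv, NULL);
    cq_table[iwcq.cq_num] = &iwcq;

    cq_add_ref(&iwcq);                        /* event path takes a reference */
    cq_rem_ref(&iwcq);                        /* ...and drops it */
    cq_rem_ref(&iwcq);                        /* destroy drops the last one */
    return iwcq.freed ? 0 : 1;
}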
irdma_verbs.h
    392  void irdma_remove_cmpls_list(struct irdma_cq *iwcq);
    393  int irdma_generated_cmpls(struct irdma_cq *iwcq, struct irdma_cq_poll_info *cq_poll_info);
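irdma_generated_cmpls() and irdma_remove_cmpls_list(), declared above, deal with software-generated completions, which irdma_create_cq() prepares for with the cmpl_generated list. The sketch below is only a guess at that pattern under simplified assumptions: entries queued on a per-CQ list, consumed one at a time by the poll path, and drained wholesale at teardown. Every field and helper name here is hypothetical, not the driver's.

/*
 * Speculative sketch: a guess at the pattern behind irdma_generated_cmpls()
 * and irdma_remove_cmpls_list() declared above; all names are hypothetical.
 */
#include <stdlib.h>
#include <string.h>

struct cq_poll_info {             /* stand-in for struct irdma_cq_poll_info */
    unsigned long wr_id;
    int error;
};

struct cmpl_gen {                 /* one software-generated completion */
    struct cmpl_gen *next;
    struct cq_poll_info info;
};

struct cq {
    struct cmpl_gen *cmpl_generated;   /* per-CQ list seeded at create time */
};

/* Pop one generated completion into *info; 0 on success, -1 if none queued. */
static int cq_generated_cmpls(struct cq *iwcq, struct cq_poll_info *info)
{
    struct cmpl_gen *e = iwcq->cmpl_generated;

    if (e == NULL)
        return -1;
    iwcq->cmpl_generated = e->next;
    memcpy(info, &e->info, sizeof(*info));
    free(e);
    return 0;
}

/* Drain everything still queued, e.g. when the CQ is being destroyed. */
static void cq_remove_cmpls_list(struct cq *iwcq)
{
    struct cmpl_gen *e;

    while ((e = iwcq->cmpl_generated) != NULL) {
        iwcq->cmpl_generated = e->next;
        free(e);
    }
}

int main(void)
{
    struct cq iwcq = { 0 };
    struct cmpl_gen *e = calloc(1, sizeof(*e));
    struct cq_poll_info info;

    if (e == NULL)
        return 1;
    e->info.wr_id = 42;
    iwcq.cmpl_generated = e;

    (void)cq_generated_cmpls(&iwcq, &info);   /* consumes wr_id 42 */
    cq_remove_cmpls_list(&iwcq);              /* list is already empty */
    return 0;
}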
irdma_hw.c
     93  irdma_iwarp_ce_handler(struct irdma_sc_cq *iwcq)  in irdma_iwarp_ce_handler() argument
     95  struct irdma_cq *cq = iwcq->back_cq;  in irdma_iwarp_ce_handler()
    217  struct irdma_cq *iwcq = NULL;  in irdma_process_aeq() local
    345  iwcq = rf->cq_table[info->qp_cq_id];  in irdma_process_aeq()
    346  if (!iwcq) {  in irdma_process_aeq()
    354  irdma_cq_add_ref(&iwcq->ibcq);  in irdma_process_aeq()
    356  if (iwcq->ibcq.event_handler) {  in irdma_process_aeq()
    359  ibevent.device = iwcq->ibcq.device;  in irdma_process_aeq()
    361  ibevent.element.cq = &iwcq->ibcq;  in irdma_process_aeq()
    362  iwcq->ibcq.event_handler(&ibevent,  in irdma_process_aeq()
    [all …]
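The irdma_process_aeq() hits above show how an asynchronous CQ error reaches the consumer: the CQ is looked up by id in rf->cq_table, a reference is taken around the callback, an event describing the error is handed to ibcq.event_handler, and the reference is dropped. The standalone sketch below models that dispatch with simplified stand-in types; the event type, handler signature, and table layout are assumptions, not the ibverbs or driver definitions.

/*
 * Standalone sketch (not driver code): models the asynchronous CQ-error
 * dispatch visible in irdma_process_aeq() above -- look the CQ up by id,
 * hold a reference across the callback, hand an event to the handler.
 */
#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

enum cq_event_type { CQ_EVENT_ERR };          /* stand-in for the CQ-error event type */

struct cq;

struct cq_event {                             /* stand-in for struct ib_event */
    enum cq_event_type event;
    struct cq *cq;
};

struct cq {                                   /* stand-in for struct irdma_cq */
    unsigned int cq_num;
    atomic_int refcnt;
    void (*event_handler)(struct cq_event *ev, void *cq_context);
    void *cq_context;
};

#define MAX_CQS 64
static struct cq *cq_table[MAX_CQS];          /* models rf->cq_table[] */

static void cq_error_event(unsigned int qp_cq_id)
{
    struct cq *iwcq = qp_cq_id < MAX_CQS ? cq_table[qp_cq_id] : NULL;
    struct cq_event ev;

    if (iwcq == NULL)                         /* CQ already gone: ignore */
        return;

    atomic_fetch_add(&iwcq->refcnt, 1);       /* irdma_cq_add_ref(&iwcq->ibcq) */
    if (iwcq->event_handler) {
        ev.event = CQ_EVENT_ERR;
        ev.cq = iwcq;                         /* ibevent.element.cq = &iwcq->ibcq */
        iwcq->event_handler(&ev, iwcq->cq_context);
    }
    atomic_fetch_sub(&iwcq->refcnt, 1);       /* irdma_cq_rem_ref(&iwcq->ibcq) */
}

static void on_cq_err(struct cq_event *ev, void *ctx)
{
    (void)ctx;
    printf("CQ %u reported an error\n", ev->cq->cq_num);
}

int main(void)
{
    struct cq iwcq = { .cq_num = 5, .event_handler = on_cq_err };

    atomic_init(&iwcq.refcnt, 1);
    cq_table[iwcq.cq_num] = &iwcq;
    cq_error_event(iwcq.cq_num);
    return 0;
}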
irdma_main.h
    599  bool irdma_cq_empty(struct irdma_cq *iwcq);