Lines Matching full:gc (FreeBSD MANA driver, sys/dev/mana/gdma_main.c)
102 struct gdma_context *gc = device_get_softc(dev); in mana_gd_query_max_resources() local
110 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_query_max_resources()
112 device_printf(gc->dev, in mana_gd_query_max_resources()
123 if (gc->num_msix_usable > resp.max_msix) in mana_gd_query_max_resources()
124 gc->num_msix_usable = resp.max_msix; in mana_gd_query_max_resources()
126 if (gc->num_msix_usable <= 1) in mana_gd_query_max_resources()
129 gc->max_num_queues = mp_ncpus; in mana_gd_query_max_resources()
130 if (gc->max_num_queues > MANA_MAX_NUM_QUEUES) in mana_gd_query_max_resources()
131 gc->max_num_queues = MANA_MAX_NUM_QUEUES; in mana_gd_query_max_resources()
133 if (gc->max_num_queues > resp.max_eq) in mana_gd_query_max_resources()
134 gc->max_num_queues = resp.max_eq; in mana_gd_query_max_resources()
136 if (gc->max_num_queues > resp.max_cq) in mana_gd_query_max_resources()
137 gc->max_num_queues = resp.max_cq; in mana_gd_query_max_resources()
139 if (gc->max_num_queues > resp.max_sq) in mana_gd_query_max_resources()
140 gc->max_num_queues = resp.max_sq; in mana_gd_query_max_resources()
142 if (gc->max_num_queues > resp.max_rq) in mana_gd_query_max_resources()
143 gc->max_num_queues = resp.max_rq; in mana_gd_query_max_resources()
151 struct gdma_context *gc = device_get_softc(dev); in mana_gd_detect_devices() local
162 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_detect_devices()
164 device_printf(gc->dev, in mana_gd_detect_devices()
184 gc->mana.gdma_context = gc; in mana_gd_detect_devices()
185 gc->mana.dev_id = gd_dev; in mana_gd_detect_devices()
189 return gc->mana.dev_id.type == 0 ? ENODEV : 0; in mana_gd_detect_devices()
193 mana_gd_send_request(struct gdma_context *gc, uint32_t req_len, in mana_gd_send_request() argument
196 struct hw_channel_context *hwc = gc->hwc.driver_data; in mana_gd_send_request()
214 mana_gd_alloc_memory(struct gdma_context *gc, unsigned int length, in mana_gd_alloc_memory() argument
221 if (!gc || !gmi) in mana_gd_alloc_memory()
227 err = bus_dma_tag_create(bus_get_dma_tag(gc->dev), /* parent */ in mana_gd_alloc_memory()
239 device_printf(gc->dev, in mana_gd_alloc_memory()
252 device_printf(gc->dev, in mana_gd_alloc_memory()
261 device_printf(gc->dev, in mana_gd_alloc_memory()
268 gmi->dev = gc->dev; in mana_gd_alloc_memory()
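mana_gd_alloc_memory() follows the standard FreeBSD busdma sequence visible in the matches: create a DMA tag parented to the device's tag, allocate coherent memory against it, and load the map to learn the bus address. A trimmed kernel-context sketch of that sequence (the helper name and the single-segment callback are illustrative, not the driver's own; error unwinding is omitted):

#include <sys/param.h>
#include <sys/bus.h>
#include <machine/bus.h>

static void
one_seg_cb(void *arg, bus_dma_segment_t *segs, int nseg, int error)
{
        /* Capture the single segment's bus address for the caller. */
        if (error == 0 && nseg == 1)
                *(bus_addr_t *)arg = segs[0].ds_addr;
}

static int
alloc_dma_coherent(device_t dev, bus_size_t len, bus_dma_tag_t *tag,
    bus_dmamap_t *map, void **va, bus_addr_t *pa)
{
        int err;

        err = bus_dma_tag_create(bus_get_dma_tag(dev),  /* parent */
            PAGE_SIZE, 0,               /* alignment, boundary */
            BUS_SPACE_MAXADDR,          /* lowaddr */
            BUS_SPACE_MAXADDR,          /* highaddr */
            NULL, NULL,                 /* filter, filterarg */
            len, 1, len,                /* maxsize, nsegments, maxsegsz */
            0, NULL, NULL, tag);
        if (err != 0)
                return (err);

        err = bus_dmamem_alloc(*tag, va, BUS_DMA_NOWAIT | BUS_DMA_ZERO, map);
        if (err != 0)
                return (err);

        return (bus_dmamap_load(*tag, *map, *va, len, one_seg_cb, pa,
            BUS_DMA_NOWAIT));
}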
285 mana_gd_destroy_doorbell_page(struct gdma_context *gc, int doorbell_page) in mana_gd_destroy_doorbell_page() argument
298 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_destroy_doorbell_page()
300 device_printf(gc->dev, in mana_gd_destroy_doorbell_page()
310 mana_gd_allocate_doorbell_page(struct gdma_context *gc, int *doorbell_page) in mana_gd_allocate_doorbell_page() argument
326 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_allocate_doorbell_page()
328 device_printf(gc->dev, in mana_gd_allocate_doorbell_page()
340 mana_gd_create_hw_eq(struct gdma_context *gc, in mana_gd_create_hw_eq() argument
362 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_create_hw_eq()
364 device_printf(gc->dev, in mana_gd_create_hw_eq()
379 struct gdma_context *gc = queue->gdma_dev->gdma_context; in mana_gd_disable_queue() local
396 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_disable_queue()
398 device_printf(gc->dev, in mana_gd_disable_queue()
413 mana_gd_ring_doorbell(struct gdma_context *gc, uint32_t db_index, in mana_gd_ring_doorbell() argument
420 addr = (char *)gc->db_page_base + gc->db_page_size * db_index; in mana_gd_ring_doorbell()
471 mana_gd_wq_ring_doorbell(struct gdma_context *gc, struct gdma_queue *queue) in mana_gd_wq_ring_doorbell() argument
473 mana_gd_ring_doorbell(gc, queue->gdma_dev->doorbell, queue->type, in mana_gd_wq_ring_doorbell()
480 struct gdma_context *gc = cq->gdma_dev->gdma_context; in mana_gd_ring_cq() local
486 mana_gd_ring_doorbell(gc, cq->gdma_dev->doorbell, cq->type, cq->id, in mana_gd_ring_cq()
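All three doorbell helpers above funnel into the same address computation from line 420: the write target is the mapped doorbell base plus one db_page_size stride per doorbell index. A hypothetical userspace illustration of just that math:

#include <stddef.h>
#include <stdint.h>

/* Hypothetical: resolve the 64-bit doorbell register for an index. */
static inline volatile uint64_t *
doorbell_addr(void *db_page_base, uint32_t db_page_size,
    uint32_t db_index, uint32_t offset_in_page)
{
        char *page = (char *)db_page_base +
            (size_t)db_page_size * db_index;

        return ((volatile uint64_t *)(page + offset_in_page));
}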
494 struct gdma_context *gc = eq->gdma_dev->gdma_context; in mana_gd_process_eqe() local
510 if (cq_id >= gc->max_num_cqs) { in mana_gd_process_eqe()
513 cq_id, gc->max_num_cqs); in mana_gd_process_eqe()
517 cq = gc->cq_table[cq_id]; in mana_gd_process_eqe()
530 gc->test_event_eq_id = eq->id; in mana_gd_process_eqe()
535 complete(&gc->eq_test_event); in mana_gd_process_eqe()
561 struct gdma_context *gc; in mana_gd_process_eq_events() local
566 gc = eq->gdma_dev->gdma_context; in mana_gd_process_eq_events()
589 device_printf(gc->dev, in mana_gd_process_eq_events()
605 device_printf(gc->dev, "%p: %x\t%x\t%x\t%x\n", in mana_gd_process_eq_events()
624 mana_gd_ring_doorbell(gc, eq->gdma_dev->doorbell, eq->type, eq->id, in mana_gd_process_eq_events()
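mana_gd_process_eqe() guards its completion-queue dispatch exactly as lines 510-517 show: the CQ id carried in the EQE indexes gc->cq_table only after a bounds check against max_num_cqs. A self-contained sketch of that bounded lookup (types and the handler signature are illustrative):

#include <stdint.h>
#include <stdio.h>

struct cq {
        void (*callback)(struct cq *);
};

static void
dispatch_cq_event(struct cq **cq_table, uint32_t max_num_cqs,
    uint32_t cq_id)
{
        struct cq *cq;

        /* Reject out-of-range ids before touching the table. */
        if (cq_id >= max_num_cqs) {
                fprintf(stderr, "EQE has invalid cq id %u >= %u\n",
                    cq_id, max_num_cqs);
                return;
        }

        cq = cq_table[cq_id];
        if (cq != NULL && cq->callback != NULL)
                cq->callback(cq);
}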
634 struct gdma_context *gc; in mana_gd_register_irq() local
639 gc = gd->gdma_context; in mana_gd_register_irq()
640 r = &gc->msix_resource; in mana_gd_register_irq()
658 if (unlikely(msi_index >= gc->num_msix_usable)) { in mana_gd_register_irq()
659 device_printf(gc->dev, in mana_gd_register_irq()
661 msi_index, gc->num_msix_usable); in mana_gd_register_irq()
665 gic = &gc->irq_contexts[msi_index]; in mana_gd_register_irq()
668 device_printf(gc->dev, in mana_gd_register_irq()
688 struct gdma_context *gc; in mana_gd_deregiser_irq() local
692 gc = gd->gdma_context; in mana_gd_deregiser_irq()
693 r = &gc->msix_resource; in mana_gd_deregiser_irq()
697 if (unlikely(msix_index >= gc->num_msix_usable)) in mana_gd_deregiser_irq()
700 gic = &gc->irq_contexts[msix_index]; in mana_gd_deregiser_irq()
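Both mana_gd_register_irq() and mana_gd_deregiser_irq() route MSI-X indices through gc->msix_resource and bounds-check them against num_msix_usable before touching irq_contexts. A generic bitmap index map of the kind those matches imply (hypothetical; the driver's own map lives behind its mana_gd_alloc_res_map()/mana_gd_free_res_map() helpers seen below):

#include <stdint.h>

/* Hypothetical fixed-size index map; the driver's map is dynamic. */
struct res_map {
        uint64_t bits;          /* 1 = in use; up to 64 indices */
        unsigned int size;
};

static int
res_map_alloc(struct res_map *r)
{
        for (unsigned int i = 0; i < r->size && i < 64; i++) {
                if ((r->bits & (1ULL << i)) == 0) {
                        r->bits |= 1ULL << i;
                        return ((int)i);
                }
        }
        return (-1);    /* map exhausted */
}

static void
res_map_free(struct res_map *r, unsigned int i)
{
        if (i < r->size && i < 64)
                r->bits &= ~(1ULL << i);
}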
715 mana_gd_test_eq(struct gdma_context *gc, struct gdma_queue *eq) in mana_gd_test_eq() argument
719 device_t dev = gc->dev; in mana_gd_test_eq()
722 sx_xlock(&gc->eq_test_event_sx); in mana_gd_test_eq()
724 init_completion(&gc->eq_test_event); in mana_gd_test_eq()
725 gc->test_event_eq_id = INVALID_QUEUE_ID; in mana_gd_test_eq()
733 err = mana_gd_send_request(gc, sizeof(req), &req, in mana_gd_test_eq()
748 if (wait_for_completion_timeout(&gc->eq_test_event, 30 * hz)) { in mana_gd_test_eq()
754 if (eq->id != gc->test_event_eq_id) { in mana_gd_test_eq()
757 gc->test_event_eq_id, eq->id); in mana_gd_test_eq()
763 sx_xunlock(&gc->eq_test_event_sx); in mana_gd_test_eq()
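mana_gd_test_eq() serializes callers with the eq_test_event_sx lock, arms a completion, asks the device to fire a test EQE, and waits up to 30 seconds (30 * hz ticks) for mana_gd_process_eqe() to record the answering EQ id and complete the event. Note that the branch at line 748 suggests this compat shim returns nonzero on timeout, the opposite of the Linux API. A schematic of that pattern in the driver's idiom (not the exact body; the request helper is hypothetical and error unwinding is trimmed):

/* Schematic only: gc and eq as in the driver. */
sx_xlock(&gc->eq_test_event_sx);

init_completion(&gc->eq_test_event);
gc->test_event_eq_id = INVALID_QUEUE_ID;

err = send_test_eqe_request(gc, eq);    /* hypothetical helper */
if (err == 0) {
        /* Nonzero return from this shim means the wait timed out. */
        if (wait_for_completion_timeout(&gc->eq_test_event, 30 * hz))
                err = ETIMEDOUT;
        else if (gc->test_event_eq_id != eq->id)
                err = EINVAL;   /* a different EQ answered */
}

sx_xunlock(&gc->eq_test_event_sx);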
768 mana_gd_destroy_eq(struct gdma_context *gc, bool flush_evenets, in mana_gd_destroy_eq() argument
774 err = mana_gd_test_eq(gc, queue); in mana_gd_destroy_eq()
776 device_printf(gc->dev, in mana_gd_destroy_eq()
790 struct gdma_context *gc = gd->gdma_context; in mana_gd_create_eq() local
791 device_t dev = gc->dev; in mana_gd_create_eq()
818 err = mana_gd_create_hw_eq(gc, queue); in mana_gd_create_eq()
822 err = mana_gd_test_eq(gc, queue); in mana_gd_create_eq()
830 mana_gd_destroy_eq(gc, false, queue); in mana_gd_create_eq()
847 mana_gd_destroy_cq(struct gdma_context *gc, in mana_gd_destroy_cq() argument
852 if (id >= gc->max_num_cqs) in mana_gd_destroy_cq()
855 if (!gc->cq_table[id]) in mana_gd_destroy_cq()
858 gc->cq_table[id] = NULL; in mana_gd_destroy_cq()
865 struct gdma_context *gc = gd->gdma_context; in mana_gd_create_hwc_queue() local
872 err = mana_gd_alloc_memory(gc, spec->queue_size, gmi); in mana_gd_create_hwc_queue()
902 mana_gd_destroy_dma_region(struct gdma_context *gc, in mana_gd_destroy_dma_region() argument
916 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), in mana_gd_destroy_dma_region()
919 device_printf(gc->dev, in mana_gd_destroy_dma_region()
935 struct gdma_context *gc = gd->gdma_context; in mana_gd_create_dma_region() local
953 hwc = gc->hwc.driver_data; in mana_gd_create_dma_region()
973 err = mana_gd_send_request(gc, req_msg_size, req, sizeof(resp), &resp); in mana_gd_create_dma_region()
979 device_printf(gc->dev, "Failed to create DMA region: 0x%x\n", in mana_gd_create_dma_region()
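mana_gd_create_dma_region() is the one request in this file with a variable size: the page frame list rides in a flexible array after the fixed header, so req_msg_size at line 973 is the header size plus one 64-bit PFN per page. A generic sketch of that sizing, with hypothetical struct and field names:

#include <stdint.h>
#include <stdlib.h>

struct create_dma_region_req {          /* hypothetical layout */
        uint64_t length;
        uint32_t num_pages;
        uint64_t page_addr_list[];      /* flexible array of PFNs */
};

static struct create_dma_region_req *
alloc_region_req(uint32_t num_pages, size_t *req_msg_size)
{
        /* Header plus one 64-bit page address per page. */
        *req_msg_size = sizeof(struct create_dma_region_req) +
            (size_t)num_pages * sizeof(uint64_t);
        return (calloc(1, *req_msg_size));
}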
996 struct gdma_context *gc = gd->gdma_context; in mana_gd_create_mana_eq() local
1006 err = mana_gd_alloc_memory(gc, spec->queue_size, gmi); in mana_gd_create_mana_eq()
1040 struct gdma_context *gc = gd->gdma_context; in mana_gd_create_mana_wq_cq() local
1051 err = mana_gd_alloc_memory(gc, spec->queue_size, gmi); in mana_gd_create_mana_wq_cq()
1081 mana_gd_destroy_queue(struct gdma_context *gc, struct gdma_queue *queue) in mana_gd_destroy_queue() argument
1087 mana_gd_destroy_eq(gc, queue->eq.disable_needed, queue); in mana_gd_destroy_queue()
1091 mana_gd_destroy_cq(gc, queue); in mana_gd_destroy_queue()
1101 device_printf(gc->dev, in mana_gd_destroy_queue()
1107 mana_gd_destroy_dma_region(gc, gmi->dma_region_handle); in mana_gd_destroy_queue()
1118 struct gdma_context *gc = device_get_softc(dev); in mana_gd_verify_vf_version() local
1137 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_verify_vf_version()
1139 device_printf(gc->dev, in mana_gd_verify_vf_version()
1151 struct gdma_context *gc = gd->gdma_context; in mana_gd_register_device() local
1165 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_register_device()
1167 device_printf(gc->dev, in mana_gd_register_device()
1186 struct gdma_context *gc = gd->gdma_context; in mana_gd_deregister_device() local
1199 err = mana_gd_send_request(gc, sizeof(req), &req, sizeof(resp), &resp); in mana_gd_deregister_device()
1201 device_printf(gc->dev, in mana_gd_deregister_device()
1321 struct gdma_context *gc; in mana_gd_post_work_request() local
1352 gc = wq->gdma_dev->gdma_context; in mana_gd_post_work_request()
1353 device_printf(gc->dev, "unsuccessful flow control!\n"); in mana_gd_post_work_request()
1381 struct gdma_context *gc = queue->gdma_dev->gdma_context; in mana_gd_post_and_ring() local
1388 mana_gd_wq_ring_doorbell(gc, queue); in mana_gd_post_and_ring()
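mana_gd_post_and_ring() pairs the two steps seen in the matches: publish the work request into queue memory, then ring the work-queue doorbell. The ordering is the essential part, so the device can never observe the doorbell ahead of the WQE bytes. A portable C11 sketch of that pattern (names and the byte-granular head are illustrative, and a WQE wrapping past the ring end is not handled):

#include <stdatomic.h>
#include <stdint.h>
#include <string.h>

struct wq {
        uint8_t *buf;
        uint32_t size;  /* ring size in bytes, power of two */
        uint32_t head;  /* producer offset in bytes */
};

static void
post_and_ring(struct wq *wq, const void *wqe, uint32_t len,
    volatile uint64_t *doorbell)
{
        /* The mask works because size is a power of two. */
        memcpy(wq->buf + (wq->head & (wq->size - 1)), wqe, len);
        wq->head += len;

        /* Release fence: WQE contents before the doorbell value. */
        atomic_thread_fence(memory_order_release);
        *doorbell = wq->head;
}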
1492 mana_gd_init_registers(struct gdma_context *gc) in mana_gd_init_registers() argument
1494 uintptr_t bar0_va = rman_get_bushandle(gc->bar0); in mana_gd_init_registers()
1495 vm_paddr_t bar0_pa = rman_get_start(gc->bar0); in mana_gd_init_registers()
1497 gc->db_page_size = mana_gd_r32(gc, GDMA_REG_DB_PAGE_SIZE) & 0xFFFF; in mana_gd_init_registers()
1499 gc->db_page_base = in mana_gd_init_registers()
1500 (void *)(bar0_va + (size_t)mana_gd_r64(gc, GDMA_REG_DB_PAGE_OFFSET)); in mana_gd_init_registers()
1502 gc->phys_db_page_base = in mana_gd_init_registers()
1503 bar0_pa + mana_gd_r64(gc, GDMA_REG_DB_PAGE_OFFSET); in mana_gd_init_registers()
1505 gc->shm_base = in mana_gd_init_registers()
1506 (void *)(bar0_va + (size_t)mana_gd_r64(gc, GDMA_REG_SHM_OFFSET)); in mana_gd_init_registers()
1510 gc->db_page_size, gc->db_page_base, gc->shm_base); in mana_gd_init_registers()
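mana_gd_init_registers() turns three BAR0 register reads into pointers: a 16-bit doorbell page size, plus doorbell and shared-memory bases formed by adding register-supplied offsets to the BAR's virtual and physical start addresses. The mana_gd_r32()/mana_gd_r64() calls it makes are presumably thin bus-space wrappers over the bar0 tag and handle captured at attach; a plausible sketch, not the file's actual helpers:

#include <sys/param.h>
#include <sys/bus.h>
#include <machine/bus.h>

/* Assumed shapes of the BAR0 read helpers used above. */
static inline uint32_t
mana_gd_r32(struct gdma_context *gc, uint64_t offset)
{
        return (bus_space_read_4(gc->gd_bus.bar0_t, gc->gd_bus.bar0_h,
            offset));
}

static inline uint64_t
mana_gd_r64(struct gdma_context *gc, uint64_t offset)
{
        return (bus_space_read_8(gc->gd_bus.bar0_t, gc->gd_bus.bar0_h,
            offset));
}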
1548 mana_gd_free_pci_res(struct gdma_context *gc) in mana_gd_free_pci_res() argument
1550 if (!gc || !gc->dev) in mana_gd_free_pci_res()
1553 if (gc->bar0 != NULL) { in mana_gd_free_pci_res()
1554 bus_release_resource(gc->dev, SYS_RES_MEMORY, in mana_gd_free_pci_res()
1555 PCIR_BAR(GDMA_BAR0), gc->bar0); in mana_gd_free_pci_res()
1558 if (gc->msix != NULL) { in mana_gd_free_pci_res()
1559 bus_release_resource(gc->dev, SYS_RES_MEMORY, in mana_gd_free_pci_res()
1560 gc->msix_rid, gc->msix); in mana_gd_free_pci_res()
1568 struct gdma_context *gc = device_get_softc(dev); in mana_gd_setup_irqs() local
1602 gc->irq_contexts = malloc(nvec * sizeof(struct gdma_irq_context), in mana_gd_setup_irqs()
1606 gic = &gc->irq_contexts[i]; in mana_gd_setup_irqs()
1637 rc = mana_gd_alloc_res_map(nvec, &gc->msix_resource, in mana_gd_setup_irqs()
1645 gc->max_num_msix = nvec; in mana_gd_setup_irqs()
1646 gc->num_msix_usable = nvec; in mana_gd_setup_irqs()
1654 gic = &gc->irq_contexts[i]; in mana_gd_setup_irqs()
1681 free(gc->irq_contexts, M_DEVBUF); in mana_gd_setup_irqs()
1682 gc->irq_contexts = NULL; in mana_gd_setup_irqs()
1692 struct gdma_context *gc = device_get_softc(dev); in mana_gd_remove_irqs() local
1696 mana_gd_free_res_map(&gc->msix_resource); in mana_gd_remove_irqs()
1698 for (i = 0; i < gc->max_num_msix; i++) { in mana_gd_remove_irqs()
1699 gic = &gc->irq_contexts[i]; in mana_gd_remove_irqs()
1722 gc->max_num_msix = 0; in mana_gd_remove_irqs()
1723 gc->num_msix_usable = 0; in mana_gd_remove_irqs()
1724 free(gc->irq_contexts, M_DEVBUF); in mana_gd_remove_irqs()
1725 gc->irq_contexts = NULL; in mana_gd_remove_irqs()
1768 struct gdma_context *gc; in mana_gd_attach() local
1772 gc = device_get_softc(dev); in mana_gd_attach()
1773 gc->dev = dev; in mana_gd_attach()
1780 gc->bar0 = mana_gd_alloc_bar(dev, GDMA_BAR0); in mana_gd_attach()
1781 if (unlikely(gc->bar0 == NULL)) { in mana_gd_attach()
1789 gc->gd_bus.bar0_t = rman_get_bustag(gc->bar0); in mana_gd_attach()
1790 gc->gd_bus.bar0_h = rman_get_bushandle(gc->bar0); in mana_gd_attach()
1797 gc->msix = bus_alloc_resource_any(dev, SYS_RES_MEMORY, in mana_gd_attach()
1799 if (unlikely(gc->msix == NULL)) { in mana_gd_attach()
1805 gc->msix_rid = msix_rid; in mana_gd_attach()
1807 if (unlikely(gc->gd_bus.bar0_h == 0)) { in mana_gd_attach()
1813 mana_gd_init_registers(gc); in mana_gd_attach()
1815 mana_smc_init(&gc->shm_channel, gc->dev, gc->shm_base); in mana_gd_attach()
1822 sx_init(&gc->eq_test_event_sx, "gdma test event sx"); in mana_gd_attach()
1824 rc = mana_hwc_create_channel(gc); in mana_gd_attach()
1851 rc = mana_probe(&gc->mana); in mana_gd_attach()
1860 mana_hwc_destroy_channel(gc); in mana_gd_attach()
1864 mana_gd_free_pci_res(gc); in mana_gd_attach()
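Read together, the attach matches trace the bring-up order: softc and dev, BAR0, bus tag/handle, the MSI-X table resource, register-derived pointers, the shared-memory channel, the EQ-test lock, the HW channel, and finally mana_probe(), with error labels unwinding in reverse. A condensed schematic of that skeleton (labels and intermediate checks are illustrative; the real function has more steps):

static int
mana_gd_attach(device_t dev)
{
        struct gdma_context *gc = device_get_softc(dev);
        int rc;

        gc->dev = dev;

        gc->bar0 = mana_gd_alloc_bar(dev, GDMA_BAR0);
        if (gc->bar0 == NULL)
                return (ENXIO);

        mana_gd_init_registers(gc);
        mana_smc_init(&gc->shm_channel, gc->dev, gc->shm_base);

        sx_init(&gc->eq_test_event_sx, "gdma test event sx");

        rc = mana_hwc_create_channel(gc);
        if (rc != 0)
                goto err_free_res;

        rc = mana_probe(&gc->mana);
        if (rc != 0)
                goto err_destroy_hwc;

        return (0);

err_destroy_hwc:
        mana_hwc_destroy_channel(gc);
err_free_res:
        mana_gd_free_pci_res(gc);
        return (rc);
}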
1881 struct gdma_context *gc = device_get_softc(dev); in mana_gd_detach() local
1888 mana_remove(&gc->mana); in mana_gd_detach()
1890 mana_hwc_destroy_channel(gc); in mana_gd_detach()
1894 mana_gd_free_pci_res(gc); in mana_gd_detach()