Lines Matching +full:ch3 +full:- +full:0

1 /*-
65 #define BUS_SPACE_MAXADDR_40BIT MIN(BUS_SPACE_MAXADDR, 0xFFFFFFFFFFULL)
68 #define BUS_SPACE_MAXADDR_46BIT MIN(BUS_SPACE_MAXADDR, 0x3FFFFFFFFFFFULL)
111 device_printf(ioat->device, __VA_ARGS__); \
113 } while (0)
116 SYSCTL_NODE(_hw, OID_AUTO, ioat, CTLFLAG_RD | CTLFLAG_MPSAFE, 0,
121 &g_force_legacy_interrupts, 0, "Set to non-zero to disable MSI-X");
123 int g_ioat_debug_level = 0;
125 0, "Set log level (0-3) for ioat(4). Higher is more verbose.");
129 0, "Set IOAT ring order. (1 << this) == ring size.");
132 * OS <-> Driver interface structures
155 static unsigned ioat_channel_index = 0;
156 SYSCTL_UINT(_hw_ioat, OID_AUTO, channels, CTLFLAG_RD, &ioat_channel_index, 0,
166 { 0x34308086, "TBG IOAT Ch0" },
167 { 0x34318086, "TBG IOAT Ch1" },
168 { 0x34328086, "TBG IOAT Ch2" },
169 { 0x34338086, "TBG IOAT Ch3" },
170 { 0x34298086, "TBG IOAT Ch4" },
171 { 0x342a8086, "TBG IOAT Ch5" },
172 { 0x342b8086, "TBG IOAT Ch6" },
173 { 0x342c8086, "TBG IOAT Ch7" },
175 { 0x37108086, "JSF IOAT Ch0" },
176 { 0x37118086, "JSF IOAT Ch1" },
177 { 0x37128086, "JSF IOAT Ch2" },
178 { 0x37138086, "JSF IOAT Ch3" },
179 { 0x37148086, "JSF IOAT Ch4" },
180 { 0x37158086, "JSF IOAT Ch5" },
181 { 0x37168086, "JSF IOAT Ch6" },
182 { 0x37178086, "JSF IOAT Ch7" },
183 { 0x37188086, "JSF IOAT Ch0 (RAID)" },
184 { 0x37198086, "JSF IOAT Ch1 (RAID)" },
186 { 0x3c208086, "SNB IOAT Ch0" },
187 { 0x3c218086, "SNB IOAT Ch1" },
188 { 0x3c228086, "SNB IOAT Ch2" },
189 { 0x3c238086, "SNB IOAT Ch3" },
190 { 0x3c248086, "SNB IOAT Ch4" },
191 { 0x3c258086, "SNB IOAT Ch5" },
192 { 0x3c268086, "SNB IOAT Ch6" },
193 { 0x3c278086, "SNB IOAT Ch7" },
194 { 0x3c2e8086, "SNB IOAT Ch0 (RAID)" },
195 { 0x3c2f8086, "SNB IOAT Ch1 (RAID)" },
197 { 0x0e208086, "IVB IOAT Ch0" },
198 { 0x0e218086, "IVB IOAT Ch1" },
199 { 0x0e228086, "IVB IOAT Ch2" },
200 { 0x0e238086, "IVB IOAT Ch3" },
201 { 0x0e248086, "IVB IOAT Ch4" },
202 { 0x0e258086, "IVB IOAT Ch5" },
203 { 0x0e268086, "IVB IOAT Ch6" },
204 { 0x0e278086, "IVB IOAT Ch7" },
205 { 0x0e2e8086, "IVB IOAT Ch0 (RAID)" },
206 { 0x0e2f8086, "IVB IOAT Ch1 (RAID)" },
208 { 0x2f208086, "HSW IOAT Ch0" },
209 { 0x2f218086, "HSW IOAT Ch1" },
210 { 0x2f228086, "HSW IOAT Ch2" },
211 { 0x2f238086, "HSW IOAT Ch3" },
212 { 0x2f248086, "HSW IOAT Ch4" },
213 { 0x2f258086, "HSW IOAT Ch5" },
214 { 0x2f268086, "HSW IOAT Ch6" },
215 { 0x2f278086, "HSW IOAT Ch7" },
216 { 0x2f2e8086, "HSW IOAT Ch0 (RAID)" },
217 { 0x2f2f8086, "HSW IOAT Ch1 (RAID)" },
219 { 0x0c508086, "BWD IOAT Ch0" },
220 { 0x0c518086, "BWD IOAT Ch1" },
221 { 0x0c528086, "BWD IOAT Ch2" },
222 { 0x0c538086, "BWD IOAT Ch3" },
224 { 0x6f508086, "BDXDE IOAT Ch0" },
225 { 0x6f518086, "BDXDE IOAT Ch1" },
226 { 0x6f528086, "BDXDE IOAT Ch2" },
227 { 0x6f538086, "BDXDE IOAT Ch3" },
229 { 0x6f208086, "BDX IOAT Ch0" },
230 { 0x6f218086, "BDX IOAT Ch1" },
231 { 0x6f228086, "BDX IOAT Ch2" },
232 { 0x6f238086, "BDX IOAT Ch3" },
233 { 0x6f248086, "BDX IOAT Ch4" },
234 { 0x6f258086, "BDX IOAT Ch5" },
235 { 0x6f268086, "BDX IOAT Ch6" },
236 { 0x6f278086, "BDX IOAT Ch7" },
237 { 0x6f2e8086, "BDX IOAT Ch0 (RAID)" },
238 { 0x6f2f8086, "BDX IOAT Ch1 (RAID)" },
240 { 0x20218086, "SKX IOAT" },
242 { 0x0b008086, "ICX IOAT" },
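/*
 * [editor's sketch, not in the matched source] Each table entry packs
 * the 16-bit PCI device ID in the upper half and the vendor ID in the
 * lower half, the layout pci_get_devid(9) returns; 0x8086 is Intel.
 */
#include <stdint.h>

static inline uint16_t
devid_device(uint32_t devid)
{
	return ((uint16_t)(devid >> 16));	/* 0x34308086 -> 0x3430 */
}

static inline uint16_t
devid_vendor(uint32_t devid)
{
	return ((uint16_t)(devid & 0xffff));	/* 0x34308086 -> 0x8086 */
}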
249 * OS <-> Driver linkage functions
269 return (0); in ioat_modevent()
280 if (ep->type == type) { in ioat_probe()
281 device_set_desc(device, ep->desc); in ioat_probe()
282 return (0); in ioat_probe()
295 ioat->device = device; in ioat_attach()
296 if (bus_get_domain(device, &ioat->domain) != 0) in ioat_attach()
297 ioat->domain = 0; in ioat_attach()
298 ioat->cpu = CPU_FFS(&cpuset_domain[ioat->domain]) - 1; in ioat_attach()
299 if (ioat->cpu < 0) in ioat_attach()
300 ioat->cpu = CPU_FIRST(); in ioat_attach()
303 if (error != 0) in ioat_attach()
306 ioat->version = ioat_read_cbver(ioat); in ioat_attach()
307 if (ioat->version < IOAT_VER_3_0) { in ioat_attach()
313 if (error != 0) in ioat_attach()
317 if (error != 0) in ioat_attach()
321 if (error != 0) in ioat_attach()
325 if (error != 0) in ioat_attach()
332 for (i = 0; i < IOAT_MAX_CHANNELS; i++) { in ioat_attach()
342 ioat->chan_idx = i; in ioat_attach()
351 if (error != 0) in ioat_attach()
363 if (error != 0) { in ioat_bus_dmamap_destroy()
364 ioat_log_message(0, in ioat_bus_dmamap_destroy()
380 ioat_channel[ioat->chan_idx] = NULL; in ioat_detach()
381 while (ioat_channel_index > 0 && in ioat_detach()
382 ioat_channel[ioat_channel_index - 1] == NULL) in ioat_detach()
383 ioat_channel_index--; in ioat_detach()
386 taskqueue_drain(taskqueue_thread, &ioat->reset_task); in ioat_detach()
388 mtx_lock(&ioat->submit_lock); in ioat_detach()
389 ioat->quiescing = TRUE; in ioat_detach()
390 ioat->destroying = TRUE; in ioat_detach()
391 wakeup(&ioat->quiescing); in ioat_detach()
392 wakeup(&ioat->resetting); in ioat_detach()
395 mtx_unlock(&ioat->submit_lock); in ioat_detach()
396 mtx_lock(&ioat->cleanup_lock); in ioat_detach()
397 while (ioat_get_active(ioat) > 0) in ioat_detach()
398 msleep(&ioat->tail, &ioat->cleanup_lock, 0, "ioat_drain", 1); in ioat_detach()
399 mtx_unlock(&ioat->cleanup_lock); in ioat_detach()
402 callout_drain(&ioat->poll_timer); in ioat_detach()
406 if (ioat->pci_resource != NULL) in ioat_detach()
408 ioat->pci_resource_id, ioat->pci_resource); in ioat_detach()
410 if (ioat->data_tag != NULL) { in ioat_detach()
411 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
413 ioat->data_tag, ioat->ring[i].src_dmamap); in ioat_detach()
414 if (error != 0) in ioat_detach()
417 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
419 ioat->data_tag, ioat->ring[i].dst_dmamap); in ioat_detach()
420 if (error != 0) in ioat_detach()
424 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
426 ioat->data_tag, ioat->ring[i].src2_dmamap); in ioat_detach()
427 if (error != 0) in ioat_detach()
430 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
432 ioat->data_tag, ioat->ring[i].dst2_dmamap); in ioat_detach()
433 if (error != 0) in ioat_detach()
437 bus_dma_tag_destroy(ioat->data_tag); in ioat_detach()
440 if (ioat->ring != NULL) in ioat_detach()
441 ioat_free_ring(ioat, 1 << ioat->ring_size_order, ioat->ring); in ioat_detach()
443 if (ioat->comp_update != NULL) { in ioat_detach()
444 bus_dmamap_unload(ioat->comp_update_tag, ioat->comp_update_map); in ioat_detach()
445 bus_dmamem_free(ioat->comp_update_tag, ioat->comp_update, in ioat_detach()
446 ioat->comp_update_map); in ioat_detach()
447 bus_dma_tag_destroy(ioat->comp_update_tag); in ioat_detach()
450 if (ioat->hw_desc_ring != NULL) { in ioat_detach()
451 bus_dmamap_unload(ioat->hw_desc_tag, ioat->hw_desc_map); in ioat_detach()
452 bus_dmamem_free(ioat->hw_desc_tag, ioat->hw_desc_ring, in ioat_detach()
453 ioat->hw_desc_map); in ioat_detach()
454 bus_dma_tag_destroy(ioat->hw_desc_tag); in ioat_detach()
457 return (0); in ioat_detach()
464 if (ioat->tag != NULL) in ioat_teardown_intr()
465 bus_teardown_intr(ioat->device, ioat->res, ioat->tag); in ioat_teardown_intr()
467 if (ioat->res != NULL) in ioat_teardown_intr()
468 bus_release_resource(ioat->device, SYS_RES_IRQ, in ioat_teardown_intr()
469 rman_get_rid(ioat->res), ioat->res); in ioat_teardown_intr()
471 pci_release_msi(ioat->device); in ioat_teardown_intr()
472 return (0); in ioat_teardown_intr()
485 ioat_acquire(&ioat->dmaengine); in ioat_start_channel()
488 desc = ioat_get_ring_entry(ioat, ioat->head); in ioat_start_channel()
489 hw_desc = &ioat_get_descriptor(ioat, ioat->head)->dma; in ioat_start_channel()
490 dmadesc = &desc->bus_dmadesc; in ioat_start_channel()
492 dmadesc->callback_fn = NULL; in ioat_start_channel()
493 dmadesc->callback_arg = NULL; in ioat_start_channel()
495 hw_desc->u.control_raw = 0; in ioat_start_channel()
496 hw_desc->u.control_generic.op = IOAT_OP_COPY; in ioat_start_channel()
497 hw_desc->u.control_generic.completion_update = 1; in ioat_start_channel()
498 hw_desc->size = 8; in ioat_start_channel()
499 hw_desc->src_addr = 0; in ioat_start_channel()
500 hw_desc->dest_addr = 0; in ioat_start_channel()
501 hw_desc->u.control.null = 1; in ioat_start_channel()
504 ioat_release(&ioat->dmaengine); in ioat_start_channel()
506 for (i = 0; i < 100; i++) { in ioat_start_channel()
510 return (0); in ioat_start_channel()
514 ioat_log_message(0, "could not start channel: " in ioat_start_channel()
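/*
 * Editor's note: ioat_start_channel() above queues a NULL (no-op)
 * COPY descriptor with completion_update set, releases the engine to
 * ring the doorbell, then busy-polls the channel status for up to 100
 * iterations before logging the "could not start channel" failure on
 * the last matched line.
 */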
536 error = 0; in ioat3_attach()
538 ioat->capabilities = ioat_read_dmacapability(ioat); in ioat3_attach()
540 ioat_log_message(0, "Capabilities: %b\n", (int)ioat->capabilities, in ioat3_attach()
544 ioat->max_xfer_size = 1 << xfercap; in ioat3_attach()
546 ioat->intrdelay_supported = (ioat_read_2(ioat, IOAT_INTRDELAY_OFFSET) & in ioat3_attach()
547 IOAT_INTRDELAY_SUPPORTED) != 0; in ioat3_attach()
548 if (ioat->intrdelay_supported) in ioat3_attach()
549 ioat->intrdelay_max = IOAT_INTRDELAY_US_MASK; in ioat3_attach()
553 mtx_init(&ioat->submit_lock, "ioat_submit", NULL, MTX_DEF); in ioat3_attach()
554 mtx_init(&ioat->cleanup_lock, "ioat_cleanup", NULL, MTX_DEF); in ioat3_attach()
555 callout_init(&ioat->poll_timer, 1); in ioat3_attach()
556 TASK_INIT(&ioat->reset_task, 0, ioat_reset_hw_task, ioat); in ioat3_attach()
559 mtx_lock(&ioat->cleanup_lock); in ioat3_attach()
560 mtx_lock(&ioat->submit_lock); in ioat3_attach()
561 mtx_unlock(&ioat->submit_lock); in ioat3_attach()
562 mtx_unlock(&ioat->cleanup_lock); in ioat3_attach()
564 ioat->is_submitter_processing = FALSE; in ioat3_attach()
566 if (ioat->version >= IOAT_VER_3_3) in ioat3_attach()
568 else if (ioat->version >= IOAT_VER_3_2) in ioat3_attach()
573 error = bus_dma_tag_create(bus_get_dma_tag(ioat->device), in ioat3_attach()
574 sizeof(uint64_t), 0x0, lowaddr, BUS_SPACE_MAXADDR, NULL, NULL, in ioat3_attach()
575 sizeof(uint64_t), 1, sizeof(uint64_t), 0, NULL, NULL, in ioat3_attach()
576 &ioat->comp_update_tag); in ioat3_attach()
577 if (error != 0) in ioat3_attach()
580 error = bus_dmamem_alloc(ioat->comp_update_tag, in ioat3_attach()
581 (void **)&ioat->comp_update, BUS_DMA_ZERO | BUS_DMA_WAITOK, in ioat3_attach()
582 &ioat->comp_update_map); in ioat3_attach()
583 if (error != 0) in ioat3_attach()
586 error = bus_dmamap_load(ioat->comp_update_tag, ioat->comp_update_map, in ioat3_attach()
587 ioat->comp_update, sizeof(uint64_t), ioat_comp_update_map, ioat, in ioat3_attach()
589 if (error != 0) in ioat3_attach()
592 ioat->ring_size_order = g_ioat_ring_order; in ioat3_attach()
593 num_descriptors = 1 << ioat->ring_size_order; in ioat3_attach()
596 error = bus_dma_tag_create(bus_get_dma_tag(ioat->device), in ioat3_attach()
597 2 * 1024 * 1024, 0x0, lowaddr, BUS_SPACE_MAXADDR, NULL, NULL, in ioat3_attach()
598 ringsz, 1, ringsz, 0, NULL, NULL, &ioat->hw_desc_tag); in ioat3_attach()
599 if (error != 0) in ioat3_attach()
602 error = bus_dmamem_alloc(ioat->hw_desc_tag, &hw_desc, in ioat3_attach()
603 BUS_DMA_ZERO | BUS_DMA_WAITOK, &ioat->hw_desc_map); in ioat3_attach()
604 if (error != 0) in ioat3_attach()
607 error = bus_dmamap_load(ioat->hw_desc_tag, ioat->hw_desc_map, hw_desc, in ioat3_attach()
608 ringsz, ioat_dmamap_cb, &ioat->hw_desc_bus_addr, BUS_DMA_NOWAIT); in ioat3_attach()
612 ioat->hw_desc_ring = hw_desc; in ioat3_attach()
614 error = bus_dma_tag_create(bus_get_dma_tag(ioat->device), in ioat3_attach()
615 1, 0, lowaddr, BUS_SPACE_MAXADDR, NULL, NULL, in ioat3_attach()
616 ioat->max_xfer_size, 1, ioat->max_xfer_size, 0, NULL, NULL, in ioat3_attach()
617 &ioat->data_tag); in ioat3_attach()
618 if (error != 0) in ioat3_attach()
620 ioat->ring = malloc_domainset(num_descriptors * sizeof(*ring), M_IOAT, in ioat3_attach()
621 DOMAINSET_PREF(ioat->domain), M_ZERO | M_WAITOK); in ioat3_attach()
623 ring = ioat->ring; in ioat3_attach()
624 for (i = 0; i < num_descriptors; i++) { in ioat3_attach()
625 memset(&ring[i].bus_dmadesc, 0, sizeof(ring[i].bus_dmadesc)); in ioat3_attach()
627 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
629 if (error != 0) { in ioat3_attach()
630 ioat_log_message(0, in ioat3_attach()
635 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
637 if (error != 0) { in ioat3_attach()
638 ioat_log_message(0, in ioat3_attach()
643 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
645 if (error != 0) { in ioat3_attach()
646 ioat_log_message(0, in ioat3_attach()
651 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
653 if (error != 0) { in ioat3_attach()
654 ioat_log_message(0, in ioat3_attach()
661 for (i = 0; i < num_descriptors; i++) { in ioat3_attach()
662 dma_hw_desc = &ioat->hw_desc_ring[i].dma; in ioat3_attach()
663 dma_hw_desc->next = RING_PHYS_ADDR(ioat, i + 1); in ioat3_attach()
666 ioat->tail = ioat->head = 0; in ioat3_attach()
667 *ioat->comp_update = ioat->last_seen = in ioat3_attach()
668 RING_PHYS_ADDR(ioat, ioat->tail - 1); in ioat3_attach()
669 return (0); in ioat3_attach()
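/*
 * [editor's sketch, not in the matched source] The descriptor-linking
 * loop above chains each hardware descriptor's "next" field to the bus
 * address of slot i + 1; because RING_PHYS_ADDR() wraps its index, the
 * last descriptor points back at slot 0, making the ring circular.
 * Assuming one contiguous allocation, the address math is:
 */
#include <stddef.h>
#include <stdint.h>

static inline uint64_t
ring_phys_addr(uint64_t ring_base, uint32_t index, uint32_t order,
    size_t desc_size)
{
	/* wrap the index to the power-of-two ring before scaling */
	return (ring_base + (uint64_t)(index & ((1u << order) - 1)) * desc_size);
}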
676 ioat->pci_resource_id = PCIR_BAR(0); in ioat_map_pci_bar()
677 ioat->pci_resource = bus_alloc_resource_any(ioat->device, in ioat_map_pci_bar()
678 SYS_RES_MEMORY, &ioat->pci_resource_id, RF_ACTIVE); in ioat_map_pci_bar()
680 if (ioat->pci_resource == NULL) { in ioat_map_pci_bar()
681 ioat_log_message(0, "unable to allocate pci resource\n"); in ioat_map_pci_bar()
685 ioat->pci_bus_tag = rman_get_bustag(ioat->pci_resource); in ioat_map_pci_bar()
686 ioat->pci_bus_handle = rman_get_bushandle(ioat->pci_resource); in ioat_map_pci_bar()
687 return (0); in ioat_map_pci_bar()
695 KASSERT(error == 0, ("%s: error:%d", __func__, error)); in ioat_comp_update_map()
696 ioat->comp_update_bus_addr = seg[0].ds_addr; in ioat_comp_update_map()
704 KASSERT(error == 0, ("%s: error:%d", __func__, error)); in ioat_dmamap_cb()
706 *baddr = segs->ds_addr; in ioat_dmamap_cb()
721 if (!g_force_legacy_interrupts && pci_msix_count(ioat->device) >= 1) { in ioat_setup_intr()
723 pci_alloc_msix(ioat->device, &num_vectors); in ioat_setup_intr()
729 ioat->rid = 1; in ioat_setup_intr()
730 ioat->res = bus_alloc_resource_any(ioat->device, SYS_RES_IRQ, in ioat_setup_intr()
731 &ioat->rid, RF_ACTIVE); in ioat_setup_intr()
733 ioat->rid = 0; in ioat_setup_intr()
734 ioat->res = bus_alloc_resource_any(ioat->device, SYS_RES_IRQ, in ioat_setup_intr()
735 &ioat->rid, RF_SHAREABLE | RF_ACTIVE); in ioat_setup_intr()
737 if (ioat->res == NULL) { in ioat_setup_intr()
738 ioat_log_message(0, "bus_alloc_resource failed\n"); in ioat_setup_intr()
742 ioat->tag = NULL; in ioat_setup_intr()
743 error = bus_setup_intr(ioat->device, ioat->res, INTR_MPSAFE | in ioat_setup_intr()
744 INTR_TYPE_MISC, NULL, ioat_interrupt_handler, ioat, &ioat->tag); in ioat_setup_intr()
745 if (error != 0) { in ioat_setup_intr()
746 ioat_log_message(0, "bus_setup_intr failed\n"); in ioat_setup_intr()
751 return (0); in ioat_setup_intr()
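/*
 * Editor's note: ioat_setup_intr() above prefers a single MSI-X
 * vector (rid 1, exclusive) unless the force_legacy_interrupts
 * tunable is set or no vector is available, in which case it falls
 * back to the shared legacy INTx resource (rid 0, RF_SHAREABLE).
 */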
759 pciid = pci_get_devid(ioat->device); in ioat_model_resets_msix()
762 case 0x0c508086: in ioat_model_resets_msix()
763 case 0x0c518086: in ioat_model_resets_msix()
764 case 0x0c528086: in ioat_model_resets_msix()
765 case 0x0c538086: in ioat_model_resets_msix()
767 case 0x6f508086: in ioat_model_resets_msix()
768 case 0x6f518086: in ioat_model_resets_msix()
769 case 0x6f528086: in ioat_model_resets_msix()
770 case 0x6f538086: in ioat_model_resets_msix()
782 ioat->stats.interrupts++; in ioat_interrupt_handler()
790 if (chanerr == 0) in chanerr_to_errno()
791 return (0); in chanerr_to_errno()
792 if ((chanerr & (IOAT_CHANERR_XSADDERR | IOAT_CHANERR_XDADDERR)) != 0) in chanerr_to_errno()
794 if ((chanerr & (IOAT_CHANERR_RDERR | IOAT_CHANERR_WDERR)) != 0) in chanerr_to_errno()
797 if ((chanerr & IOAT_CHANERR_NDADDERR) != 0) in chanerr_to_errno()
812 mtx_lock(&ioat->cleanup_lock); in ioat_process_events()
814 if (!mtx_trylock(&ioat->cleanup_lock)) in ioat_process_events()
823 if (ioat->resetting_cleanup) { in ioat_process_events()
824 mtx_unlock(&ioat->cleanup_lock); in ioat_process_events()
828 completed = 0; in ioat_process_events()
829 comp_update = *ioat->comp_update; in ioat_process_events()
832 if (status < ioat->hw_desc_bus_addr || in ioat_process_events()
833 status >= ioat->hw_desc_bus_addr + (1 << ioat->ring_size_order) * in ioat_process_events()
836 (uintmax_t)status, ioat->chan_idx); in ioat_process_events()
838 if (status == ioat->last_seen) { in ioat_process_events()
845 CTR4(KTR_IOAT, "%s channel=%u hw_status=0x%lx last_seen=0x%lx", in ioat_process_events()
846 __func__, ioat->chan_idx, comp_update, ioat->last_seen); in ioat_process_events()
848 while (RING_PHYS_ADDR(ioat, ioat->tail - 1) != status) { in ioat_process_events()
849 desc = ioat_get_ring_entry(ioat, ioat->tail); in ioat_process_events()
850 dmadesc = &desc->bus_dmadesc; in ioat_process_events()
852 ioat->chan_idx, ioat->tail, dmadesc, dmadesc->callback_fn, in ioat_process_events()
853 dmadesc->callback_arg); in ioat_process_events()
855 bus_dmamap_unload(ioat->data_tag, desc->src_dmamap); in ioat_process_events()
856 bus_dmamap_unload(ioat->data_tag, desc->dst_dmamap); in ioat_process_events()
857 bus_dmamap_unload(ioat->data_tag, desc->src2_dmamap); in ioat_process_events()
858 bus_dmamap_unload(ioat->data_tag, desc->dst2_dmamap); in ioat_process_events()
860 if (dmadesc->callback_fn != NULL) in ioat_process_events()
861 dmadesc->callback_fn(dmadesc->callback_arg, 0); in ioat_process_events()
864 ioat->tail++; in ioat_process_events()
867 ioat->chan_idx, ioat->head, ioat->tail, ioat_get_active(ioat)); in ioat_process_events()
869 if (completed != 0) { in ioat_process_events()
870 ioat->last_seen = RING_PHYS_ADDR(ioat, ioat->tail - 1); in ioat_process_events()
871 ioat->stats.descriptors_processed += completed; in ioat_process_events()
872 wakeup(&ioat->tail); in ioat_process_events()
877 mtx_unlock(&ioat->cleanup_lock); in ioat_process_events()
890 ioat->stats.channel_halts++; in ioat_process_events()
896 mtx_lock(&ioat->submit_lock); in ioat_process_events()
897 ioat->quiescing = TRUE; in ioat_process_events()
898 mtx_unlock(&ioat->submit_lock); in ioat_process_events()
907 mtx_lock(&ioat->cleanup_lock); in ioat_process_events()
908 ioat->resetting_cleanup = TRUE; in ioat_process_events()
913 ioat->stats.last_halt_chanerr = chanerr; in ioat_process_events()
915 while (ioat_get_active(ioat) > 0) { in ioat_process_events()
916 desc = ioat_get_ring_entry(ioat, ioat->tail); in ioat_process_events()
917 dmadesc = &desc->bus_dmadesc; in ioat_process_events()
919 ioat->chan_idx, ioat->tail, dmadesc, dmadesc->callback_fn, in ioat_process_events()
920 dmadesc->callback_arg); in ioat_process_events()
922 if (dmadesc->callback_fn != NULL) in ioat_process_events()
923 dmadesc->callback_fn(dmadesc->callback_arg, in ioat_process_events()
926 ioat->tail++; in ioat_process_events()
927 ioat->stats.descriptors_processed++; in ioat_process_events()
928 ioat->stats.descriptors_error++; in ioat_process_events()
931 ioat->chan_idx, ioat->head, ioat->tail, ioat_get_active(ioat)); in ioat_process_events()
936 mtx_unlock(&ioat->cleanup_lock); in ioat_process_events()
938 ioat_log_message(0, "Resetting channel to recover from error\n"); in ioat_process_events()
939 error = taskqueue_enqueue(taskqueue_thread, &ioat->reset_task); in ioat_process_events()
940 KASSERT(error == 0, in ioat_process_events()
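/*
 * [editor's model, not in the matched source] The completion loop in
 * ioat_process_events() retires descriptors from tail until the slot
 * at tail - 1 is the one whose bus address the hardware last wrote to
 * *comp_update (the driver first validates that address against the
 * ring bounds). A minimal index-based analogue:
 */
#include <stdint.h>

struct sw_desc {
	void	(*cb)(void *arg, int error);
	void	*arg;
};

static unsigned
drain_completions(struct sw_desc *ring, uint32_t *tailp, uint32_t order,
    uint32_t hw_done_slot)	/* slot form of the completion address */
{
	uint32_t mask = (1u << order) - 1;
	unsigned completed = 0;

	while (((*tailp - 1) & mask) != hw_done_slot) {
		struct sw_desc *d = &ring[*tailp & mask];

		if (d->cb != NULL)
			d->cb(d->arg, 0);
		(*tailp)++;
		completed++;
	}
	return (completed);
}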
954 KASSERT(error == 0, ("%s: reset failed: %d", __func__, error)); in ioat_reset_hw_task()
972 KASSERT((flags & ~(M_NOWAIT | M_WAITOK)) == 0, in ioat_get_dmaengine()
973 ("invalid flags: 0x%08x", flags)); in ioat_get_dmaengine()
983 mtx_lock(&ioat->submit_lock); in ioat_get_dmaengine()
986 if (ioat->destroying) { in ioat_get_dmaengine()
987 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
992 if (ioat->quiescing) { in ioat_get_dmaengine()
993 if ((flags & M_NOWAIT) != 0) { in ioat_get_dmaengine()
995 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
999 while (ioat->quiescing && !ioat->destroying) in ioat_get_dmaengine()
1000 msleep(&ioat->quiescing, &ioat->submit_lock, 0, "getdma", 0); in ioat_get_dmaengine()
1002 if (ioat->destroying) { in ioat_get_dmaengine()
1004 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
1008 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
1009 return (&ioat->dmaengine); in ioat_get_dmaengine()
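/*
 * [editor's sketch of the consumer side, hedged against the ioat(4)
 * interface this file implements; example_done() and example_copy()
 * are hypothetical names] Hold a channel reference across use,
 * bracket submission with acquire/release, then drop the reference:
 */
static void example_done(void *arg, int error);	/* hypothetical callback */

static int
example_copy(bus_addr_t dst, bus_addr_t src, bus_size_t len)
{
	bus_dmaengine_t eng;
	struct bus_dmadesc *d;

	eng = ioat_get_dmaengine(0, M_WAITOK);	/* channel index 0 */
	if (eng == NULL)
		return (ENXIO);
	ioat_acquire(eng);
	d = ioat_copy(eng, dst, src, len, example_done, NULL, DMA_INT_EN);
	ioat_release(eng);	/* rings the doorbell for queued work */
	ioat_put_dmaengine(eng);
	return (d == NULL ? EAGAIN : 0);
}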
1018 mtx_lock(&ioat->submit_lock); in ioat_put_dmaengine()
1020 mtx_unlock(&ioat->submit_lock); in ioat_put_dmaengine()
1029 return (ioat->version); in ioat_get_hwversion()
1038 return (ioat->max_xfer_size); in ioat_get_max_io_size()
1047 return (ioat->capabilities); in ioat_get_capabilities()
1056 return (bus_get_domain(ioat->device, domain)); in ioat_get_domain()
1065 if (!ioat->intrdelay_supported) in ioat_set_interrupt_coalesce()
1067 if (delay > ioat->intrdelay_max) in ioat_set_interrupt_coalesce()
1071 ioat->cached_intrdelay = in ioat_set_interrupt_coalesce()
1073 return (0); in ioat_set_interrupt_coalesce()
1082 return (ioat->intrdelay_max); in ioat_get_max_coalesce_period()
1091 mtx_lock(&ioat->submit_lock); in ioat_acquire()
1092 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_acquire()
1093 ioat->acq_head = ioat->head; in ioat_acquire()
1106 if (error != 0) in ioat_acquire_reserve()
1118 ioat->chan_idx, ioat->head); in ioat_release()
1121 ioat->chan_idx, ioat->head); in ioat_release()
1123 if (ioat->acq_head != ioat->head) { in ioat_release()
1125 (uint16_t)ioat->head); in ioat_release()
1127 if (!callout_pending(&ioat->poll_timer)) { in ioat_release()
1128 callout_reset_on(&ioat->poll_timer, 1, in ioat_release()
1129 ioat_poll_timer_callback, ioat, ioat->cpu); in ioat_release()
1132 mtx_unlock(&ioat->submit_lock); in ioat_release()
1146 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_op_generic()
1148 KASSERT((flags & ~_DMA_GENERIC_FLAGS) == 0, in ioat_op_generic()
1150 KASSERT(size <= ioat->max_xfer_size, ("%s: size too big (%u > %u)", in ioat_op_generic()
1151 __func__, (unsigned)size, ioat->max_xfer_size)); in ioat_op_generic()
1153 if ((flags & DMA_NO_WAIT) != 0) in ioat_op_generic()
1158 if (ioat_reserve_space(ioat, 1, mflags) != 0) in ioat_op_generic()
1161 desc = ioat_get_ring_entry(ioat, ioat->head); in ioat_op_generic()
1162 hw_desc = &ioat_get_descriptor(ioat, ioat->head)->generic; in ioat_op_generic()
1164 hw_desc->u.control_raw = 0; in ioat_op_generic()
1165 hw_desc->u.control_generic.op = op; in ioat_op_generic()
1166 hw_desc->u.control_generic.completion_update = 1; in ioat_op_generic()
1168 if ((flags & DMA_INT_EN) != 0) in ioat_op_generic()
1169 hw_desc->u.control_generic.int_enable = 1; in ioat_op_generic()
1170 if ((flags & DMA_FENCE) != 0) in ioat_op_generic()
1171 hw_desc->u.control_generic.fence = 1; in ioat_op_generic()
1173 hw_desc->size = size; in ioat_op_generic()
1175 if (src != 0) { in ioat_op_generic()
1176 nseg = -1; in ioat_op_generic()
1177 error = _bus_dmamap_load_phys(ioat->data_tag, desc->src_dmamap, in ioat_op_generic()
1178 src, size, 0, &seg, &nseg); in ioat_op_generic()
1179 if (error != 0) { in ioat_op_generic()
1180 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_op_generic()
1184 hw_desc->src_addr = seg.ds_addr; in ioat_op_generic()
1187 if (dst != 0) { in ioat_op_generic()
1188 nseg = -1; in ioat_op_generic()
1189 error = _bus_dmamap_load_phys(ioat->data_tag, desc->dst_dmamap, in ioat_op_generic()
1190 dst, size, 0, &seg, &nseg); in ioat_op_generic()
1191 if (error != 0) { in ioat_op_generic()
1192 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_op_generic()
1196 hw_desc->dest_addr = seg.ds_addr; in ioat_op_generic()
1199 desc->bus_dmadesc.callback_fn = callback_fn; in ioat_op_generic()
1200 desc->bus_dmadesc.callback_arg = callback_arg; in ioat_op_generic()
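/*
 * Editor's note: the nseg = -1 presets throughout this file appear to
 * follow the in/out segment-index convention of the internal
 * _bus_dmamap_load_phys() KPI (it starts at -1 and is advanced per
 * segment); with nsegments fixed at 1 in the data tag, each load must
 * yield exactly one segment whose ds_addr becomes the hardware
 * src/dest address.
 */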
1213 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_null()
1215 desc = ioat_op_generic(ioat, IOAT_OP_COPY, 8, 0, 0, callback_fn, in ioat_null()
1220 hw_desc = &ioat_get_descriptor(ioat, desc->id)->dma; in ioat_null()
1221 hw_desc->u.control.null = 1; in ioat_null()
1223 return (&desc->bus_dmadesc); in ioat_null()
1241 hw_desc = &ioat_get_descriptor(ioat, desc->id)->dma; in ioat_copy()
1247 __func__, ioat->chan_idx, &desc->bus_dmadesc, dst, src, len); in ioat_copy()
1248 return (&desc->bus_dmadesc); in ioat_copy()
1264 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_copy_8k_aligned()
1266 KASSERT(((src1 | src2 | dst1 | dst2) & PAGE_MASK) == 0, in ioat_copy_8k_aligned()
1267 ("%s: addresses are not page-aligned", __func__)); in ioat_copy_8k_aligned()
1269 desc = ioat_op_generic(ioat, IOAT_OP_COPY, 2 * PAGE_SIZE, 0, 0, in ioat_copy_8k_aligned()
1274 hw_desc = &ioat_get_descriptor(ioat, desc->id)->dma; in ioat_copy_8k_aligned()
1277 nseg = -1; in ioat_copy_8k_aligned()
1278 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1279 desc->src_dmamap, src1, src1_len, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1280 if (error != 0) { in ioat_copy_8k_aligned()
1281 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_copy_8k_aligned()
1285 hw_desc->src_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1287 hw_desc->u.control.src_page_break = 1; in ioat_copy_8k_aligned()
1288 nseg = -1; in ioat_copy_8k_aligned()
1289 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1290 desc->src2_dmamap, src2, PAGE_SIZE, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1291 if (error != 0) { in ioat_copy_8k_aligned()
1292 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_copy_8k_aligned()
1296 hw_desc->next_src_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1300 nseg = -1; in ioat_copy_8k_aligned()
1301 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1302 desc->dst_dmamap, dst1, dst1_len, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1303 if (error != 0) { in ioat_copy_8k_aligned()
1304 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_copy_8k_aligned()
1308 hw_desc->dest_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1310 hw_desc->u.control.dest_page_break = 1; in ioat_copy_8k_aligned()
1311 nseg = -1; in ioat_copy_8k_aligned()
1312 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1313 desc->dst2_dmamap, dst2, PAGE_SIZE, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1314 if (error != 0) { in ioat_copy_8k_aligned()
1315 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_copy_8k_aligned()
1319 hw_desc->next_dest_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1326 return (&desc->bus_dmadesc); in ioat_copy_8k_aligned()
1343 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_copy_crc()
1345 KASSERT((ioat->capabilities & IOAT_DMACAP_MOVECRC) != 0, in ioat_copy_crc()
1350 KASSERT(teststore != 0 || (flags & DMA_CRC_INLINE) == 0, in ioat_copy_crc()
1361 KASSERT(teststore == 0, ("bogus")); in ioat_copy_crc()
1371 hw_desc = &ioat_get_descriptor(ioat, desc->id)->crc32; in ioat_copy_crc()
1373 if ((flags & DMA_CRC_INLINE) == 0) { in ioat_copy_crc()
1374 nseg = -1; in ioat_copy_crc()
1375 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_crc()
1376 desc->dst2_dmamap, crcptr, sizeof(uint32_t), 0, in ioat_copy_crc()
1378 if (error != 0) { in ioat_copy_crc()
1379 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_copy_crc()
1383 hw_desc->crc_address = seg.ds_addr; in ioat_copy_crc()
1385 hw_desc->u.control.crc_location = 1; in ioat_copy_crc()
1388 hw_desc->u.control.use_seed = 1; in ioat_copy_crc()
1389 hw_desc->seed = *initialseed; in ioat_copy_crc()
1396 return (&desc->bus_dmadesc); in ioat_copy_crc()
1413 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_crc()
1415 KASSERT((ioat->capabilities & IOAT_DMACAP_CRC) != 0, in ioat_crc()
1420 KASSERT(teststore != 0 || (flags & DMA_CRC_INLINE) == 0, in ioat_crc()
1431 KASSERT(teststore == 0, ("bogus")); in ioat_crc()
1436 desc = ioat_op_generic(ioat, op, len, src, 0, callback_fn, in ioat_crc()
1441 hw_desc = &ioat_get_descriptor(ioat, desc->id)->crc32; in ioat_crc()
1443 if ((flags & DMA_CRC_INLINE) == 0) { in ioat_crc()
1444 nseg = -1; in ioat_crc()
1445 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_crc()
1446 desc->dst2_dmamap, crcptr, sizeof(uint32_t), 0, in ioat_crc()
1448 if (error != 0) { in ioat_crc()
1449 ioat_log_message(0, "%s: _bus_dmamap_load_phys" in ioat_crc()
1453 hw_desc->crc_address = seg.ds_addr; in ioat_crc()
1455 hw_desc->u.control.crc_location = 1; in ioat_crc()
1458 hw_desc->u.control.use_seed = 1; in ioat_crc()
1459 hw_desc->seed = *initialseed; in ioat_crc()
1466 return (&desc->bus_dmadesc); in ioat_crc()
1479 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_blockfill()
1481 KASSERT((ioat->capabilities & IOAT_DMACAP_BFILL) != 0, in ioat_blockfill()
1484 desc = ioat_op_generic(ioat, IOAT_OP_FILL, len, 0, dst, in ioat_blockfill()
1489 hw_desc = &ioat_get_descriptor(ioat, desc->id)->fill; in ioat_blockfill()
1490 hw_desc->src_data = fillpattern; in ioat_blockfill()
1495 return (&desc->bus_dmadesc); in ioat_blockfill()
1505 return ((ioat->head - ioat->tail) & ((1 << ioat->ring_size_order) - 1)); in ioat_get_active()
1512 return ((1 << ioat->ring_size_order) - ioat_get_active(ioat) - 1); in ioat_get_ring_space()
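/*
 * [editor's worked example, not in the matched source] With
 * ring_size_order == 3 the ring has 8 slots; head == 10, tail == 5:
 */
#include <assert.h>
#include <stdint.h>

static void
ring_math_example(void)
{
	uint32_t order = 3, size = 1u << order;		/* 8 slots */
	uint32_t head = 10, tail = 5;
	uint32_t active = (head - tail) & (size - 1);	/* == 5 */
	uint32_t space = size - active - 1;	/* == 2; one slot stays free */

	assert(active == 5 && space == 2);
}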
1537 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_reserve_space()
1538 error = 0; in ioat_reserve_space()
1541 if (num_descs < 1 || num_descs >= (1 << ioat->ring_size_order)) { in ioat_reserve_space()
1547 if (ioat->quiescing) { in ioat_reserve_space()
1556 ioat->chan_idx, num_descs); in ioat_reserve_space()
1558 if (!dug && !ioat->is_submitter_processing) { in ioat_reserve_space()
1559 ioat->is_submitter_processing = TRUE; in ioat_reserve_space()
1560 mtx_unlock(&ioat->submit_lock); in ioat_reserve_space()
1563 __func__, ioat->chan_idx); in ioat_reserve_space()
1566 mtx_lock(&ioat->submit_lock); in ioat_reserve_space()
1568 KASSERT(ioat->is_submitter_processing == TRUE, in ioat_reserve_space()
1570 ioat->is_submitter_processing = FALSE; in ioat_reserve_space()
1571 wakeup(&ioat->tail); in ioat_reserve_space()
1575 if ((mflags & M_WAITOK) == 0) { in ioat_reserve_space()
1580 __func__, ioat->chan_idx); in ioat_reserve_space()
1581 msleep(&ioat->tail, &ioat->submit_lock, 0, in ioat_reserve_space()
1582 "ioat_full", 0); in ioat_reserve_space()
1587 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_reserve_space()
1588 KASSERT(!ioat->quiescing || error == ENXIO, in ioat_reserve_space()
1605 return (&ioat->ring[index % (1 << ioat->ring_size_order)]); in ioat_get_ring_entry()
1612 return (&ioat->hw_desc_ring[index % (1 << ioat->ring_size_order)]); in ioat_get_descriptor()
1620 ioat_log_message(0, "Channel halted (%b)\n", (int)chanerr, in ioat_halted_debug()
1622 if (chanerr == 0) in ioat_halted_debug()
1625 mtx_assert(&ioat->cleanup_lock, MA_OWNED); in ioat_halted_debug()
1627 desc = ioat_get_descriptor(ioat, ioat->tail + 0); in ioat_halted_debug()
1630 desc = ioat_get_descriptor(ioat, ioat->tail + 1); in ioat_halted_debug()
1644 mtx_lock(&ioat->submit_lock); in ioat_poll_timer_callback()
1645 if (ioat_get_active(ioat) > 0) in ioat_poll_timer_callback()
1646 callout_schedule(&ioat->poll_timer, 1); in ioat_poll_timer_callback()
1647 mtx_unlock(&ioat->submit_lock); in ioat_poll_timer_callback()
1657 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_submit_single()
1659 ioat->head++; in ioat_submit_single()
1661 ioat->chan_idx, ioat->head, ioat->tail); in ioat_submit_single()
1663 ioat->stats.descriptors_submitted++; in ioat_submit_single()
1674 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_reset_hw()
1676 mtx_lock(&ioat->submit_lock); in ioat_reset_hw()
1677 while (ioat->resetting && !ioat->destroying) in ioat_reset_hw()
1678 msleep(&ioat->resetting, &ioat->submit_lock, 0, "IRH_drain", 0); in ioat_reset_hw()
1679 if (ioat->destroying) { in ioat_reset_hw()
1680 mtx_unlock(&ioat->submit_lock); in ioat_reset_hw()
1683 ioat->resetting = TRUE; in ioat_reset_hw()
1684 ioat->quiescing = TRUE; in ioat_reset_hw()
1685 mtx_unlock(&ioat->submit_lock); in ioat_reset_hw()
1686 mtx_lock(&ioat->cleanup_lock); in ioat_reset_hw()
1687 while (ioat_get_active(ioat) > 0) in ioat_reset_hw()
1688 msleep(&ioat->tail, &ioat->cleanup_lock, 0, "ioat_drain", 1); in ioat_reset_hw()
1694 ioat->resetting_cleanup = TRUE; in ioat_reset_hw()
1695 mtx_unlock(&ioat->cleanup_lock); in ioat_reset_hw()
1698 ioat->chan_idx); in ioat_reset_hw()
1705 for (timeout = 0; (is_ioat_active(status) || is_ioat_idle(status)) && in ioat_reset_hw()
1715 KASSERT(ioat_get_active(ioat) == 0, ("active after quiesce")); in ioat_reset_hw()
1721 ioat->chan_idx); in ioat_reset_hw()
1724 * IOAT v3 workaround - CHANERRMSK_INT with 3E07h to mask out errors in ioat_reset_hw()
1727 pci_write_config(ioat->device, IOAT_CFG_CHANERRMASK_INT_OFFSET, 0x3e07, in ioat_reset_hw()
1729 chanerr = pci_read_config(ioat->device, IOAT_CFG_CHANERR_INT_OFFSET, 4); in ioat_reset_hw()
1730 pci_write_config(ioat->device, IOAT_CFG_CHANERR_INT_OFFSET, chanerr, 4); in ioat_reset_hw()
1733 * BDXDE and BWD models reset MSI-X registers on device reset. in ioat_reset_hw()
1737 ioat_log_message(1, "device resets MSI-X registers; saving\n"); in ioat_reset_hw()
1738 pci_save_state(ioat->device); in ioat_reset_hw()
1743 ioat->chan_idx); in ioat_reset_hw()
1746 for (timeout = 0; ioat_reset_pending(ioat) && timeout < 20; timeout++) in ioat_reset_hw()
1755 pci_restore_state(ioat->device); in ioat_reset_hw()
1762 ioat_log_message(0, "Device is active after a reset?\n"); in ioat_reset_hw()
1764 error = 0; in ioat_reset_hw()
1769 if (chanerr != 0) { in ioat_reset_hw()
1770 mtx_lock(&ioat->cleanup_lock); in ioat_reset_hw()
1772 mtx_unlock(&ioat->cleanup_lock); in ioat_reset_hw()
1784 ioat->tail = ioat->head = 0; in ioat_reset_hw()
1785 *ioat->comp_update = ioat->last_seen = in ioat_reset_hw()
1786 RING_PHYS_ADDR(ioat, ioat->tail - 1); in ioat_reset_hw()
1789 ioat_write_chancmp(ioat, ioat->comp_update_bus_addr); in ioat_reset_hw()
1790 ioat_write_chainaddr(ioat, RING_PHYS_ADDR(ioat, 0)); in ioat_reset_hw()
1791 error = 0; in ioat_reset_hw()
1793 ioat->chan_idx); in ioat_reset_hw()
1797 if (error == 0) { in ioat_reset_hw()
1800 ioat->chan_idx); in ioat_reset_hw()
1806 mtx_lock(&ioat->cleanup_lock); in ioat_reset_hw()
1807 ioat->resetting_cleanup = FALSE; in ioat_reset_hw()
1808 mtx_unlock(&ioat->cleanup_lock); in ioat_reset_hw()
1811 mtx_lock(&ioat->submit_lock); in ioat_reset_hw()
1812 ioat->quiescing = FALSE; in ioat_reset_hw()
1813 wakeup(&ioat->quiescing); in ioat_reset_hw()
1815 ioat->resetting = FALSE; in ioat_reset_hw()
1816 wakeup(&ioat->resetting); in ioat_reset_hw()
1818 CTR2(KTR_IOAT, "%s channel=%u reset done", __func__, ioat->chan_idx); in ioat_reset_hw()
1819 mtx_unlock(&ioat->submit_lock); in ioat_reset_hw()
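/*
 * Editor's summary of ioat_reset_hw() above: serialize against other
 * resetters, mark the channel quiescing, drain active descriptors,
 * suspend and poll the channel, apply the v3 CHANERRMSK_INT 3E07h
 * workaround, save PCI state on BWD/BDXDE parts whose reset clobbers
 * the MSI-X registers, reset and poll for completion, restore state,
 * clear CHANERR, re-arm the ring (head = tail = 0, completion and
 * chain addresses rewritten), then wake quiescing/resetting waiters.
 */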
1860 if (error != 0 || req->newptr == NULL) in sysctl_handle_chansts()
1878 if (ioat->stats.interrupts == 0) { in sysctl_handle_dpi()
1882 rate = ioat->stats.descriptors_processed * factor / in sysctl_handle_dpi()
1883 ioat->stats.interrupts; in sysctl_handle_dpi()
1890 if (error != 0 || req->newptr == NULL) in sysctl_handle_dpi()
1903 arg = 0; in sysctl_handle_reset()
1905 if (error != 0 || req->newptr == NULL) in sysctl_handle_reset()
1909 if (error != 0) in sysctl_handle_reset()
1912 if (arg != 0) in sysctl_handle_reset()
1923 for (i = 0; i < 2; i++) { in dump_descriptor()
1924 for (j = 0; j < 8; j++) in dump_descriptor()
1944 &ioat->version, 0, "HW version (0xMM form)"); in ioat_setup_sysctl()
1946 &ioat->max_xfer_size, 0, "HW maximum transfer size"); in ioat_setup_sysctl()
1948 &ioat->intrdelay_supported, 0, "Is INTRDELAY supported"); in ioat_setup_sysctl()
1950 &ioat->intrdelay_max, 0, in ioat_setup_sysctl()
1958 &ioat->ring_size_order, 0, "SW descriptor ring size order"); in ioat_setup_sysctl()
1959 SYSCTL_ADD_UINT(ctx, state, OID_AUTO, "head", CTLFLAG_RD, &ioat->head, in ioat_setup_sysctl()
1960 0, "SW descriptor head pointer index"); in ioat_setup_sysctl()
1961 SYSCTL_ADD_UINT(ctx, state, OID_AUTO, "tail", CTLFLAG_RD, &ioat->tail, in ioat_setup_sysctl()
1962 0, "SW descriptor tail pointer index"); in ioat_setup_sysctl()
1965 ioat->comp_update, "HW addr of last completion"); in ioat_setup_sysctl()
1968 CTLFLAG_RD, &ioat->is_submitter_processing, 0, in ioat_setup_sysctl()
1972 CTLTYPE_STRING | CTLFLAG_RD | CTLFLAG_MPSAFE, ioat, 0, in ioat_setup_sysctl()
1976 &ioat->cached_intrdelay, 0, in ioat_setup_sysctl()
1985 CTLTYPE_INT | CTLFLAG_RW | CTLFLAG_MPSAFE, ioat, 0, in ioat_setup_sysctl()
1986 sysctl_handle_reset, "I", "Set to non-zero to reset the hardware"); in ioat_setup_sysctl()
1993 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.interrupts, in ioat_setup_sysctl()
1996 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.descriptors_processed, in ioat_setup_sysctl()
1999 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.descriptors_submitted, in ioat_setup_sysctl()
2002 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.descriptors_error, in ioat_setup_sysctl()
2005 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.channel_halts, 0, in ioat_setup_sysctl()
2008 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.last_halt_chanerr, 0, in ioat_setup_sysctl()
2012 CTLTYPE_STRING | CTLFLAG_RD | CTLFLAG_MPSAFE, ioat, 0, in ioat_setup_sysctl()
2020 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_get()
2021 KASSERT(ioat->refcnt < UINT32_MAX, ("refcnt overflow")); in ioat_get()
2023 ioat->refcnt++; in ioat_get()
2030 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_put()
2031 KASSERT(ioat->refcnt >= 1, ("refcnt error")); in ioat_put()
2033 if (--ioat->refcnt == 0) in ioat_put()
2034 wakeup(&ioat->refcnt); in ioat_put()
2041 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_drain_locked()
2043 while (ioat->refcnt > 0) in ioat_drain_locked()
2044 msleep(&ioat->refcnt, &ioat->submit_lock, 0, "ioat_drain", 0); in ioat_drain_locked()
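/*
 * Editor's note: ioat_get()/ioat_put()/ioat_drain_locked() above form
 * a sleepable reference count under submit_lock; the final put wakes
 * any thread sleeping on &ioat->refcnt in the drain loop.
 */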
2048 #define _db_show_lock(lo) LOCK_CLASS(lo)->lc_ddb_show(lo)
2049 #define db_show_lock(lk) _db_show_lock(&(lk)->lock_object)
2066 db_printf(" version: %d\n", sc->version); in DB_SHOW_COMMAND()
2067 db_printf(" chan_idx: %u\n", sc->chan_idx); in DB_SHOW_COMMAND()
2069 db_show_lock(&sc->submit_lock); in DB_SHOW_COMMAND()
2071 db_printf(" capabilities: %b\n", (int)sc->capabilities, in DB_SHOW_COMMAND()
2073 db_printf(" cached_intrdelay: %u\n", sc->cached_intrdelay); in DB_SHOW_COMMAND()
2074 db_printf(" *comp_update: 0x%jx\n", (uintmax_t)*sc->comp_update); in DB_SHOW_COMMAND()
2077 db_printf(" c_time: %ju\n", (uintmax_t)sc->poll_timer.c_time); in DB_SHOW_COMMAND()
2078 db_printf(" c_arg: %p\n", sc->poll_timer.c_arg); in DB_SHOW_COMMAND()
2079 db_printf(" c_func: %p\n", sc->poll_timer.c_func); in DB_SHOW_COMMAND()
2080 db_printf(" c_lock: %p\n", sc->poll_timer.c_lock); in DB_SHOW_COMMAND()
2081 db_printf(" c_flags: 0x%x\n", (unsigned)sc->poll_timer.c_flags); in DB_SHOW_COMMAND()
2083 db_printf(" quiescing: %d\n", (int)sc->quiescing); in DB_SHOW_COMMAND()
2084 db_printf(" destroying: %d\n", (int)sc->destroying); in DB_SHOW_COMMAND()
2086 (int)sc->is_submitter_processing); in DB_SHOW_COMMAND()
2087 db_printf(" intrdelay_supported: %d\n", (int)sc->intrdelay_supported); in DB_SHOW_COMMAND()
2088 db_printf(" resetting: %d\n", (int)sc->resetting); in DB_SHOW_COMMAND()
2090 db_printf(" head: %u\n", sc->head); in DB_SHOW_COMMAND()
2091 db_printf(" tail: %u\n", sc->tail); in DB_SHOW_COMMAND()
2092 db_printf(" ring_size_order: %u\n", sc->ring_size_order); in DB_SHOW_COMMAND()
2093 db_printf(" last_seen: 0x%lx\n", sc->last_seen); in DB_SHOW_COMMAND()
2094 db_printf(" ring: %p\n", sc->ring); in DB_SHOW_COMMAND()
2095 db_printf(" descriptors: %p\n", sc->hw_desc_ring); in DB_SHOW_COMMAND()
2096 db_printf(" descriptors (phys): 0x%jx\n", in DB_SHOW_COMMAND()
2097 (uintmax_t)sc->hw_desc_bus_addr); in DB_SHOW_COMMAND()
2099 db_printf(" ring[%u] (tail):\n", sc->tail % in DB_SHOW_COMMAND()
2100 (1 << sc->ring_size_order)); in DB_SHOW_COMMAND()
2101 db_printf(" id: %u\n", ioat_get_ring_entry(sc, sc->tail)->id); in DB_SHOW_COMMAND()
2102 db_printf(" addr: 0x%lx\n", in DB_SHOW_COMMAND()
2103 RING_PHYS_ADDR(sc, sc->tail)); in DB_SHOW_COMMAND()
2104 db_printf(" next: 0x%lx\n", in DB_SHOW_COMMAND()
2105 ioat_get_descriptor(sc, sc->tail)->generic.next); in DB_SHOW_COMMAND()
2107 db_printf(" ring[%u] (head - 1):\n", (sc->head - 1) % in DB_SHOW_COMMAND()
2108 (1 << sc->ring_size_order)); in DB_SHOW_COMMAND()
2109 db_printf(" id: %u\n", ioat_get_ring_entry(sc, sc->head - 1)->id); in DB_SHOW_COMMAND()
2110 db_printf(" addr: 0x%lx\n", in DB_SHOW_COMMAND()
2111 RING_PHYS_ADDR(sc, sc->head - 1)); in DB_SHOW_COMMAND()
2112 db_printf(" next: 0x%lx\n", in DB_SHOW_COMMAND()
2113 ioat_get_descriptor(sc, sc->head - 1)->generic.next); in DB_SHOW_COMMAND()
2115 db_printf(" ring[%u] (head):\n", (sc->head) % in DB_SHOW_COMMAND()
2116 (1 << sc->ring_size_order)); in DB_SHOW_COMMAND()
2117 db_printf(" id: %u\n", ioat_get_ring_entry(sc, sc->head)->id); in DB_SHOW_COMMAND()
2118 db_printf(" addr: 0x%lx\n", in DB_SHOW_COMMAND()
2119 RING_PHYS_ADDR(sc, sc->head)); in DB_SHOW_COMMAND()
2120 db_printf(" next: 0x%lx\n", in DB_SHOW_COMMAND()
2121 ioat_get_descriptor(sc, sc->head)->generic.next); in DB_SHOW_COMMAND()
2123 for (idx = 0; idx < (1 << sc->ring_size_order); idx++) in DB_SHOW_COMMAND()
2124 if ((*sc->comp_update & IOAT_CHANSTS_COMPLETED_DESCRIPTOR_MASK) in DB_SHOW_COMMAND()
2129 db_show_lock(&sc->cleanup_lock); in DB_SHOW_COMMAND()
2131 db_printf(" refcnt: %u\n", sc->refcnt); in DB_SHOW_COMMAND()
2133 db_printf(" interrupts: %lu\n", sc->stats.interrupts); in DB_SHOW_COMMAND()
2134 db_printf(" descriptors_processed: %lu\n", sc->stats.descriptors_processed); in DB_SHOW_COMMAND()
2135 db_printf(" descriptors_error: %lu\n", sc->stats.descriptors_error); in DB_SHOW_COMMAND()
2136 db_printf(" descriptors_submitted: %lu\n", sc->stats.descriptors_submitted); in DB_SHOW_COMMAND()
2138 db_printf(" channel_halts: %u\n", sc->stats.channel_halts); in DB_SHOW_COMMAND()
2139 db_printf(" last_halt_chanerr: %u\n", sc->stats.last_halt_chanerr); in DB_SHOW_COMMAND()
2145 db_printf(" status: 0x%lx\n", ioat_get_chansts(sc)); in DB_SHOW_COMMAND()
2146 db_printf(" chanctrl: 0x%x\n", in DB_SHOW_COMMAND()
2148 db_printf(" chancmd: 0x%x\n", in DB_SHOW_COMMAND()
2150 db_printf(" dmacount: 0x%x\n", in DB_SHOW_COMMAND()
2152 db_printf(" chainaddr: 0x%lx\n", in DB_SHOW_COMMAND()
2154 db_printf(" chancmp: 0x%lx\n", in DB_SHOW_COMMAND()
2160 db_printf("usage: show ioat <0-%u>\n", ioat_channel_index); in DB_SHOW_COMMAND()