Lines Matching +full:ch0 +full:- +full:2

1 /*-
11 * 2. Redistributions in binary form must reproduce the above copyright
111 device_printf(ioat->device, __VA_ARGS__); \
121 &g_force_legacy_interrupts, 0, "Set to non-zero to force MSI-X disabled");
125 0, "Set log level (0-3) for ioat(4). Higher is more verbose.");
132 * OS <-> Driver interface structures
166 { 0x34308086, "TBG IOAT Ch0" },
175 { 0x37108086, "JSF IOAT Ch0" },
183 { 0x37188086, "JSF IOAT Ch0 (RAID)" },
186 { 0x3c208086, "SNB IOAT Ch0" },
194 { 0x3c2e8086, "SNB IOAT Ch0 (RAID)" },
197 { 0x0e208086, "IVB IOAT Ch0" },
205 { 0x0e2e8086, "IVB IOAT Ch0 (RAID)" },
208 { 0x2f208086, "HSW IOAT Ch0" },
216 { 0x2f2e8086, "HSW IOAT Ch0 (RAID)" },
219 { 0x0c508086, "BWD IOAT Ch0" },
224 { 0x6f508086, "BDXDE IOAT Ch0" },
229 { 0x6f208086, "BDX IOAT Ch0" },
237 { 0x6f2e8086, "BDX IOAT Ch0 (RAID)" },
249 * OS <-> Driver linkage functions
280 if (ep->type == type) { in ioat_probe()
281 device_set_desc(device, ep->desc); in ioat_probe()
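
The two fragments above are the probe loop matching pci_get_devid() against the device table listed earlier. A minimal reconstruction in the conventional newbus shape; the table name pci_ids, the entry type _pcsid, and the zero-terminated layout are assumptions:

/* assumes <sys/param.h>, <sys/bus.h> and <dev/pci/pcivar.h> */
static int
ioat_probe(device_t device)
{
	struct _pcsid *ep;
	uint32_t type;

	type = pci_get_devid(device);
	for (ep = pci_ids; ep->type != 0; ep++) {
		if (ep->type == type) {
			device_set_desc(device, ep->desc);
			return (0);
		}
	}
	return (ENXIO);
}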
295 ioat->device = device; in ioat_attach()
296 if (bus_get_domain(device, &ioat->domain) != 0) in ioat_attach()
297 ioat->domain = 0; in ioat_attach()
298 ioat->cpu = CPU_FFS(&cpuset_domain[ioat->domain]) - 1; in ioat_attach()
299 if (ioat->cpu < 0) in ioat_attach()
300 ioat->cpu = CPU_FIRST(); in ioat_attach()
306 ioat->version = ioat_read_cbver(ioat); in ioat_attach()
307 if (ioat->version < IOAT_VER_3_0) { in ioat_attach()
342 ioat->chan_idx = i; in ioat_attach()
380 ioat_channel[ioat->chan_idx] = NULL; in ioat_detach()
382 ioat_channel[ioat_channel_index - 1] == NULL) in ioat_detach()
383 ioat_channel_index--; in ioat_detach()
386 taskqueue_drain(taskqueue_thread, &ioat->reset_task); in ioat_detach()
388 mtx_lock(&ioat->submit_lock); in ioat_detach()
389 ioat->quiescing = TRUE; in ioat_detach()
390 ioat->destroying = TRUE; in ioat_detach()
391 wakeup(&ioat->quiescing); in ioat_detach()
392 wakeup(&ioat->resetting); in ioat_detach()
395 mtx_unlock(&ioat->submit_lock); in ioat_detach()
396 mtx_lock(&ioat->cleanup_lock); in ioat_detach()
398 msleep(&ioat->tail, &ioat->cleanup_lock, 0, "ioat_drain", 1); in ioat_detach()
399 mtx_unlock(&ioat->cleanup_lock); in ioat_detach()
402 callout_drain(&ioat->poll_timer); in ioat_detach()
406 if (ioat->pci_resource != NULL) in ioat_detach()
408 ioat->pci_resource_id, ioat->pci_resource); in ioat_detach()
410 if (ioat->data_tag != NULL) { in ioat_detach()
411 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
413 ioat->data_tag, ioat->ring[i].src_dmamap); in ioat_detach()
417 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
419 ioat->data_tag, ioat->ring[i].dst_dmamap); in ioat_detach()
424 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
426 ioat->data_tag, ioat->ring[i].src2_dmamap); in ioat_detach()
430 for (i = 0; i < 1 << ioat->ring_size_order; i++) { in ioat_detach()
432 ioat->data_tag, ioat->ring[i].dst2_dmamap); in ioat_detach()
437 bus_dma_tag_destroy(ioat->data_tag); in ioat_detach()
440 if (ioat->ring != NULL) in ioat_detach()
441 ioat_free_ring(ioat, 1 << ioat->ring_size_order, ioat->ring); in ioat_detach()
443 if (ioat->comp_update != NULL) { in ioat_detach()
444 bus_dmamap_unload(ioat->comp_update_tag, ioat->comp_update_map); in ioat_detach()
445 bus_dmamem_free(ioat->comp_update_tag, ioat->comp_update, in ioat_detach()
446 ioat->comp_update_map); in ioat_detach()
447 bus_dma_tag_destroy(ioat->comp_update_tag); in ioat_detach()
450 if (ioat->hw_desc_ring != NULL) { in ioat_detach()
451 bus_dmamap_unload(ioat->hw_desc_tag, ioat->hw_desc_map); in ioat_detach()
452 bus_dmamem_free(ioat->hw_desc_tag, ioat->hw_desc_ring, in ioat_detach()
453 ioat->hw_desc_map); in ioat_detach()
454 bus_dma_tag_destroy(ioat->hw_desc_tag); in ioat_detach()
464 if (ioat->tag != NULL) in ioat_teardown_intr()
465 bus_teardown_intr(ioat->device, ioat->res, ioat->tag); in ioat_teardown_intr()
467 if (ioat->res != NULL) in ioat_teardown_intr()
468 bus_release_resource(ioat->device, SYS_RES_IRQ, in ioat_teardown_intr()
469 rman_get_rid(ioat->res), ioat->res); in ioat_teardown_intr()
471 pci_release_msi(ioat->device); in ioat_teardown_intr()
485 ioat_acquire(&ioat->dmaengine); in ioat_start_channel()
488 desc = ioat_get_ring_entry(ioat, ioat->head); in ioat_start_channel()
489 hw_desc = &ioat_get_descriptor(ioat, ioat->head)->dma; in ioat_start_channel()
490 dmadesc = &desc->bus_dmadesc; in ioat_start_channel()
492 dmadesc->callback_fn = NULL; in ioat_start_channel()
493 dmadesc->callback_arg = NULL; in ioat_start_channel()
495 hw_desc->u.control_raw = 0; in ioat_start_channel()
496 hw_desc->u.control_generic.op = IOAT_OP_COPY; in ioat_start_channel()
497 hw_desc->u.control_generic.completion_update = 1; in ioat_start_channel()
498 hw_desc->size = 8; in ioat_start_channel()
499 hw_desc->src_addr = 0; in ioat_start_channel()
500 hw_desc->dest_addr = 0; in ioat_start_channel()
501 hw_desc->u.control.null = 1; in ioat_start_channel()
504 ioat_release(&ioat->dmaengine); in ioat_start_channel()
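
ioat_start_channel() primes the freshly started channel by submitting a null descriptor through the same acquire/release protocol that external consumers use. A hedged consumer-side sketch against the public ioat(4) API (ioat_get_dmaengine, ioat_acquire, ioat_copy, ioat_release); the callback, channel index 0, and the sleep loop are illustrative:

#include <sys/param.h>
#include <sys/systm.h>
#include <dev/ioat/ioat.h>

static void
example_done(void *arg, int error)
{
	/* Completion callback; error is nonzero if the channel halted. */
	*(int *)arg = 1;
	wakeup(arg);
}

static int
example_copy(bus_addr_t dst, bus_addr_t src, bus_size_t len)
{
	bus_dmaengine_t dma;
	struct bus_dmadesc *desc;
	int done;

	dma = ioat_get_dmaengine(0, M_WAITOK);	/* channel 0 */
	if (dma == NULL)
		return (ENXIO);

	done = 0;
	ioat_acquire(dma);		/* takes the submit lock */
	desc = ioat_copy(dma, dst, src, len, example_done, &done,
	    DMA_INT_EN);
	ioat_release(dma);		/* writes head, arms the poll timer */

	if (desc != NULL)
		while (done == 0)
			tsleep(&done, 0, "excopy", 1);

	ioat_put_dmaengine(dma);
	return (desc == NULL ? EAGAIN : 0);
}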
538 ioat->capabilities = ioat_read_dmacapability(ioat); in ioat3_attach()
540 ioat_log_message(0, "Capabilities: %b\n", (int)ioat->capabilities, in ioat3_attach()
544 ioat->max_xfer_size = 1 << xfercap; in ioat3_attach()
546 ioat->intrdelay_supported = (ioat_read_2(ioat, IOAT_INTRDELAY_OFFSET) & in ioat3_attach()
548 if (ioat->intrdelay_supported) in ioat3_attach()
549 ioat->intrdelay_max = IOAT_INTRDELAY_US_MASK; in ioat3_attach()
553 mtx_init(&ioat->submit_lock, "ioat_submit", NULL, MTX_DEF); in ioat3_attach()
554 mtx_init(&ioat->cleanup_lock, "ioat_cleanup", NULL, MTX_DEF); in ioat3_attach()
555 callout_init(&ioat->poll_timer, 1); in ioat3_attach()
556 TASK_INIT(&ioat->reset_task, 0, ioat_reset_hw_task, ioat); in ioat3_attach()
559 mtx_lock(&ioat->cleanup_lock); in ioat3_attach()
560 mtx_lock(&ioat->submit_lock); in ioat3_attach()
561 mtx_unlock(&ioat->submit_lock); in ioat3_attach()
562 mtx_unlock(&ioat->cleanup_lock); in ioat3_attach()
564 ioat->is_submitter_processing = FALSE; in ioat3_attach()
566 if (ioat->version >= IOAT_VER_3_3) in ioat3_attach()
568 else if (ioat->version >= IOAT_VER_3_2) in ioat3_attach()
573 error = bus_dma_tag_create(bus_get_dma_tag(ioat->device), in ioat3_attach()
576 &ioat->comp_update_tag); in ioat3_attach()
580 error = bus_dmamem_alloc(ioat->comp_update_tag, in ioat3_attach()
581 (void **)&ioat->comp_update, BUS_DMA_ZERO | BUS_DMA_WAITOK, in ioat3_attach()
582 &ioat->comp_update_map); in ioat3_attach()
586 error = bus_dmamap_load(ioat->comp_update_tag, ioat->comp_update_map, in ioat3_attach()
587 ioat->comp_update, sizeof(uint64_t), ioat_comp_update_map, ioat, in ioat3_attach()
592 ioat->ring_size_order = g_ioat_ring_order; in ioat3_attach()
593 num_descriptors = 1 << ioat->ring_size_order; in ioat3_attach()
596 error = bus_dma_tag_create(bus_get_dma_tag(ioat->device), in ioat3_attach()
597 2 * 1024 * 1024, 0x0, lowaddr, BUS_SPACE_MAXADDR, NULL, NULL, in ioat3_attach()
598 ringsz, 1, ringsz, 0, NULL, NULL, &ioat->hw_desc_tag); in ioat3_attach()
602 error = bus_dmamem_alloc(ioat->hw_desc_tag, &hw_desc, in ioat3_attach()
603 BUS_DMA_ZERO | BUS_DMA_WAITOK, &ioat->hw_desc_map); in ioat3_attach()
607 error = bus_dmamap_load(ioat->hw_desc_tag, ioat->hw_desc_map, hw_desc, in ioat3_attach()
608 ringsz, ioat_dmamap_cb, &ioat->hw_desc_bus_addr, BUS_DMA_NOWAIT); in ioat3_attach()
612 ioat->hw_desc_ring = hw_desc; in ioat3_attach()
614 error = bus_dma_tag_create(bus_get_dma_tag(ioat->device), in ioat3_attach()
616 ioat->max_xfer_size, 1, ioat->max_xfer_size, 0, NULL, NULL, in ioat3_attach()
617 &ioat->data_tag); in ioat3_attach()
620 ioat->ring = malloc_domainset(num_descriptors * sizeof(*ring), M_IOAT, in ioat3_attach()
621 DOMAINSET_PREF(ioat->domain), M_ZERO | M_WAITOK); in ioat3_attach()
623 ring = ioat->ring; in ioat3_attach()
627 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
635 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
643 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
651 error = bus_dmamap_create(ioat->data_tag, 0, in ioat3_attach()
662 dma_hw_desc = &ioat->hw_desc_ring[i].dma; in ioat3_attach()
663 dma_hw_desc->next = RING_PHYS_ADDR(ioat, i + 1); in ioat3_attach()
666 ioat->tail = ioat->head = 0; in ioat3_attach()
667 *ioat->comp_update = ioat->last_seen = in ioat3_attach()
668 RING_PHYS_ADDR(ioat, ioat->tail - 1); in ioat3_attach()
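
RING_PHYS_ADDR() turns a free-running ring index into the bus address of the matching hardware descriptor. A sketch of the macro, assuming it reduces the index modulo the power-of-two ring size:

#define RING_PHYS_ADDR(sc, i)	((sc)->hw_desc_bus_addr +	\
    (((i) % (1 << (sc)->ring_size_order)) *			\
    sizeof(struct ioat_dma_hw_descriptor)))

Seeding *comp_update and last_seen with the address of entry tail - 1 makes the first completion comparison well defined: the scan loop at 848 stops as soon as the hardware's reported completion address equals the entry just behind tail.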
676 ioat->pci_resource_id = PCIR_BAR(0); in ioat_map_pci_bar()
677 ioat->pci_resource = bus_alloc_resource_any(ioat->device, in ioat_map_pci_bar()
678 SYS_RES_MEMORY, &ioat->pci_resource_id, RF_ACTIVE); in ioat_map_pci_bar()
680 if (ioat->pci_resource == NULL) { in ioat_map_pci_bar()
685 ioat->pci_bus_tag = rman_get_bustag(ioat->pci_resource); in ioat_map_pci_bar()
686 ioat->pci_bus_handle = rman_get_bushandle(ioat->pci_resource); in ioat_map_pci_bar()
696 ioat->comp_update_bus_addr = seg[0].ds_addr; in ioat_comp_update_map()
706 *baddr = segs->ds_addr; in ioat_dmamap_cb()
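
ioat_dmamap_cb() is the standard single-segment busdma load callback. Reconstructed here under the assumption that the tag was created with nsegments = 1, so only segs[0] is meaningful:

static void
ioat_dmamap_cb(void *arg, bus_dma_segment_t *segs, int nseg, int error)
{
	bus_addr_t *baddr;

	KASSERT(error == 0, ("%s: error:%d", __func__, error));
	baddr = arg;
	*baddr = segs->ds_addr;	/* hand the lone segment back to the caller */
}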
721 if (!g_force_legacy_interrupts && pci_msix_count(ioat->device) >= 1) { in ioat_setup_intr()
723 pci_alloc_msix(ioat->device, &num_vectors); in ioat_setup_intr()
729 ioat->rid = 1; in ioat_setup_intr()
730 ioat->res = bus_alloc_resource_any(ioat->device, SYS_RES_IRQ, in ioat_setup_intr()
731 &ioat->rid, RF_ACTIVE); in ioat_setup_intr()
733 ioat->rid = 0; in ioat_setup_intr()
734 ioat->res = bus_alloc_resource_any(ioat->device, SYS_RES_IRQ, in ioat_setup_intr()
735 &ioat->rid, RF_SHAREABLE | RF_ACTIVE); in ioat_setup_intr()
737 if (ioat->res == NULL) { in ioat_setup_intr()
742 ioat->tag = NULL; in ioat_setup_intr()
743 error = bus_setup_intr(ioat->device, ioat->res, INTR_MPSAFE | in ioat_setup_intr()
744 INTR_TYPE_MISC, NULL, ioat_interrupt_handler, ioat, &ioat->tag); in ioat_setup_intr()
759 pciid = pci_get_devid(ioat->device); in ioat_model_resets_msix()
782 ioat->stats.interrupts++; in ioat_interrupt_handler()
812 mtx_lock(&ioat->cleanup_lock); in ioat_process_events()
814 if (!mtx_trylock(&ioat->cleanup_lock)) in ioat_process_events()
823 if (ioat->resetting_cleanup) { in ioat_process_events()
824 mtx_unlock(&ioat->cleanup_lock); in ioat_process_events()
829 comp_update = *ioat->comp_update; in ioat_process_events()
832 if (status < ioat->hw_desc_bus_addr || in ioat_process_events()
833 status >= ioat->hw_desc_bus_addr + (1 << ioat->ring_size_order) * in ioat_process_events()
836 (uintmax_t)status, ioat->chan_idx); in ioat_process_events()
838 if (status == ioat->last_seen) { in ioat_process_events()
846 __func__, ioat->chan_idx, comp_update, ioat->last_seen); in ioat_process_events()
848 while (RING_PHYS_ADDR(ioat, ioat->tail - 1) != status) { in ioat_process_events()
849 desc = ioat_get_ring_entry(ioat, ioat->tail); in ioat_process_events()
850 dmadesc = &desc->bus_dmadesc; in ioat_process_events()
852 ioat->chan_idx, ioat->tail, dmadesc, dmadesc->callback_fn, in ioat_process_events()
853 dmadesc->callback_arg); in ioat_process_events()
855 bus_dmamap_unload(ioat->data_tag, desc->src_dmamap); in ioat_process_events()
856 bus_dmamap_unload(ioat->data_tag, desc->dst_dmamap); in ioat_process_events()
857 bus_dmamap_unload(ioat->data_tag, desc->src2_dmamap); in ioat_process_events()
858 bus_dmamap_unload(ioat->data_tag, desc->dst2_dmamap); in ioat_process_events()
860 if (dmadesc->callback_fn != NULL) in ioat_process_events()
861 dmadesc->callback_fn(dmadesc->callback_arg, 0); in ioat_process_events()
864 ioat->tail++; in ioat_process_events()
867 ioat->chan_idx, ioat->head, ioat->tail, ioat_get_active(ioat)); in ioat_process_events()
870 ioat->last_seen = RING_PHYS_ADDR(ioat, ioat->tail - 1); in ioat_process_events()
871 ioat->stats.descriptors_processed += completed; in ioat_process_events()
872 wakeup(&ioat->tail); in ioat_process_events()
877 mtx_unlock(&ioat->cleanup_lock); in ioat_process_events()
890 ioat->stats.channel_halts++; in ioat_process_events()
896 mtx_lock(&ioat->submit_lock); in ioat_process_events()
897 ioat->quiescing = TRUE; in ioat_process_events()
898 mtx_unlock(&ioat->submit_lock); in ioat_process_events()
907 mtx_lock(&ioat->cleanup_lock); in ioat_process_events()
908 ioat->resetting_cleanup = TRUE; in ioat_process_events()
913 ioat->stats.last_halt_chanerr = chanerr; in ioat_process_events()
916 desc = ioat_get_ring_entry(ioat, ioat->tail); in ioat_process_events()
917 dmadesc = &desc->bus_dmadesc; in ioat_process_events()
919 ioat->chan_idx, ioat->tail, dmadesc, dmadesc->callback_fn, in ioat_process_events()
920 dmadesc->callback_arg); in ioat_process_events()
922 if (dmadesc->callback_fn != NULL) in ioat_process_events()
923 dmadesc->callback_fn(dmadesc->callback_arg, in ioat_process_events()
926 ioat->tail++; in ioat_process_events()
927 ioat->stats.descriptors_processed++; in ioat_process_events()
928 ioat->stats.descriptors_error++; in ioat_process_events()
931 ioat->chan_idx, ioat->head, ioat->tail, ioat_get_active(ioat)); in ioat_process_events()
936 mtx_unlock(&ioat->cleanup_lock); in ioat_process_events()
939 error = taskqueue_enqueue(taskqueue_thread, &ioat->reset_task); in ioat_process_events()
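
/*
 * Halt recovery, as the fragments above show it: on a channel halt the
 * handler bumps stats.channel_halts, quiesces submitters under the
 * submit lock, marks resetting_cleanup under the cleanup lock, records
 * the CHANERR value in stats.last_halt_chanerr, completes the faulted
 * descriptors with a nonzero status so their callbacks can observe the
 * error, and finally defers the actual hardware reset to reset_task on
 * taskqueue_thread.
 */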
983 mtx_lock(&ioat->submit_lock); in ioat_get_dmaengine()
986 if (ioat->destroying) { in ioat_get_dmaengine()
987 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
992 if (ioat->quiescing) { in ioat_get_dmaengine()
995 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
999 while (ioat->quiescing && !ioat->destroying) in ioat_get_dmaengine()
1000 msleep(&ioat->quiescing, &ioat->submit_lock, 0, "getdma", 0); in ioat_get_dmaengine()
1002 if (ioat->destroying) { in ioat_get_dmaengine()
1004 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
1008 mtx_unlock(&ioat->submit_lock); in ioat_get_dmaengine()
1009 return (&ioat->dmaengine); in ioat_get_dmaengine()
1018 mtx_lock(&ioat->submit_lock); in ioat_put_dmaengine()
1020 mtx_unlock(&ioat->submit_lock); in ioat_put_dmaengine()
1029 return (ioat->version); in ioat_get_hwversion()
1038 return (ioat->max_xfer_size); in ioat_get_max_io_size()
1047 return (ioat->capabilities); in ioat_get_capabilities()
1056 return (bus_get_domain(ioat->device, domain)); in ioat_get_domain()
1065 if (!ioat->intrdelay_supported) in ioat_set_interrupt_coalesce()
1067 if (delay > ioat->intrdelay_max) in ioat_set_interrupt_coalesce()
1071 ioat->cached_intrdelay = in ioat_set_interrupt_coalesce()
1082 return (ioat->intrdelay_max); in ioat_get_max_coalesce_period()
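
The interrupt-coalescing accessors above are public API. A hedged usage sketch that clamps a requested delay to the advertised maximum before applying it; the 10 microsecond figure is illustrative, and per the guards at 1065 and 1067 the set call can still fail when the hardware lacks INTRDELAY support:

/* assumes <sys/param.h> and <dev/ioat/ioat.h> */
static int
example_coalesce(bus_dmaengine_t dma)
{
	uint16_t max, want = 10;	/* microsecond units per IOAT_INTRDELAY_US_MASK */

	max = ioat_get_max_coalesce_period(dma);
	if (want > max)
		want = max;
	return (ioat_set_interrupt_coalesce(dma, want));
}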
1091 mtx_lock(&ioat->submit_lock); in ioat_acquire()
1092 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_acquire()
1093 ioat->acq_head = ioat->head; in ioat_acquire()
1118 ioat->chan_idx, ioat->head); in ioat_release()
1121 ioat->chan_idx, ioat->head); in ioat_release()
1123 if (ioat->acq_head != ioat->head) { in ioat_release()
1125 (uint16_t)ioat->head); in ioat_release()
1127 if (!callout_pending(&ioat->poll_timer)) { in ioat_release()
1128 callout_reset_on(&ioat->poll_timer, 1, in ioat_release()
1129 ioat_poll_timer_callback, ioat, ioat->cpu); in ioat_release()
1132 mtx_unlock(&ioat->submit_lock); in ioat_release()
1146 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_op_generic()
1150 KASSERT(size <= ioat->max_xfer_size, ("%s: size too big (%u > %u)", in ioat_op_generic()
1151 __func__, (unsigned)size, ioat->max_xfer_size)); in ioat_op_generic()
1161 desc = ioat_get_ring_entry(ioat, ioat->head); in ioat_op_generic()
1162 hw_desc = &ioat_get_descriptor(ioat, ioat->head)->generic; in ioat_op_generic()
1164 hw_desc->u.control_raw = 0; in ioat_op_generic()
1165 hw_desc->u.control_generic.op = op; in ioat_op_generic()
1166 hw_desc->u.control_generic.completion_update = 1; in ioat_op_generic()
1169 hw_desc->u.control_generic.int_enable = 1; in ioat_op_generic()
1171 hw_desc->u.control_generic.fence = 1; in ioat_op_generic()
1173 hw_desc->size = size; in ioat_op_generic()
1176 nseg = -1; in ioat_op_generic()
1177 error = _bus_dmamap_load_phys(ioat->data_tag, desc->src_dmamap, in ioat_op_generic()
1184 hw_desc->src_addr = seg.ds_addr; in ioat_op_generic()
1188 nseg = -1; in ioat_op_generic()
1189 error = _bus_dmamap_load_phys(ioat->data_tag, desc->dst_dmamap, in ioat_op_generic()
1196 hw_desc->dest_addr = seg.ds_addr; in ioat_op_generic()
1199 desc->bus_dmadesc.callback_fn = callback_fn; in ioat_op_generic()
1200 desc->bus_dmadesc.callback_arg = callback_arg; in ioat_op_generic()
1213 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_null()
1220 hw_desc = &ioat_get_descriptor(ioat, desc->id)->dma; in ioat_null()
1221 hw_desc->u.control.null = 1; in ioat_null()
1223 return (&desc->bus_dmadesc); in ioat_null()
1241 hw_desc = &ioat_get_descriptor(ioat, desc->id)->dma; in ioat_copy()
1247 __func__, ioat->chan_idx, &desc->bus_dmadesc, dst, src, len); in ioat_copy()
1248 return (&desc->bus_dmadesc); in ioat_copy()
1264 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_copy_8k_aligned()
1267 ("%s: addresses are not page-aligned", __func__)); in ioat_copy_8k_aligned()
1269 desc = ioat_op_generic(ioat, IOAT_OP_COPY, 2 * PAGE_SIZE, 0, 0, in ioat_copy_8k_aligned()
1274 hw_desc = &ioat_get_descriptor(ioat, desc->id)->dma; in ioat_copy_8k_aligned()
1276 src1_len = (src2 != src1 + PAGE_SIZE) ? PAGE_SIZE : 2 * PAGE_SIZE; in ioat_copy_8k_aligned()
1277 nseg = -1; in ioat_copy_8k_aligned()
1278 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1279 desc->src_dmamap, src1, src1_len, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1285 hw_desc->src_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1286 if (src1_len != 2 * PAGE_SIZE) { in ioat_copy_8k_aligned()
1287 hw_desc->u.control.src_page_break = 1; in ioat_copy_8k_aligned()
1288 nseg = -1; in ioat_copy_8k_aligned()
1289 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1290 desc->src2_dmamap, src2, PAGE_SIZE, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1296 hw_desc->next_src_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1299 dst1_len = (dst2 != dst1 + PAGE_SIZE) ? PAGE_SIZE : 2 * PAGE_SIZE; in ioat_copy_8k_aligned()
1300 nseg = -1; in ioat_copy_8k_aligned()
1301 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1302 desc->dst_dmamap, dst1, dst1_len, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1308 hw_desc->dest_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1309 if (dst1_len != 2 * PAGE_SIZE) { in ioat_copy_8k_aligned()
1310 hw_desc->u.control.dest_page_break = 1; in ioat_copy_8k_aligned()
1311 nseg = -1; in ioat_copy_8k_aligned()
1312 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_8k_aligned()
1313 desc->dst2_dmamap, dst2, PAGE_SIZE, 0, &seg, &nseg); in ioat_copy_8k_aligned()
1319 hw_desc->next_dest_addr = seg.ds_addr; in ioat_copy_8k_aligned()
1326 return (&desc->bus_dmadesc); in ioat_copy_8k_aligned()
1343 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_copy_crc()
1345 KASSERT((ioat->capabilities & IOAT_DMACAP_MOVECRC) != 0, in ioat_copy_crc()
1371 hw_desc = &ioat_get_descriptor(ioat, desc->id)->crc32; in ioat_copy_crc()
1374 nseg = -1; in ioat_copy_crc()
1375 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_copy_crc()
1376 desc->dst2_dmamap, crcptr, sizeof(uint32_t), 0, in ioat_copy_crc()
1383 hw_desc->crc_address = seg.ds_addr; in ioat_copy_crc()
1385 hw_desc->u.control.crc_location = 1; in ioat_copy_crc()
1388 hw_desc->u.control.use_seed = 1; in ioat_copy_crc()
1389 hw_desc->seed = *initialseed; in ioat_copy_crc()
1396 return (&desc->bus_dmadesc); in ioat_copy_crc()
1413 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_crc()
1415 KASSERT((ioat->capabilities & IOAT_DMACAP_CRC) != 0, in ioat_crc()
1441 hw_desc = &ioat_get_descriptor(ioat, desc->id)->crc32; in ioat_crc()
1444 nseg = -1; in ioat_crc()
1445 error = _bus_dmamap_load_phys(ioat->data_tag, in ioat_crc()
1446 desc->dst2_dmamap, crcptr, sizeof(uint32_t), 0, in ioat_crc()
1453 hw_desc->crc_address = seg.ds_addr; in ioat_crc()
1455 hw_desc->u.control.crc_location = 1; in ioat_crc()
1458 hw_desc->u.control.use_seed = 1; in ioat_crc()
1459 hw_desc->seed = *initialseed; in ioat_crc()
1466 return (&desc->bus_dmadesc); in ioat_crc()
1479 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_blockfill()
1481 KASSERT((ioat->capabilities & IOAT_DMACAP_BFILL) != 0, in ioat_blockfill()
1489 hw_desc = &ioat_get_descriptor(ioat, desc->id)->fill; in ioat_blockfill()
1490 hw_desc->src_data = fillpattern; in ioat_blockfill()
1495 return (&desc->bus_dmadesc); in ioat_blockfill()
1505 return ((ioat->head - ioat->tail) & ((1 << ioat->ring_size_order) - 1)); in ioat_get_active()
1512 return ((1 << ioat->ring_size_order) - ioat_get_active(ioat) - 1); in ioat_get_ring_space()
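
The two helpers above are classic power-of-two ring accounting over free-running counters. A worked example, assuming ring_size_order = 5:

/*
 * entries = 1 << 5 = 32, mask = 31
 * head = 35, tail = 30  ->  active = (35 - 30) & 31 = 5
 * space = 32 - 5 - 1 = 26
 *
 * Unsigned wraparound keeps the subtraction correct even after the
 * uint32_t counters overflow, and the trailing -1 holds one slot in
 * reserve (plausibly so that a completely full ring cannot alias an
 * empty one in the completion-address comparison at 848).
 */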
1537 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_reserve_space()
1541 if (num_descs < 1 || num_descs >= (1 << ioat->ring_size_order)) { in ioat_reserve_space()
1547 if (ioat->quiescing) { in ioat_reserve_space()
1556 ioat->chan_idx, num_descs); in ioat_reserve_space()
1558 if (!dug && !ioat->is_submitter_processing) { in ioat_reserve_space()
1559 ioat->is_submitter_processing = TRUE; in ioat_reserve_space()
1560 mtx_unlock(&ioat->submit_lock); in ioat_reserve_space()
1563 __func__, ioat->chan_idx); in ioat_reserve_space()
1566 mtx_lock(&ioat->submit_lock); in ioat_reserve_space()
1568 KASSERT(ioat->is_submitter_processing == TRUE, in ioat_reserve_space()
1570 ioat->is_submitter_processing = FALSE; in ioat_reserve_space()
1571 wakeup(&ioat->tail); in ioat_reserve_space()
1580 __func__, ioat->chan_idx); in ioat_reserve_space()
1581 msleep(&ioat->tail, &ioat->submit_lock, 0, in ioat_reserve_space()
1587 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_reserve_space()
1588 KASSERT(!ioat->quiescing || error == ENXIO, in ioat_reserve_space()
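
ioat_reserve_space() backs the public ioat_acquire_reserve(), which acquires the engine and reserves ring slots in one call. A hedged sketch of batching two submissions behind a single reservation; mflags follows the M_WAITOK/M_NOWAIT convention and the payload is illustrative:

/* assumes <sys/param.h> and <dev/ioat/ioat.h> */
static int
example_batch(bus_dmaengine_t dma, bus_addr_t dst, bus_addr_t src)
{
	struct bus_dmadesc *d1, *d2;
	int error;

	/* Acquire the engine with two ring slots guaranteed. */
	error = ioat_acquire_reserve(dma, 2, M_WAITOK);
	if (error != 0)
		return (error);
	d1 = ioat_copy(dma, dst, src, PAGE_SIZE, NULL, NULL, 0);
	d2 = ioat_null(dma, NULL, NULL, DMA_INT_EN);
	ioat_release(dma);
	return ((d1 == NULL || d2 == NULL) ? EAGAIN : 0);
}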
1605 return (&ioat->ring[index % (1 << ioat->ring_size_order)]); in ioat_get_ring_entry()
1612 return (&ioat->hw_desc_ring[index % (1 << ioat->ring_size_order)]); in ioat_get_descriptor()
1625 mtx_assert(&ioat->cleanup_lock, MA_OWNED); in ioat_halted_debug()
1627 desc = ioat_get_descriptor(ioat, ioat->tail + 0); in ioat_halted_debug()
1630 desc = ioat_get_descriptor(ioat, ioat->tail + 1); in ioat_halted_debug()
1644 mtx_lock(&ioat->submit_lock); in ioat_poll_timer_callback()
1646 callout_schedule(&ioat->poll_timer, 1); in ioat_poll_timer_callback()
1647 mtx_unlock(&ioat->submit_lock); in ioat_poll_timer_callback()
1657 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_submit_single()
1659 ioat->head++; in ioat_submit_single()
1661 ioat->chan_idx, ioat->head, ioat->tail); in ioat_submit_single()
1663 ioat->stats.descriptors_submitted++; in ioat_submit_single()
1674 CTR2(KTR_IOAT, "%s channel=%u", __func__, ioat->chan_idx); in ioat_reset_hw()
1676 mtx_lock(&ioat->submit_lock); in ioat_reset_hw()
1677 while (ioat->resetting && !ioat->destroying) in ioat_reset_hw()
1678 msleep(&ioat->resetting, &ioat->submit_lock, 0, "IRH_drain", 0); in ioat_reset_hw()
1679 if (ioat->destroying) { in ioat_reset_hw()
1680 mtx_unlock(&ioat->submit_lock); in ioat_reset_hw()
1683 ioat->resetting = TRUE; in ioat_reset_hw()
1684 ioat->quiescing = TRUE; in ioat_reset_hw()
1685 mtx_unlock(&ioat->submit_lock); in ioat_reset_hw()
1686 mtx_lock(&ioat->cleanup_lock); in ioat_reset_hw()
1688 msleep(&ioat->tail, &ioat->cleanup_lock, 0, "ioat_drain", 1); in ioat_reset_hw()
1694 ioat->resetting_cleanup = TRUE; in ioat_reset_hw()
1695 mtx_unlock(&ioat->cleanup_lock); in ioat_reset_hw()
1698 ioat->chan_idx); in ioat_reset_hw()
1721 ioat->chan_idx); in ioat_reset_hw()
1724 * IOAT v3 workaround - CHANERRMSK_INT with 3E07h to mask out errors in ioat_reset_hw()
1727 pci_write_config(ioat->device, IOAT_CFG_CHANERRMASK_INT_OFFSET, 0x3e07, in ioat_reset_hw()
1729 chanerr = pci_read_config(ioat->device, IOAT_CFG_CHANERR_INT_OFFSET, 4); in ioat_reset_hw()
1730 pci_write_config(ioat->device, IOAT_CFG_CHANERR_INT_OFFSET, chanerr, 4); in ioat_reset_hw()
1733 * BDXDE and BWD models reset MSI-X registers on device reset. in ioat_reset_hw()
1737 ioat_log_message(1, "device resets MSI-X registers; saving\n"); in ioat_reset_hw()
1738 pci_save_state(ioat->device); in ioat_reset_hw()
1743 ioat->chan_idx); in ioat_reset_hw()
1755 pci_restore_state(ioat->device); in ioat_reset_hw()
1770 mtx_lock(&ioat->cleanup_lock); in ioat_reset_hw()
1772 mtx_unlock(&ioat->cleanup_lock); in ioat_reset_hw()
1784 ioat->tail = ioat->head = 0; in ioat_reset_hw()
1785 *ioat->comp_update = ioat->last_seen = in ioat_reset_hw()
1786 RING_PHYS_ADDR(ioat, ioat->tail - 1); in ioat_reset_hw()
1789 ioat_write_chancmp(ioat, ioat->comp_update_bus_addr); in ioat_reset_hw()
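
/*
 * Note that this repeats the attach-time initialization at 666-668:
 * indices return to zero and last_seen again points one entry behind
 * tail before the completion-write address is reprogrammed.
 */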
1793 ioat->chan_idx); in ioat_reset_hw()
1800 ioat->chan_idx); in ioat_reset_hw()
1806 mtx_lock(&ioat->cleanup_lock); in ioat_reset_hw()
1807 ioat->resetting_cleanup = FALSE; in ioat_reset_hw()
1808 mtx_unlock(&ioat->cleanup_lock); in ioat_reset_hw()
1811 mtx_lock(&ioat->submit_lock); in ioat_reset_hw()
1812 ioat->quiescing = FALSE; in ioat_reset_hw()
1813 wakeup(&ioat->quiescing); in ioat_reset_hw()
1815 ioat->resetting = FALSE; in ioat_reset_hw()
1816 wakeup(&ioat->resetting); in ioat_reset_hw()
1818 CTR2(KTR_IOAT, "%s channel=%u reset done", __func__, ioat->chan_idx); in ioat_reset_hw()
1819 mtx_unlock(&ioat->submit_lock); in ioat_reset_hw()
1860 if (error != 0 || req->newptr == NULL) in sysctl_handle_chansts()
1878 if (ioat->stats.interrupts == 0) { in sysctl_handle_dpi()
1882 rate = ioat->stats.descriptors_processed * factor / in sysctl_handle_dpi()
1883 ioat->stats.interrupts; in sysctl_handle_dpi()
1890 if (error != 0 || req->newptr == NULL) in sysctl_handle_dpi()
1905 if (error != 0 || req->newptr == NULL) in sysctl_handle_reset()
1923 for (i = 0; i < 2; i++) { in dump_descriptor()
1944 &ioat->version, 0, "HW version (0xMM form)"); in ioat_setup_sysctl()
1946 &ioat->max_xfer_size, 0, "HW maximum transfer size"); in ioat_setup_sysctl()
1948 &ioat->intrdelay_supported, 0, "Is INTRDELAY supported"); in ioat_setup_sysctl()
1950 &ioat->intrdelay_max, 0, in ioat_setup_sysctl()
1958 &ioat->ring_size_order, 0, "SW descriptor ring size order"); in ioat_setup_sysctl()
1959 SYSCTL_ADD_UINT(ctx, state, OID_AUTO, "head", CTLFLAG_RD, &ioat->head, in ioat_setup_sysctl()
1961 SYSCTL_ADD_UINT(ctx, state, OID_AUTO, "tail", CTLFLAG_RD, &ioat->tail, in ioat_setup_sysctl()
1965 ioat->comp_update, "HW addr of last completion"); in ioat_setup_sysctl()
1968 CTLFLAG_RD, &ioat->is_submitter_processing, 0, in ioat_setup_sysctl()
1976 &ioat->cached_intrdelay, 0, in ioat_setup_sysctl()
1986 sysctl_handle_reset, "I", "Set to non-zero to reset the hardware"); in ioat_setup_sysctl()
1993 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.interrupts, in ioat_setup_sysctl()
1996 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.descriptors_processed, in ioat_setup_sysctl()
1999 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.descriptors_submitted, in ioat_setup_sysctl()
2002 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.descriptors_error, in ioat_setup_sysctl()
2005 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.channel_halts, 0, in ioat_setup_sysctl()
2008 CTLFLAG_RW | CTLFLAG_STATS, &ioat->stats.last_halt_chanerr, 0, in ioat_setup_sysctl()
2020 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_get()
2021 KASSERT(ioat->refcnt < UINT32_MAX, ("refcnt overflow")); in ioat_get()
2023 ioat->refcnt++; in ioat_get()
2030 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_put()
2031 KASSERT(ioat->refcnt >= 1, ("refcnt error")); in ioat_put()
2033 if (--ioat->refcnt == 0) in ioat_put()
2034 wakeup(&ioat->refcnt); in ioat_put()
2041 mtx_assert(&ioat->submit_lock, MA_OWNED); in ioat_drain_locked()
2043 while (ioat->refcnt > 0) in ioat_drain_locked()
2044 msleep(&ioat->refcnt, &ioat->submit_lock, 0, "ioat_drain", 0); in ioat_drain_locked()
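
/*
 * ioat_get()/ioat_put() form a sleepable refcount under the submit
 * lock: put wakes &ioat->refcnt when the count reaches zero, and
 * ioat_drain_locked() sleeps on that channel until all references
 * are gone.
 */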
2048 #define _db_show_lock(lo) LOCK_CLASS(lo)->lc_ddb_show(lo)
2049 #define db_show_lock(lk) _db_show_lock(&(lk)->lock_object)
2066 db_printf(" version: %d\n", sc->version); in DB_SHOW_COMMAND()
2067 db_printf(" chan_idx: %u\n", sc->chan_idx); in DB_SHOW_COMMAND()
2069 db_show_lock(&sc->submit_lock); in DB_SHOW_COMMAND()
2071 db_printf(" capabilities: %b\n", (int)sc->capabilities, in DB_SHOW_COMMAND()
2073 db_printf(" cached_intrdelay: %u\n", sc->cached_intrdelay); in DB_SHOW_COMMAND()
2074 db_printf(" *comp_update: 0x%jx\n", (uintmax_t)*sc->comp_update); in DB_SHOW_COMMAND()
2077 db_printf(" c_time: %ju\n", (uintmax_t)sc->poll_timer.c_time); in DB_SHOW_COMMAND()
2078 db_printf(" c_arg: %p\n", sc->poll_timer.c_arg); in DB_SHOW_COMMAND()
2079 db_printf(" c_func: %p\n", sc->poll_timer.c_func); in DB_SHOW_COMMAND()
2080 db_printf(" c_lock: %p\n", sc->poll_timer.c_lock); in DB_SHOW_COMMAND()
2081 db_printf(" c_flags: 0x%x\n", (unsigned)sc->poll_timer.c_flags); in DB_SHOW_COMMAND()
2083 db_printf(" quiescing: %d\n", (int)sc->quiescing); in DB_SHOW_COMMAND()
2084 db_printf(" destroying: %d\n", (int)sc->destroying); in DB_SHOW_COMMAND()
2086 (int)sc->is_submitter_processing); in DB_SHOW_COMMAND()
2087 db_printf(" intrdelay_supported: %d\n", (int)sc->intrdelay_supported); in DB_SHOW_COMMAND()
2088 db_printf(" resetting: %d\n", (int)sc->resetting); in DB_SHOW_COMMAND()
2090 db_printf(" head: %u\n", sc->head); in DB_SHOW_COMMAND()
2091 db_printf(" tail: %u\n", sc->tail); in DB_SHOW_COMMAND()
2092 db_printf(" ring_size_order: %u\n", sc->ring_size_order); in DB_SHOW_COMMAND()
2093 db_printf(" last_seen: 0x%lx\n", sc->last_seen); in DB_SHOW_COMMAND()
2094 db_printf(" ring: %p\n", sc->ring); in DB_SHOW_COMMAND()
2095 db_printf(" descriptors: %p\n", sc->hw_desc_ring); in DB_SHOW_COMMAND()
2097 (uintmax_t)sc->hw_desc_bus_addr); in DB_SHOW_COMMAND()
2099 db_printf(" ring[%u] (tail):\n", sc->tail % in DB_SHOW_COMMAND()
2100 (1 << sc->ring_size_order)); in DB_SHOW_COMMAND()
2101 db_printf(" id: %u\n", ioat_get_ring_entry(sc, sc->tail)->id); in DB_SHOW_COMMAND()
2103 RING_PHYS_ADDR(sc, sc->tail)); in DB_SHOW_COMMAND()
2105 ioat_get_descriptor(sc, sc->tail)->generic.next); in DB_SHOW_COMMAND()
2107 db_printf(" ring[%u] (head - 1):\n", (sc->head - 1) % in DB_SHOW_COMMAND()
2108 (1 << sc->ring_size_order)); in DB_SHOW_COMMAND()
2109 db_printf(" id: %u\n", ioat_get_ring_entry(sc, sc->head - 1)->id); in DB_SHOW_COMMAND()
2111 RING_PHYS_ADDR(sc, sc->head - 1)); in DB_SHOW_COMMAND()
2113 ioat_get_descriptor(sc, sc->head - 1)->generic.next); in DB_SHOW_COMMAND()
2115 db_printf(" ring[%u] (head):\n", (sc->head) % in DB_SHOW_COMMAND()
2116 (1 << sc->ring_size_order)); in DB_SHOW_COMMAND()
2117 db_printf(" id: %u\n", ioat_get_ring_entry(sc, sc->head)->id); in DB_SHOW_COMMAND()
2119 RING_PHYS_ADDR(sc, sc->head)); in DB_SHOW_COMMAND()
2121 ioat_get_descriptor(sc, sc->head)->generic.next); in DB_SHOW_COMMAND()
2123 for (idx = 0; idx < (1 << sc->ring_size_order); idx++) in DB_SHOW_COMMAND()
2124 if ((*sc->comp_update & IOAT_CHANSTS_COMPLETED_DESCRIPTOR_MASK) in DB_SHOW_COMMAND()
2129 db_show_lock(&sc->cleanup_lock); in DB_SHOW_COMMAND()
2131 db_printf(" refcnt: %u\n", sc->refcnt); in DB_SHOW_COMMAND()
2133 db_printf(" interrupts: %lu\n", sc->stats.interrupts); in DB_SHOW_COMMAND()
2134 db_printf(" descriptors_processed: %lu\n", sc->stats.descriptors_processed); in DB_SHOW_COMMAND()
2135 db_printf(" descriptors_error: %lu\n", sc->stats.descriptors_error); in DB_SHOW_COMMAND()
2136 db_printf(" descriptors_submitted: %lu\n", sc->stats.descriptors_submitted); in DB_SHOW_COMMAND()
2138 db_printf(" channel_halts: %u\n", sc->stats.channel_halts); in DB_SHOW_COMMAND()
2139 db_printf(" last_halt_chanerr: %u\n", sc->stats.last_halt_chanerr); in DB_SHOW_COMMAND()
2160 db_printf("usage: show ioat <0-%u>\n", ioat_channel_index); in DB_SHOW_COMMAND()