Lines matching refs: xhci
(Each entry shows the source line number, the matching line, and the enclosing function; "argument" or "local" marks lines where xhci is declared as a function parameter or as a local variable.)

29 static struct xhci_segment *xhci_segment_alloc(struct xhci_hcd *xhci,  in xhci_segment_alloc()  argument
38 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_segment_alloc()
44 seg->trbs = dma_pool_zalloc(xhci->segment_pool, flags, &dma); in xhci_segment_alloc()
54 dma_pool_free(xhci->segment_pool, seg->trbs, dma); in xhci_segment_alloc()
71 static void xhci_segment_free(struct xhci_hcd *xhci, struct xhci_segment *seg) in xhci_segment_free() argument
74 dma_pool_free(xhci->segment_pool, seg->trbs, seg->dma); in xhci_segment_free()
81 static void xhci_free_segments_for_ring(struct xhci_hcd *xhci, in xhci_free_segments_for_ring() argument
89 xhci_segment_free(xhci, seg); in xhci_free_segments_for_ring()
92 xhci_segment_free(xhci, first); in xhci_free_segments_for_ring()
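The segment allocator above follows a two-level pattern: a small kzalloc'd bookkeeping struct plus a DMA-pool block for the TRBs themselves, with the pool block freed back to the same pool. The sketch below is a reduced illustration of that pattern, not the driver's code; demo_segment and the pool parameter are simplified stand-ins for struct xhci_segment and xhci->segment_pool.

#include <linux/dmapool.h>
#include <linux/slab.h>

struct demo_segment {
	void		*trbs;	/* CPU address of the zeroed TRB block */
	dma_addr_t	dma;	/* bus address the controller will follow */
};

static struct demo_segment *demo_segment_alloc(struct dma_pool *segment_pool,
						gfp_t flags)
{
	struct demo_segment *seg = kzalloc(sizeof(*seg), flags);

	if (!seg)
		return NULL;

	seg->trbs = dma_pool_zalloc(segment_pool, flags, &seg->dma);
	if (!seg->trbs) {
		kfree(seg);	/* roll back the bookkeeping allocation */
		return NULL;
	}
	return seg;
}

static void demo_segment_free(struct dma_pool *segment_pool,
			      struct demo_segment *seg)
{
	dma_pool_free(segment_pool, seg->trbs, seg->dma);
	kfree(seg);
}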
129 static void xhci_link_rings(struct xhci_hcd *xhci, struct xhci_ring *ring, in xhci_link_rings() argument
139 chain_links = xhci_link_chain_quirk(xhci, ring->type); in xhci_link_rings()
284 void xhci_ring_free(struct xhci_hcd *xhci, struct xhci_ring *ring) in xhci_ring_free() argument
294 xhci_free_segments_for_ring(xhci, ring->first_seg); in xhci_ring_free()
326 static int xhci_alloc_segments_for_ring(struct xhci_hcd *xhci, in xhci_alloc_segments_for_ring() argument
339 chain_links = xhci_link_chain_quirk(xhci, type); in xhci_alloc_segments_for_ring()
341 prev = xhci_segment_alloc(xhci, cycle_state, max_packet, num, flags); in xhci_alloc_segments_for_ring()
350 next = xhci_segment_alloc(xhci, cycle_state, max_packet, num, in xhci_alloc_segments_for_ring()
365 xhci_free_segments_for_ring(xhci, *first); in xhci_alloc_segments_for_ring()
376 struct xhci_ring *xhci_ring_alloc(struct xhci_hcd *xhci, in xhci_ring_alloc() argument
382 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_ring_alloc()
395 ret = xhci_alloc_segments_for_ring(xhci, &ring->first_seg, &ring->last_seg, num_segs, in xhci_ring_alloc()
415 void xhci_free_endpoint_ring(struct xhci_hcd *xhci, in xhci_free_endpoint_ring() argument
419 xhci_ring_free(xhci, virt_dev->eps[ep_index].ring); in xhci_free_endpoint_ring()
427 int xhci_ring_expansion(struct xhci_hcd *xhci, struct xhci_ring *ring, in xhci_ring_expansion() argument
434 ret = xhci_alloc_segments_for_ring(xhci, &first, &last, num_new_segs, ring->cycle_state, in xhci_ring_expansion()
446 xhci_link_rings(xhci, ring, first, last, num_new_segs); in xhci_ring_expansion()
448 xhci_dbg_trace(xhci, trace_xhci_dbg_ring_expansion, in xhci_ring_expansion()
455 xhci_free_segments_for_ring(xhci, first); in xhci_ring_expansion()
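xhci_ring_expansion() above builds a detached chain of new segments first (xhci_alloc_segments_for_ring) and only calls xhci_link_rings() once the whole chain exists, so a mid-way allocation failure just frees the partial chain and leaves the live ring untouched. A minimal, hypothetical splice illustrating that ordering (demo_seg is a stand-in with only a next pointer):

struct demo_seg {
	struct demo_seg *next;
};

/*
 * Splice a fully built chain (first .. last) into a circular ring right
 * after 'join'.  Nothing here can fail, which is the point: every
 * allocation failure is handled before the live ring is ever modified.
 */
static void demo_splice_chain(struct demo_seg *join,
			      struct demo_seg *first, struct demo_seg *last)
{
	last->next = join->next;
	join->next = first;
}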
459 struct xhci_container_ctx *xhci_alloc_container_ctx(struct xhci_hcd *xhci, in xhci_alloc_container_ctx() argument
463 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_alloc_container_ctx()
473 ctx->size = HCC_64BYTE_CONTEXT(xhci->hcc_params) ? 2048 : 1024; in xhci_alloc_container_ctx()
475 ctx->size += CTX_SIZE(xhci->hcc_params); in xhci_alloc_container_ctx()
477 ctx->bytes = dma_pool_zalloc(xhci->device_pool, flags, &ctx->dma); in xhci_alloc_container_ctx()
485 void xhci_free_container_ctx(struct xhci_hcd *xhci, in xhci_free_container_ctx() argument
490 dma_pool_free(xhci->device_pool, ctx->bytes, ctx->dma); in xhci_free_container_ctx()
503 struct xhci_slot_ctx *xhci_get_slot_ctx(struct xhci_hcd *xhci, in xhci_get_slot_ctx() argument
510 (ctx->bytes + CTX_SIZE(xhci->hcc_params)); in xhci_get_slot_ctx()
513 struct xhci_ep_ctx *xhci_get_ep_ctx(struct xhci_hcd *xhci, in xhci_get_ep_ctx() argument
523 (ctx->bytes + (ep_index * CTX_SIZE(xhci->hcc_params))); in xhci_get_ep_ctx()
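The slot and endpoint context getters above are pure offset arithmetic over ctx->bytes: each context entry is CTX_SIZE bytes (32, or 64 when HCC_64BYTE_CONTEXT is set), and input containers carry an extra Input Control Context at offset 0, shifting every entry by one slot. A hedged sketch of that math; the demo_* names and the input_ctx flag are illustrative, not the driver's API.

#include <linux/types.h>

static void *demo_slot_ctx(u8 *bytes, unsigned int ctx_size, bool input_ctx)
{
	/* input containers start with an Input Control Context */
	return bytes + (input_ctx ? ctx_size : 0);
}

static void *demo_ep_ctx(u8 *bytes, unsigned int ctx_size,
			 unsigned int ep_index, bool input_ctx)
{
	/* ep_index 0 is EP0; it sits immediately after the slot context */
	unsigned int entry = ep_index + 1 + (input_ctx ? 1 : 0);

	return bytes + entry * ctx_size;
}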
529 static void xhci_free_stream_ctx(struct xhci_hcd *xhci, in xhci_free_stream_ctx() argument
533 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_free_stream_ctx()
539 dma_pool_free(xhci->medium_streams_pool, stream_ctx, dma); in xhci_free_stream_ctx()
541 dma_pool_free(xhci->small_streams_pool, stream_ctx, dma); in xhci_free_stream_ctx()
554 static struct xhci_stream_ctx *xhci_alloc_stream_ctx(struct xhci_hcd *xhci, in xhci_alloc_stream_ctx() argument
558 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_alloc_stream_ctx()
564 return dma_pool_zalloc(xhci->medium_streams_pool, mem_flags, dma); in xhci_alloc_stream_ctx()
566 return dma_pool_zalloc(xhci->small_streams_pool, mem_flags, dma); in xhci_alloc_stream_ctx()
588 struct xhci_stream_info *xhci_alloc_stream_info(struct xhci_hcd *xhci, in xhci_alloc_stream_info() argument
598 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_alloc_stream_info()
600 xhci_dbg(xhci, "Allocating %u streams and %u stream context array entries.\n", in xhci_alloc_stream_info()
602 if (xhci->cmd_ring_reserved_trbs == MAX_RSVD_CMD_TRBS) { in xhci_alloc_stream_info()
603 xhci_dbg(xhci, "Command ring has no reserved TRBs available\n"); in xhci_alloc_stream_info()
606 xhci->cmd_ring_reserved_trbs++; in xhci_alloc_stream_info()
624 stream_info->stream_ctx_array = xhci_alloc_stream_ctx(xhci, in xhci_alloc_stream_info()
632 xhci_alloc_command_with_ctx(xhci, true, mem_flags); in xhci_alloc_stream_info()
645 xhci_ring_alloc(xhci, 2, 1, TYPE_STREAM, max_packet, in xhci_alloc_stream_info()
658 xhci_dbg(xhci, "Setting stream %d ring ptr to 0x%08llx\n", cur_stream, addr); in xhci_alloc_stream_info()
662 xhci_ring_free(xhci, cur_ring); in xhci_alloc_stream_info()
680 xhci_ring_free(xhci, cur_ring); in xhci_alloc_stream_info()
684 xhci_free_command(xhci, stream_info->free_streams_command); in xhci_alloc_stream_info()
686 xhci_free_stream_ctx(xhci, in xhci_alloc_stream_info()
695 xhci->cmd_ring_reserved_trbs--; in xhci_alloc_stream_info()
702 void xhci_setup_streams_ep_input_ctx(struct xhci_hcd *xhci, in xhci_setup_streams_ep_input_ctx() argument
712 xhci_dbg_trace(xhci, trace_xhci_dbg_context_change, in xhci_setup_streams_ep_input_ctx()
739 void xhci_free_stream_info(struct xhci_hcd *xhci, in xhci_free_stream_info() argument
752 xhci_ring_free(xhci, cur_ring); in xhci_free_stream_info()
756 xhci_free_command(xhci, stream_info->free_streams_command); in xhci_free_stream_info()
757 xhci->cmd_ring_reserved_trbs--; in xhci_free_stream_info()
759 xhci_free_stream_ctx(xhci, in xhci_free_stream_info()
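xhci_alloc_stream_ctx() and xhci_free_stream_ctx() above pick a backing allocator by the size of the stream context array: small arrays come from small_streams_pool, mid-sized ones from medium_streams_pool, and larger arrays in the full driver fall back to dma_alloc_coherent(). The tier limits below are placeholders, not the driver's constants; the point of the sketch is that alloc and free must agree on the tier, which is why both sides branch on the same size.

#include <linux/dmapool.h>
#include <linux/dma-mapping.h>

/* Placeholder tier limits; the real driver uses its own array-size constants. */
#define DEMO_SMALL_ARRAY_BYTES	256
#define DEMO_MEDIUM_ARRAY_BYTES	1024

static void *demo_stream_ctx_alloc(struct device *dev,
				   struct dma_pool *small_pool,
				   struct dma_pool *medium_pool,
				   size_t bytes, dma_addr_t *dma, gfp_t flags)
{
	if (bytes <= DEMO_SMALL_ARRAY_BYTES)
		return dma_pool_zalloc(small_pool, flags, dma);
	if (bytes <= DEMO_MEDIUM_ARRAY_BYTES)
		return dma_pool_zalloc(medium_pool, flags, dma);
	return dma_alloc_coherent(dev, bytes, dma, flags);
}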
771 static void xhci_free_tt_info(struct xhci_hcd *xhci, in xhci_free_tt_info() argument
783 xhci_dbg(xhci, "Bad rhub port.\n"); in xhci_free_tt_info()
787 tt_list_head = &(xhci->rh_bw[virt_dev->rhub_port->hw_portnum].tts); in xhci_free_tt_info()
800 int xhci_alloc_tt_info(struct xhci_hcd *xhci, in xhci_alloc_tt_info() argument
808 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_alloc_tt_info()
824 &xhci->rh_bw[virt_dev->rhub_port->hw_portnum].tts); in xhci_alloc_tt_info()
835 xhci_free_tt_info(xhci, virt_dev, virt_dev->udev->slot_id); in xhci_alloc_tt_info()
845 void xhci_free_virt_device(struct xhci_hcd *xhci, int slot_id) in xhci_free_virt_device() argument
852 if (slot_id == 0 || !xhci->devs[slot_id]) in xhci_free_virt_device()
855 dev = xhci->devs[slot_id]; in xhci_free_virt_device()
857 xhci->dcbaa->dev_context_ptrs[slot_id] = 0; in xhci_free_virt_device()
868 xhci_ring_free(xhci, dev->eps[i].ring); in xhci_free_virt_device()
870 xhci_free_stream_info(xhci, in xhci_free_virt_device()
882 xhci_dbg(xhci, "Slot %u endpoint %u not removed from BW list!\n", in xhci_free_virt_device()
887 xhci_free_tt_info(xhci, dev, slot_id); in xhci_free_virt_device()
889 xhci_update_tt_active_eps(xhci, dev, old_active_eps); in xhci_free_virt_device()
892 xhci_free_container_ctx(xhci, dev->in_ctx); in xhci_free_virt_device()
894 xhci_free_container_ctx(xhci, dev->out_ctx); in xhci_free_virt_device()
900 kfree(xhci->devs[slot_id]); in xhci_free_virt_device()
901 xhci->devs[slot_id] = NULL; in xhci_free_virt_device()
910 static void xhci_free_virt_devices_depth_first(struct xhci_hcd *xhci, int slot_id) in xhci_free_virt_devices_depth_first() argument
917 vdev = xhci->devs[slot_id]; in xhci_free_virt_devices_depth_first()
922 xhci_dbg(xhci, "Bad rhub port.\n"); in xhci_free_virt_devices_depth_first()
926 tt_list_head = &(xhci->rh_bw[vdev->rhub_port->hw_portnum].tts); in xhci_free_virt_devices_depth_first()
931 for (i = 1; i < HCS_MAX_SLOTS(xhci->hcs_params1); i++) { in xhci_free_virt_devices_depth_first()
932 vdev = xhci->devs[i]; in xhci_free_virt_devices_depth_first()
935 xhci, i); in xhci_free_virt_devices_depth_first()
941 xhci_debugfs_remove_slot(xhci, slot_id); in xhci_free_virt_devices_depth_first()
942 xhci_free_virt_device(xhci, slot_id); in xhci_free_virt_devices_depth_first()
945 int xhci_alloc_virt_device(struct xhci_hcd *xhci, int slot_id, in xhci_alloc_virt_device() argument
952 if (slot_id == 0 || xhci->devs[slot_id]) { in xhci_alloc_virt_device()
953 xhci_warn(xhci, "Bad Slot ID %d\n", slot_id); in xhci_alloc_virt_device()
964 dev->out_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_DEVICE, flags); in xhci_alloc_virt_device()
968 xhci_dbg(xhci, "Slot %d output ctx = 0x%pad (dma)\n", slot_id, &dev->out_ctx->dma); in xhci_alloc_virt_device()
971 dev->in_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_INPUT, flags); in xhci_alloc_virt_device()
975 xhci_dbg(xhci, "Slot %d input ctx = 0x%pad (dma)\n", slot_id, &dev->in_ctx->dma); in xhci_alloc_virt_device()
981 dev->eps[i].xhci = xhci; in xhci_alloc_virt_device()
987 dev->eps[0].ring = xhci_ring_alloc(xhci, 2, 1, TYPE_CTRL, 0, flags); in xhci_alloc_virt_device()
994 xhci->dcbaa->dev_context_ptrs[slot_id] = cpu_to_le64(dev->out_ctx->dma); in xhci_alloc_virt_device()
995 xhci_dbg(xhci, "Set slot id %d dcbaa entry %p to 0x%llx\n", in xhci_alloc_virt_device()
997 &xhci->dcbaa->dev_context_ptrs[slot_id], in xhci_alloc_virt_device()
998 le64_to_cpu(xhci->dcbaa->dev_context_ptrs[slot_id])); in xhci_alloc_virt_device()
1002 xhci->devs[slot_id] = dev; in xhci_alloc_virt_device()
1008 xhci_free_container_ctx(xhci, dev->in_ctx); in xhci_alloc_virt_device()
1010 xhci_free_container_ctx(xhci, dev->out_ctx); in xhci_alloc_virt_device()
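xhci_alloc_virt_device() above only publishes a slot once everything it owns exists: the output context, input context, and EP0 ring are allocated first, then the DCBAA entry is pointed at the output context's DMA address, and the error path unwinds in reverse order. A reduced, hypothetical sketch of that ordering; struct demo_ctx is a stand-in and the actual DMA-backed context allocation is elided.

#include <linux/slab.h>
#include <asm/byteorder.h>

struct demo_ctx {
	dma_addr_t	dma;	/* where the controller will find this context */
	/* context bytes would live behind this in the real driver */
};

static int demo_alloc_slot(struct demo_ctx **out_ctx, struct demo_ctx **in_ctx,
			   __le64 *dcbaa_entry, gfp_t flags)
{
	*out_ctx = kzalloc(sizeof(**out_ctx), flags);
	if (!*out_ctx)
		return -ENOMEM;

	*in_ctx = kzalloc(sizeof(**in_ctx), flags);
	if (!*in_ctx) {
		kfree(*out_ctx);	/* unwind in reverse order */
		*out_ctx = NULL;
		return -ENOMEM;
	}

	/* Publish last: once the DCBAA entry is set, the slot is "live". */
	*dcbaa_entry = cpu_to_le64((*out_ctx)->dma);
	return 0;
}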
1016 void xhci_copy_ep0_dequeue_into_input_ctx(struct xhci_hcd *xhci, in xhci_copy_ep0_dequeue_into_input_ctx() argument
1023 virt_dev = xhci->devs[udev->slot_id]; in xhci_copy_ep0_dequeue_into_input_ctx()
1024 ep0_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, 0); in xhci_copy_ep0_dequeue_into_input_ctx()
1048 static struct xhci_port *xhci_find_rhub_port(struct xhci_hcd *xhci, struct usb_device *udev) in xhci_find_rhub_port() argument
1055 hcd = xhci_get_usb3_hcd(xhci); in xhci_find_rhub_port()
1057 hcd = xhci->main_hcd; in xhci_find_rhub_port()
1068 int xhci_setup_addressable_virt_dev(struct xhci_hcd *xhci, struct usb_device *udev) in xhci_setup_addressable_virt_dev() argument
1075 dev = xhci->devs[udev->slot_id]; in xhci_setup_addressable_virt_dev()
1078 xhci_warn(xhci, "Slot ID %d is not assigned to this device\n", in xhci_setup_addressable_virt_dev()
1082 ep0_ctx = xhci_get_ep_ctx(xhci, dev->in_ctx, 0); in xhci_setup_addressable_virt_dev()
1083 slot_ctx = xhci_get_slot_ctx(xhci, dev->in_ctx); in xhci_setup_addressable_virt_dev()
1114 dev->rhub_port = xhci_find_rhub_port(xhci, udev); in xhci_setup_addressable_virt_dev()
1121 xhci_dbg(xhci, "Slot ID %d: HW portnum %d, hcd portnum %d\n", in xhci_setup_addressable_virt_dev()
1131 dev->bw_table = &xhci->rh_bw[dev->rhub_port->hw_portnum].bw_table; in xhci_setup_addressable_virt_dev()
1136 rh_bw = &xhci->rh_bw[dev->rhub_port->hw_portnum]; in xhci_setup_addressable_virt_dev()
1151 xhci_warn(xhci, "WARN: Didn't find a matching TT\n"); in xhci_setup_addressable_virt_dev()
1161 xhci_dbg(xhci, "udev->tt = %p\n", udev->tt); in xhci_setup_addressable_virt_dev()
1162 xhci_dbg(xhci, "udev->ttport = 0x%x\n", udev->ttport); in xhci_setup_addressable_virt_dev()
1389 int xhci_endpoint_init(struct xhci_hcd *xhci, in xhci_endpoint_init() argument
1409 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, ep_index); in xhci_endpoint_init()
1429 if ((xhci->quirks & XHCI_LIMIT_ENDPOINT_INTERVAL_7) && in xhci_endpoint_init()
1456 if (usb_endpoint_xfer_control(&ep->desc) && xhci->hci_version >= 0x100) in xhci_endpoint_init()
1459 if ((xhci->hci_version > 0x100) && HCC2_LEC(xhci->hcc_params2)) in xhci_endpoint_init()
1464 xhci_ring_alloc(xhci, 2, 1, ring_type, max_packet, mem_flags); in xhci_endpoint_init()
1488 void xhci_endpoint_zero(struct xhci_hcd *xhci, in xhci_endpoint_zero() argument
1496 ep_ctx = xhci_get_ep_ctx(xhci, virt_dev->in_ctx, ep_index); in xhci_endpoint_zero()
1517 void xhci_update_bw_info(struct xhci_hcd *xhci, in xhci_update_bw_info() argument
1542 ep_ctx = xhci_get_ep_ctx(xhci, in_ctx, i); in xhci_update_bw_info()
1575 void xhci_endpoint_copy(struct xhci_hcd *xhci, in xhci_endpoint_copy() argument
1583 out_ep_ctx = xhci_get_ep_ctx(xhci, out_ctx, ep_index); in xhci_endpoint_copy()
1584 in_ep_ctx = xhci_get_ep_ctx(xhci, in_ctx, ep_index); in xhci_endpoint_copy()
1590 if (xhci->quirks & XHCI_MTK_HOST) { in xhci_endpoint_copy()
1601 void xhci_slot_copy(struct xhci_hcd *xhci, in xhci_slot_copy() argument
1608 in_slot_ctx = xhci_get_slot_ctx(xhci, in_ctx); in xhci_slot_copy()
1609 out_slot_ctx = xhci_get_slot_ctx(xhci, out_ctx); in xhci_slot_copy()
1618 static int scratchpad_alloc(struct xhci_hcd *xhci, gfp_t flags) in scratchpad_alloc() argument
1621 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in scratchpad_alloc()
1622 int num_sp = HCS_MAX_SCRATCHPAD(xhci->hcs_params2); in scratchpad_alloc()
1624 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in scratchpad_alloc()
1630 xhci->scratchpad = kzalloc_node(sizeof(*xhci->scratchpad), flags, in scratchpad_alloc()
1632 if (!xhci->scratchpad) in scratchpad_alloc()
1635 xhci->scratchpad->sp_array = dma_alloc_coherent(dev, in scratchpad_alloc()
1637 &xhci->scratchpad->sp_dma, flags); in scratchpad_alloc()
1638 if (!xhci->scratchpad->sp_array) in scratchpad_alloc()
1641 xhci->scratchpad->sp_buffers = kcalloc_node(num_sp, sizeof(void *), in scratchpad_alloc()
1643 if (!xhci->scratchpad->sp_buffers) in scratchpad_alloc()
1646 xhci->dcbaa->dev_context_ptrs[0] = cpu_to_le64(xhci->scratchpad->sp_dma); in scratchpad_alloc()
1649 void *buf = dma_alloc_coherent(dev, xhci->page_size, &dma, in scratchpad_alloc()
1654 xhci->scratchpad->sp_array[i] = dma; in scratchpad_alloc()
1655 xhci->scratchpad->sp_buffers[i] = buf; in scratchpad_alloc()
1662 dma_free_coherent(dev, xhci->page_size, in scratchpad_alloc()
1663 xhci->scratchpad->sp_buffers[i], in scratchpad_alloc()
1664 xhci->scratchpad->sp_array[i]); in scratchpad_alloc()
1666 kfree(xhci->scratchpad->sp_buffers); in scratchpad_alloc()
1670 xhci->scratchpad->sp_array, in scratchpad_alloc()
1671 xhci->scratchpad->sp_dma); in scratchpad_alloc()
1674 kfree(xhci->scratchpad); in scratchpad_alloc()
1675 xhci->scratchpad = NULL; in scratchpad_alloc()
1681 static void scratchpad_free(struct xhci_hcd *xhci) in scratchpad_free() argument
1685 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in scratchpad_free()
1687 if (!xhci->scratchpad) in scratchpad_free()
1690 num_sp = HCS_MAX_SCRATCHPAD(xhci->hcs_params2); in scratchpad_free()
1693 dma_free_coherent(dev, xhci->page_size, in scratchpad_free()
1694 xhci->scratchpad->sp_buffers[i], in scratchpad_free()
1695 xhci->scratchpad->sp_array[i]); in scratchpad_free()
1697 kfree(xhci->scratchpad->sp_buffers); in scratchpad_free()
1699 xhci->scratchpad->sp_array, in scratchpad_free()
1700 xhci->scratchpad->sp_dma); in scratchpad_free()
1701 kfree(xhci->scratchpad); in scratchpad_free()
1702 xhci->scratchpad = NULL; in scratchpad_free()
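scratchpad_alloc() and scratchpad_free() above implement the xHCI scratchpad contract: DCBAA entry 0 points at an array of DMA addresses, one per scratchpad buffer (HCS_MAX_SCRATCHPAD of them), and each buffer is one controller page. On a partial failure, exactly the buffers allocated so far are freed before the arrays are torn down. A reduced sketch of that fill-and-rollback loop; the demo_* name and flat parameters are illustrative.

#include <linux/dma-mapping.h>
#include <linux/types.h>

static int demo_scratchpad_fill(struct device *dev, size_t page_size,
				u64 *sp_array, void **sp_buffers,
				int num_sp, gfp_t flags)
{
	int i;

	for (i = 0; i < num_sp; i++) {
		dma_addr_t dma;
		void *buf = dma_alloc_coherent(dev, page_size, &dma, flags);

		if (!buf)
			goto rollback;
		sp_array[i] = dma;	/* what the controller reads */
		sp_buffers[i] = buf;	/* what the CPU needs for freeing */
	}
	return 0;

rollback:
	while (--i >= 0)
		dma_free_coherent(dev, page_size, sp_buffers[i], sp_array[i]);
	return -ENOMEM;
}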
1705 struct xhci_command *xhci_alloc_command(struct xhci_hcd *xhci, in xhci_alloc_command() argument
1709 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_alloc_command()
1733 struct xhci_command *xhci_alloc_command_with_ctx(struct xhci_hcd *xhci, in xhci_alloc_command_with_ctx() argument
1738 command = xhci_alloc_command(xhci, allocate_completion, mem_flags); in xhci_alloc_command_with_ctx()
1742 command->in_ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_INPUT, in xhci_alloc_command_with_ctx()
1757 void xhci_free_command(struct xhci_hcd *xhci, in xhci_free_command() argument
1760 xhci_free_container_ctx(xhci, in xhci_free_command()
1766 static int xhci_alloc_erst(struct xhci_hcd *xhci, in xhci_alloc_erst() argument
1777 erst->entries = dma_alloc_coherent(xhci_to_hcd(xhci)->self.sysdev, in xhci_alloc_erst()
1797 xhci_remove_interrupter(struct xhci_hcd *xhci, struct xhci_interrupter *ir) in xhci_remove_interrupter() argument
1814 xhci_write_64(xhci, ERST_EHB, &ir->ir_set->erst_dequeue); in xhci_remove_interrupter()
1819 xhci_free_interrupter(struct xhci_hcd *xhci, struct xhci_interrupter *ir) in xhci_free_interrupter() argument
1821 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_free_interrupter()
1836 xhci_ring_free(xhci, ir->event_ring); in xhci_free_interrupter()
1845 struct xhci_hcd *xhci = hcd_to_xhci(hcd); in xhci_remove_secondary_interrupter() local
1848 spin_lock_irq(&xhci->lock); in xhci_remove_secondary_interrupter()
1851 if (!ir || !ir->intr_num || ir->intr_num >= xhci->max_interrupters) { in xhci_remove_secondary_interrupter()
1852 xhci_dbg(xhci, "Invalid secondary interrupter, can't remove\n"); in xhci_remove_secondary_interrupter()
1853 spin_unlock_irq(&xhci->lock); in xhci_remove_secondary_interrupter()
1859 xhci_remove_interrupter(xhci, ir); in xhci_remove_secondary_interrupter()
1860 xhci->interrupters[intr_num] = NULL; in xhci_remove_secondary_interrupter()
1862 spin_unlock_irq(&xhci->lock); in xhci_remove_secondary_interrupter()
1864 xhci_free_interrupter(xhci, ir); in xhci_remove_secondary_interrupter()
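xhci_alloc_erst() above sizes the Event Ring Segment Table to the event ring's segment count and fills one entry per segment with that segment's DMA address and TRB count. A hedged sketch of the entry layout and fill loop: demo_erst_entry mirrors the 16-byte entry defined by the xHCI spec, and the segment walk is simplified to a flat array of DMA addresses.

#include <linux/types.h>
#include <asm/byteorder.h>

/* One 16-byte Event Ring Segment Table entry, per the xHCI spec. */
struct demo_erst_entry {
	__le64	seg_addr;	/* base DMA address of the segment */
	__le32	seg_size;	/* number of TRBs in the segment */
	__le32	rsvd;
};

static void demo_fill_erst(struct demo_erst_entry *entries,
			   const dma_addr_t *seg_dma, unsigned int num_segs,
			   unsigned int trbs_per_seg)
{
	unsigned int i;

	for (i = 0; i < num_segs; i++) {
		entries[i].seg_addr = cpu_to_le64(seg_dma[i]);
		entries[i].seg_size = cpu_to_le32(trbs_per_seg);
		entries[i].rsvd = 0;
	}
}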
1868 void xhci_mem_cleanup(struct xhci_hcd *xhci) in xhci_mem_cleanup() argument
1870 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_mem_cleanup()
1873 cancel_delayed_work_sync(&xhci->cmd_timer); in xhci_mem_cleanup()
1875 for (i = 0; xhci->interrupters && i < xhci->max_interrupters; i++) { in xhci_mem_cleanup()
1876 if (xhci->interrupters[i]) { in xhci_mem_cleanup()
1877 xhci_remove_interrupter(xhci, xhci->interrupters[i]); in xhci_mem_cleanup()
1878 xhci_free_interrupter(xhci, xhci->interrupters[i]); in xhci_mem_cleanup()
1879 xhci->interrupters[i] = NULL; in xhci_mem_cleanup()
1882 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed interrupters"); in xhci_mem_cleanup()
1884 if (xhci->cmd_ring) in xhci_mem_cleanup()
1885 xhci_ring_free(xhci, xhci->cmd_ring); in xhci_mem_cleanup()
1886 xhci->cmd_ring = NULL; in xhci_mem_cleanup()
1887 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed command ring"); in xhci_mem_cleanup()
1888 xhci_cleanup_command_queue(xhci); in xhci_mem_cleanup()
1890 num_ports = HCS_MAX_PORTS(xhci->hcs_params1); in xhci_mem_cleanup()
1891 for (i = 0; i < num_ports && xhci->rh_bw; i++) { in xhci_mem_cleanup()
1892 struct xhci_interval_bw_table *bwt = &xhci->rh_bw[i].bw_table; in xhci_mem_cleanup()
1900 for (i = HCS_MAX_SLOTS(xhci->hcs_params1); i > 0; i--) in xhci_mem_cleanup()
1901 xhci_free_virt_devices_depth_first(xhci, i); in xhci_mem_cleanup()
1903 dma_pool_destroy(xhci->segment_pool); in xhci_mem_cleanup()
1904 xhci->segment_pool = NULL; in xhci_mem_cleanup()
1905 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed segment pool"); in xhci_mem_cleanup()
1907 dma_pool_destroy(xhci->device_pool); in xhci_mem_cleanup()
1908 xhci->device_pool = NULL; in xhci_mem_cleanup()
1909 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "Freed device context pool"); in xhci_mem_cleanup()
1911 dma_pool_destroy(xhci->small_streams_pool); in xhci_mem_cleanup()
1912 xhci->small_streams_pool = NULL; in xhci_mem_cleanup()
1913 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_cleanup()
1916 dma_pool_destroy(xhci->medium_streams_pool); in xhci_mem_cleanup()
1917 xhci->medium_streams_pool = NULL; in xhci_mem_cleanup()
1918 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_cleanup()
1921 if (xhci->dcbaa) in xhci_mem_cleanup()
1922 dma_free_coherent(dev, sizeof(*xhci->dcbaa), in xhci_mem_cleanup()
1923 xhci->dcbaa, xhci->dcbaa->dma); in xhci_mem_cleanup()
1924 xhci->dcbaa = NULL; in xhci_mem_cleanup()
1926 scratchpad_free(xhci); in xhci_mem_cleanup()
1928 if (!xhci->rh_bw) in xhci_mem_cleanup()
1933 list_for_each_entry_safe(tt, n, &xhci->rh_bw[i].tts, tt_list) { in xhci_mem_cleanup()
1940 xhci->cmd_ring_reserved_trbs = 0; in xhci_mem_cleanup()
1941 xhci->usb2_rhub.num_ports = 0; in xhci_mem_cleanup()
1942 xhci->usb3_rhub.num_ports = 0; in xhci_mem_cleanup()
1943 xhci->num_active_eps = 0; in xhci_mem_cleanup()
1944 kfree(xhci->usb2_rhub.ports); in xhci_mem_cleanup()
1945 kfree(xhci->usb3_rhub.ports); in xhci_mem_cleanup()
1946 kfree(xhci->hw_ports); in xhci_mem_cleanup()
1947 kfree(xhci->rh_bw); in xhci_mem_cleanup()
1948 for (i = 0; i < xhci->num_port_caps; i++) in xhci_mem_cleanup()
1949 kfree(xhci->port_caps[i].psi); in xhci_mem_cleanup()
1950 kfree(xhci->port_caps); in xhci_mem_cleanup()
1951 kfree(xhci->interrupters); in xhci_mem_cleanup()
1952 xhci->num_port_caps = 0; in xhci_mem_cleanup()
1954 xhci->usb2_rhub.ports = NULL; in xhci_mem_cleanup()
1955 xhci->usb3_rhub.ports = NULL; in xhci_mem_cleanup()
1956 xhci->hw_ports = NULL; in xhci_mem_cleanup()
1957 xhci->rh_bw = NULL; in xhci_mem_cleanup()
1958 xhci->port_caps = NULL; in xhci_mem_cleanup()
1959 xhci->interrupters = NULL; in xhci_mem_cleanup()
1961 xhci->page_size = 0; in xhci_mem_cleanup()
1962 xhci->page_shift = 0; in xhci_mem_cleanup()
1963 xhci->usb2_rhub.bus_state.bus_suspended = 0; in xhci_mem_cleanup()
1964 xhci->usb3_rhub.bus_state.bus_suspended = 0; in xhci_mem_cleanup()
1967 static void xhci_set_hc_event_deq(struct xhci_hcd *xhci, struct xhci_interrupter *ir) in xhci_set_hc_event_deq() argument
1974 xhci_warn(xhci, "WARN something wrong with SW event ring dequeue ptr.\n"); in xhci_set_hc_event_deq()
1979 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_set_hc_event_deq()
1981 xhci_write_64(xhci, deq & ERST_PTR_MASK, &ir->ir_set->erst_dequeue); in xhci_set_hc_event_deq()
1984 static void xhci_add_in_port(struct xhci_hcd *xhci, unsigned int num_ports, in xhci_add_in_port() argument
1991 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_add_in_port()
1999 rhub = &xhci->usb3_rhub; in xhci_add_in_port()
2013 if (xhci->quirks & XHCI_ZHAOXIN_HOST) { in xhci_add_in_port()
2019 rhub = &xhci->usb2_rhub; in xhci_add_in_port()
2021 xhci_warn(xhci, "Ignoring unknown port speed, Ext Cap %p, revision = 0x%x\n", in xhci_add_in_port()
2031 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2039 port_cap = &xhci->port_caps[xhci->num_port_caps++]; in xhci_add_in_port()
2040 if (xhci->num_port_caps > max_caps) in xhci_add_in_port()
2063 if (xhci->quirks & XHCI_ZHAOXIN_HOST && in xhci_add_in_port()
2068 xhci_dbg(xhci, "PSIV:%d PSIE:%d PLT:%d PFD:%d LP:%d PSIM:%d\n", in xhci_add_in_port()
2087 if ((xhci->hci_version >= 0x100) && (major_revision != 0x03) && in xhci_add_in_port()
2089 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_add_in_port()
2091 xhci->hw_lpm_support = 1; in xhci_add_in_port()
2096 struct xhci_port *hw_port = &xhci->hw_ports[i]; in xhci_add_in_port()
2099 xhci_warn(xhci, "Duplicate port entry, Ext Cap %p, port %u\n", addr, i); in xhci_add_in_port()
2100 xhci_warn(xhci, "Port was marked as USB %u, duplicated as USB %u\n", in xhci_add_in_port()
2119 static void xhci_create_rhub_port_array(struct xhci_hcd *xhci, in xhci_create_rhub_port_array() argument
2124 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_create_rhub_port_array()
2133 for (i = 0; i < HCS_MAX_PORTS(xhci->hcs_params1); i++) { in xhci_create_rhub_port_array()
2134 if (xhci->hw_ports[i].rhub != rhub || in xhci_create_rhub_port_array()
2135 xhci->hw_ports[i].hcd_portnum == DUPLICATE_ENTRY) in xhci_create_rhub_port_array()
2137 xhci->hw_ports[i].hcd_portnum = port_index; in xhci_create_rhub_port_array()
2138 rhub->ports[port_index] = &xhci->hw_ports[i]; in xhci_create_rhub_port_array()
2152 static int xhci_setup_port_arrays(struct xhci_hcd *xhci, gfp_t flags) in xhci_setup_port_arrays() argument
2160 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_setup_port_arrays()
2162 num_ports = HCS_MAX_PORTS(xhci->hcs_params1); in xhci_setup_port_arrays()
2163 xhci->hw_ports = kcalloc_node(num_ports, sizeof(*xhci->hw_ports), in xhci_setup_port_arrays()
2165 if (!xhci->hw_ports) in xhci_setup_port_arrays()
2169 xhci->hw_ports[i].addr = &xhci->op_regs->port_status_base + in xhci_setup_port_arrays()
2171 xhci->hw_ports[i].hw_portnum = i; in xhci_setup_port_arrays()
2173 init_completion(&xhci->hw_ports[i].rexit_done); in xhci_setup_port_arrays()
2174 init_completion(&xhci->hw_ports[i].u3exit_done); in xhci_setup_port_arrays()
2177 xhci->rh_bw = kcalloc_node(num_ports, sizeof(*xhci->rh_bw), flags, in xhci_setup_port_arrays()
2179 if (!xhci->rh_bw) in xhci_setup_port_arrays()
2184 INIT_LIST_HEAD(&xhci->rh_bw[i].tts); in xhci_setup_port_arrays()
2185 bw_table = &xhci->rh_bw[i].bw_table; in xhci_setup_port_arrays()
2189 base = &xhci->cap_regs->hc_capbase; in xhci_setup_port_arrays()
2193 xhci_err(xhci, "No Extended Capability registers, unable to set up roothub\n"); in xhci_setup_port_arrays()
2205 xhci->port_caps = kcalloc_node(cap_count, sizeof(*xhci->port_caps), in xhci_setup_port_arrays()
2207 if (!xhci->port_caps) in xhci_setup_port_arrays()
2213 xhci_add_in_port(xhci, num_ports, base + offset, cap_count); in xhci_setup_port_arrays()
2214 if (xhci->usb2_rhub.num_ports + xhci->usb3_rhub.num_ports == in xhci_setup_port_arrays()
2220 if (xhci->usb2_rhub.num_ports == 0 && xhci->usb3_rhub.num_ports == 0) { in xhci_setup_port_arrays()
2221 xhci_warn(xhci, "No ports on the roothubs?\n"); in xhci_setup_port_arrays()
2224 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2226 xhci->usb2_rhub.num_ports, xhci->usb3_rhub.num_ports); in xhci_setup_port_arrays()
2231 if (xhci->usb3_rhub.num_ports > USB_SS_MAXPORTS) { in xhci_setup_port_arrays()
2232 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2235 xhci->usb3_rhub.num_ports = USB_SS_MAXPORTS; in xhci_setup_port_arrays()
2237 if (xhci->usb2_rhub.num_ports > USB_MAXCHILDREN) { in xhci_setup_port_arrays()
2238 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_setup_port_arrays()
2241 xhci->usb2_rhub.num_ports = USB_MAXCHILDREN; in xhci_setup_port_arrays()
2244 if (!xhci->usb2_rhub.num_ports) in xhci_setup_port_arrays()
2245 xhci_info(xhci, "USB2 root hub has no ports\n"); in xhci_setup_port_arrays()
2247 if (!xhci->usb3_rhub.num_ports) in xhci_setup_port_arrays()
2248 xhci_info(xhci, "USB3 root hub has no ports\n"); in xhci_setup_port_arrays()
2250 xhci_create_rhub_port_array(xhci, &xhci->usb2_rhub, flags); in xhci_setup_port_arrays()
2251 xhci_create_rhub_port_array(xhci, &xhci->usb3_rhub, flags); in xhci_setup_port_arrays()
2257 xhci_alloc_interrupter(struct xhci_hcd *xhci, unsigned int segs, gfp_t flags) in xhci_alloc_interrupter() argument
2259 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_alloc_interrupter()
2267 max_segs = BIT(HCS_ERST_MAX(xhci->hcs_params2)); in xhci_alloc_interrupter()
2274 ir->event_ring = xhci_ring_alloc(xhci, segs, 1, TYPE_EVENT, 0, flags); in xhci_alloc_interrupter()
2276 xhci_warn(xhci, "Failed to allocate interrupter event ring\n"); in xhci_alloc_interrupter()
2281 ret = xhci_alloc_erst(xhci, ir->event_ring, &ir->erst, flags); in xhci_alloc_interrupter()
2283 xhci_warn(xhci, "Failed to allocate interrupter erst\n"); in xhci_alloc_interrupter()
2284 xhci_ring_free(xhci, ir->event_ring); in xhci_alloc_interrupter()
2293 xhci_add_interrupter(struct xhci_hcd *xhci, struct xhci_interrupter *ir, in xhci_add_interrupter() argument
2299 if (intr_num >= xhci->max_interrupters) { in xhci_add_interrupter()
2300 xhci_warn(xhci, "Can't add interrupter %d, max interrupters %d\n", in xhci_add_interrupter()
2301 intr_num, xhci->max_interrupters); in xhci_add_interrupter()
2305 if (xhci->interrupters[intr_num]) { in xhci_add_interrupter()
2306 xhci_warn(xhci, "Interrupter %d already set up\n", intr_num); in xhci_add_interrupter()
2310 xhci->interrupters[intr_num] = ir; in xhci_add_interrupter()
2312 ir->ir_set = &xhci->run_regs->ir_set[intr_num]; in xhci_add_interrupter()
2320 erst_base = xhci_read_64(xhci, &ir->ir_set->erst_base); in xhci_add_interrupter()
2323 if (xhci->quirks & XHCI_WRITE_64_HI_LO) in xhci_add_interrupter()
2326 xhci_write_64(xhci, erst_base, &ir->ir_set->erst_base); in xhci_add_interrupter()
2329 xhci_set_hc_event_deq(xhci, ir); in xhci_add_interrupter()
2338 struct xhci_hcd *xhci = hcd_to_xhci(hcd); in xhci_create_secondary_interrupter() local
2343 if (!xhci->interrupters || xhci->max_interrupters <= 1) in xhci_create_secondary_interrupter()
2346 ir = xhci_alloc_interrupter(xhci, segs, GFP_KERNEL); in xhci_create_secondary_interrupter()
2350 spin_lock_irq(&xhci->lock); in xhci_create_secondary_interrupter()
2353 for (i = 1; i < xhci->max_interrupters; i++) { in xhci_create_secondary_interrupter()
2354 if (xhci->interrupters[i] == NULL) { in xhci_create_secondary_interrupter()
2355 err = xhci_add_interrupter(xhci, ir, i); in xhci_create_secondary_interrupter()
2360 spin_unlock_irq(&xhci->lock); in xhci_create_secondary_interrupter()
2363 xhci_warn(xhci, "Failed to add secondary interrupter, max interrupters %d\n", in xhci_create_secondary_interrupter()
2364 xhci->max_interrupters); in xhci_create_secondary_interrupter()
2365 xhci_free_interrupter(xhci, ir); in xhci_create_secondary_interrupter()
2371 xhci_warn(xhci, "Failed to set interrupter %d moderation to %uns\n", in xhci_create_secondary_interrupter()
2374 xhci_dbg(xhci, "Add secondary interrupter %d, max interrupters %d\n", in xhci_create_secondary_interrupter()
2375 i, xhci->max_interrupters); in xhci_create_secondary_interrupter()
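xhci_create_secondary_interrupter() above allocates the interrupter (event ring plus ERST) outside the lock, then takes xhci->lock only to claim the first free index >= 1 via xhci_add_interrupter(); index 0 stays reserved for the primary interrupter. A minimal sketch of that claim-under-lock pattern; the slots array and lock parameter are stand-ins, not the driver's types.

#include <linux/spinlock.h>
#include <linux/errno.h>

/* Returns the claimed index, or -EBUSY if every secondary slot is taken. */
static int demo_claim_interrupter(spinlock_t *lock, void **slots,
				  unsigned int max_slots, void *ir)
{
	unsigned int i;
	int ret = -EBUSY;

	spin_lock_irq(lock);
	for (i = 1; i < max_slots; i++) {	/* slot 0 is the primary */
		if (!slots[i]) {
			slots[i] = ir;
			ret = i;
			break;
		}
	}
	spin_unlock_irq(lock);

	return ret;
}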
2381 int xhci_mem_init(struct xhci_hcd *xhci, gfp_t flags) in xhci_mem_init() argument
2384 struct device *dev = xhci_to_hcd(xhci)->self.sysdev; in xhci_mem_init()
2391 INIT_LIST_HEAD(&xhci->cmd_list); in xhci_mem_init()
2394 INIT_DELAYED_WORK(&xhci->cmd_timer, xhci_handle_command_timeout); in xhci_mem_init()
2395 init_completion(&xhci->cmd_ring_stop_completion); in xhci_mem_init()
2397 page_size = readl(&xhci->op_regs->page_size); in xhci_mem_init()
2398 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2402 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2405 xhci_warn(xhci, "WARN: no supported page size\n"); in xhci_mem_init()
2407 xhci->page_shift = 12; in xhci_mem_init()
2408 xhci->page_size = 1 << xhci->page_shift; in xhci_mem_init()
2409 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2410 "HCD page size set to %iK", xhci->page_size / 1024); in xhci_mem_init()
2416 val = HCS_MAX_SLOTS(readl(&xhci->cap_regs->hcs_params1)); in xhci_mem_init()
2417 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2419 val2 = readl(&xhci->op_regs->config_reg); in xhci_mem_init()
2421 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2423 writel(val, &xhci->op_regs->config_reg); in xhci_mem_init()
2429 xhci->dcbaa = dma_alloc_coherent(dev, sizeof(*xhci->dcbaa), &dma, in xhci_mem_init()
2431 if (!xhci->dcbaa) in xhci_mem_init()
2433 xhci->dcbaa->dma = dma; in xhci_mem_init()
2434 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2436 &xhci->dcbaa->dma, xhci->dcbaa); in xhci_mem_init()
2437 xhci_write_64(xhci, dma, &xhci->op_regs->dcbaa_ptr); in xhci_mem_init()
2446 if (xhci->quirks & XHCI_ZHAOXIN_TRB_FETCH) in xhci_mem_init()
2447 xhci->segment_pool = dma_pool_create("xHCI ring segments", dev, in xhci_mem_init()
2448 TRB_SEGMENT_SIZE * 2, TRB_SEGMENT_SIZE * 2, xhci->page_size * 2); in xhci_mem_init()
2450 xhci->segment_pool = dma_pool_create("xHCI ring segments", dev, in xhci_mem_init()
2451 TRB_SEGMENT_SIZE, TRB_SEGMENT_SIZE, xhci->page_size); in xhci_mem_init()
2454 xhci->device_pool = dma_pool_create("xHCI input/output contexts", dev, in xhci_mem_init()
2455 2112, 64, xhci->page_size); in xhci_mem_init()
2456 if (!xhci->segment_pool || !xhci->device_pool) in xhci_mem_init()
2462 xhci->small_streams_pool = in xhci_mem_init()
2465 xhci->medium_streams_pool = in xhci_mem_init()
2472 if (!xhci->small_streams_pool || !xhci->medium_streams_pool) in xhci_mem_init()
2476 xhci->cmd_ring = xhci_ring_alloc(xhci, 1, 1, TYPE_COMMAND, 0, flags); in xhci_mem_init()
2477 if (!xhci->cmd_ring) in xhci_mem_init()
2479 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2480 "Allocated command ring at %p", xhci->cmd_ring); in xhci_mem_init()
2481 xhci_dbg_trace(xhci, trace_xhci_dbg_init, "First segment DMA is 0x%pad", in xhci_mem_init()
2482 &xhci->cmd_ring->first_seg->dma); in xhci_mem_init()
2485 val_64 = xhci_read_64(xhci, &xhci->op_regs->cmd_ring); in xhci_mem_init()
2487 (xhci->cmd_ring->first_seg->dma & (u64) ~CMD_RING_RSVD_BITS) | in xhci_mem_init()
2488 xhci->cmd_ring->cycle_state; in xhci_mem_init()
2489 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2491 xhci_write_64(xhci, val_64, &xhci->op_regs->cmd_ring); in xhci_mem_init()
2497 xhci->cmd_ring_reserved_trbs++; in xhci_mem_init()
2499 val = readl(&xhci->cap_regs->db_off); in xhci_mem_init()
2501 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2504 xhci->dba = (void __iomem *) xhci->cap_regs + val; in xhci_mem_init()
2507 xhci_dbg_trace(xhci, trace_xhci_dbg_init, in xhci_mem_init()
2509 xhci->interrupters = kcalloc_node(xhci->max_interrupters, sizeof(*xhci->interrupters), in xhci_mem_init()
2512 ir = xhci_alloc_interrupter(xhci, 0, flags); in xhci_mem_init()
2516 if (xhci_add_interrupter(xhci, ir, 0)) in xhci_mem_init()
2527 xhci->devs[i] = NULL; in xhci_mem_init()
2529 if (scratchpad_alloc(xhci, flags)) in xhci_mem_init()
2531 if (xhci_setup_port_arrays(xhci, flags)) in xhci_mem_init()
2538 temp = readl(&xhci->op_regs->dev_notification); in xhci_mem_init()
2541 writel(temp, &xhci->op_regs->dev_notification); in xhci_mem_init()
2546 xhci_halt(xhci); in xhci_mem_init()
2547 xhci_reset(xhci, XHCI_RESET_SHORT_USEC); in xhci_mem_init()
2548 xhci_mem_cleanup(xhci); in xhci_mem_init()
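For reference on the page_size/page_shift lines early in xhci_mem_init() above: the xHCI PAGESIZE register encodes supported page sizes as a bitmask in its low 16 bits, where bit n set means 2^(n+12) bytes are supported; the driver pins itself to the 4K case (bit 0, page_shift 12). A hedged decode sketch, not the driver's code:

#include <linux/types.h>
#include <linux/bitops.h>

/* Return the smallest page size the controller advertises, or 0 if none. */
static unsigned int demo_decode_page_size(u32 pagesize_reg)
{
	unsigned int bits = pagesize_reg & 0xffff;	/* low 16 bits are valid */

	if (!bits)
		return 0;
	/* bit n set => 2^(n + 12) byte pages; ffs() is 1-based */
	return 1u << (ffs(bits) - 1 + 12);
}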