Lines matching "num-rings" (query: +full:num +full:-rings) in the TI K3 Ring Accelerator driver (drivers/soc/ti/k3-ringacc.c)
1 // SPDX-License-Identifier: GPL-2.0
5 * Copyright (C) 2019 Texas Instruments Incorporated - http://www.ti.com
8 #include <linux/dma-mapping.h>
14 #include <linux/dma/ti-cppi5.h>
15 #include <linux/soc/ti/k3-ringacc.h>
28 * struct k3_ring_rt_regs - The RA realtime Control/Status Registers region
57 * struct k3_ring_fifo_regs - The Ring Accelerator Queues Registers region
72 * struct k3_ringacc_proxy_gcfg_regs - RA Proxy Global Config MMIO Region
85 * struct k3_ringacc_proxy_target_regs - Proxy Datapath MMIO Region
100 #define K3_RINGACC_PROXY_NOT_USED (-1)
121 * struct k3_ring_state - Internal state tracking structure
138 * struct k3_ring - RA Ring descriptor
153 * @use_count: Use count for shared rings
187 * struct k3_ringacc - Rings accelerator descriptor
194 * @rm_gp_range: general purpose rings range from tisci
198 * @rings: array of rings descriptors (struct @k3_ring)
200 * @req_lock: protect rings allocation
201 * @tisci: pointer to the ti-sci handle
202 * @tisci_ring_ops: ti-sci rings ops
203 * @tisci_dev_id: ti-sci device id
211 u32 num_rings; /* number of rings in Ringacc module */
219 struct k3_ring *rings; member
221 struct mutex req_lock; /* protect rings allocation */
232 * struct k3_ringacc_soc_data - Rings accelerator SoC data
242 return readl(&ring->rt->occ) & K3_RINGACC_RT_OCC_MASK; in k3_ringacc_ring_read_occ()
249 val = readl(&ring->rt->occ); in k3_ringacc_ring_update_occ()
251 ring->state.occ = val & K3_RINGACC_RT_OCC_MASK; in k3_ringacc_ring_update_occ()
252 ring->state.tdown_complete = !!(val & K3_DMARING_RT_OCC_TDOWN_COMPLETE); in k3_ringacc_ring_update_occ()
257 return K3_RINGACC_FIFO_WINDOW_SIZE_BYTES - in k3_ringacc_ring_get_fifo_pos()
258 (4 << ring->elm_size); in k3_ringacc_ring_get_fifo_pos()
263 return (ring->ring_mem_virt + idx * (4 << ring->elm_size)); in k3_ringacc_get_elm_addr()
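The two helpers above encode the ring geometry: an element occupies (4 << elm_size) bytes, so element idx sits at offset idx * (4 << elm_size) from the ring memory base. A minimal standalone restatement (example_elm_addr() is a hypothetical helper, not part of the driver):

	static void *example_elm_addr(void *ring_mem_virt, u32 idx, u32 elm_size)
	{
		/* element size is stored as a power-of-two exponent */
		return ring_mem_virt + idx * (4 << elm_size);
	}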
312 struct device *dev = ring->parent->dev; in k3_ringacc_ring_dump()
314 dev_dbg(dev, "dump ring: %d\n", ring->ring_id); in k3_ringacc_ring_dump()
315 dev_dbg(dev, "dump mem virt %p, dma %pad\n", ring->ring_mem_virt, in k3_ringacc_ring_dump()
316 &ring->ring_mem_dma); in k3_ringacc_ring_dump()
318 ring->elm_size, ring->size, ring->mode, ring->proxy_id); in k3_ringacc_ring_dump()
319 dev_dbg(dev, "dump flags %08X\n", ring->flags); in k3_ringacc_ring_dump()
321 dev_dbg(dev, "dump ring_rt_regs: db%08x\n", readl(&ring->rt->db)); in k3_ringacc_ring_dump()
322 dev_dbg(dev, "dump occ%08x\n", readl(&ring->rt->occ)); in k3_ringacc_ring_dump()
323 dev_dbg(dev, "dump indx%08x\n", readl(&ring->rt->indx)); in k3_ringacc_ring_dump()
324 dev_dbg(dev, "dump hwocc%08x\n", readl(&ring->rt->hwocc)); in k3_ringacc_ring_dump()
325 dev_dbg(dev, "dump hwindx%08x\n", readl(&ring->rt->hwindx)); in k3_ringacc_ring_dump()
327 if (ring->ring_mem_virt) in k3_ringacc_ring_dump()
329 16, 1, ring->ring_mem_virt, 16 * 8, false); in k3_ringacc_ring_dump()
337 mutex_lock(&ringacc->req_lock); in k3_ringacc_request_ring()
339 if (!try_module_get(ringacc->dev->driver->owner)) in k3_ringacc_request_ring()
345 &ringacc->rm_gp_range->desc[0]; in k3_ringacc_request_ring()
348 size = gp_rings->start + gp_rings->num; in k3_ringacc_request_ring()
349 id = find_next_zero_bit(ringacc->rings_inuse, size, in k3_ringacc_request_ring()
350 gp_rings->start); in k3_ringacc_request_ring()
357 if (test_bit(id, ringacc->rings_inuse) && in k3_ringacc_request_ring()
358 !(ringacc->rings[id].flags & K3_RING_FLAG_SHARED)) in k3_ringacc_request_ring()
360 else if (ringacc->rings[id].flags & K3_RING_FLAG_SHARED) in k3_ringacc_request_ring()
364 proxy_id = find_first_zero_bit(ringacc->proxy_inuse, in k3_ringacc_request_ring()
365 ringacc->num_proxies); in k3_ringacc_request_ring()
366 if (proxy_id == ringacc->num_proxies) in k3_ringacc_request_ring()
371 set_bit(proxy_id, ringacc->proxy_inuse); in k3_ringacc_request_ring()
372 ringacc->rings[id].proxy_id = proxy_id; in k3_ringacc_request_ring()
373 dev_dbg(ringacc->dev, "Giving ring#%d proxy#%d\n", id, in k3_ringacc_request_ring()
376 dev_dbg(ringacc->dev, "Giving ring#%d\n", id); in k3_ringacc_request_ring()
379 set_bit(id, ringacc->rings_inuse); in k3_ringacc_request_ring()
381 ringacc->rings[id].use_count++; in k3_ringacc_request_ring()
382 mutex_unlock(&ringacc->req_lock); in k3_ringacc_request_ring()
383 return &ringacc->rings[id]; in k3_ringacc_request_ring()
386 module_put(ringacc->dev->driver->owner); in k3_ringacc_request_ring()
389 mutex_unlock(&ringacc->req_lock); in k3_ringacc_request_ring()
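For reference, this is how a client obtains a general-purpose ring through the request path shown above; a hedged usage sketch (K3_RINGACC_RING_ID_ANY asks the driver to pick any free GP ring, and NULL is returned on failure):

	struct k3_ring *ring;

	/* request any free general-purpose ring from the tisci GP range */
	ring = k3_ringacc_request_ring(ringacc, K3_RINGACC_RING_ID_ANY, 0);
	if (!ring)
		return -ENODEV;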
401 * DMA rings must be requested by ID, completion ring is the reverse in k3_dmaring_request_dual_ring()
405 return -EINVAL; in k3_dmaring_request_dual_ring()
407 mutex_lock(&ringacc->req_lock); in k3_dmaring_request_dual_ring()
409 if (!try_module_get(ringacc->dev->driver->owner)) { in k3_dmaring_request_dual_ring()
410 ret = -EINVAL; in k3_dmaring_request_dual_ring()
414 if (test_bit(fwd_id, ringacc->rings_inuse)) { in k3_dmaring_request_dual_ring()
415 ret = -EBUSY; in k3_dmaring_request_dual_ring()
419 *fwd_ring = &ringacc->rings[fwd_id]; in k3_dmaring_request_dual_ring()
420 *compl_ring = &ringacc->rings[fwd_id + ringacc->num_rings]; in k3_dmaring_request_dual_ring()
421 set_bit(fwd_id, ringacc->rings_inuse); in k3_dmaring_request_dual_ring()
422 ringacc->rings[fwd_id].use_count++; in k3_dmaring_request_dual_ring()
423 dev_dbg(ringacc->dev, "Giving ring#%d\n", fwd_id); in k3_dmaring_request_dual_ring()
425 mutex_unlock(&ringacc->req_lock); in k3_dmaring_request_dual_ring()
429 module_put(ringacc->dev->driver->owner); in k3_dmaring_request_dual_ring()
431 mutex_unlock(&ringacc->req_lock); in k3_dmaring_request_dual_ring()
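The dual-ring request above backs the pair API used by the K3 DMA drivers; a consumer-side sketch (fwd_id and compl_id are placeholder ring IDs):

	struct k3_ring *fwd_ring, *compl_ring;
	int ret;

	/* request a forward/completion ring pair by ID */
	ret = k3_ringacc_request_rings_pair(ringacc, fwd_id, compl_id,
					    &fwd_ring, &compl_ring);
	if (ret)
		return ret;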
443 return -EINVAL; in k3_ringacc_request_rings_pair()
445 if (ringacc->dma_rings) in k3_ringacc_request_rings_pair()
451 return -ENODEV; in k3_ringacc_request_rings_pair()
456 ret = -ENODEV; in k3_ringacc_request_rings_pair()
466 struct k3_ringacc *ringacc = ring->parent; in k3_ringacc_ring_reset_sci()
469 ring_cfg.nav_id = ringacc->tisci_dev_id; in k3_ringacc_ring_reset_sci()
470 ring_cfg.index = ring->ring_id; in k3_ringacc_ring_reset_sci()
472 ring_cfg.count = ring->size; in k3_ringacc_ring_reset_sci()
474 ret = ringacc->tisci_ring_ops->set_cfg(ringacc->tisci, &ring_cfg); in k3_ringacc_ring_reset_sci()
476 dev_err(ringacc->dev, "TISCI reset ring fail (%d) ring_idx %d\n", in k3_ringacc_ring_reset_sci()
477 ret, ring->ring_id); in k3_ringacc_ring_reset_sci()
482 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_reset()
485 memset(&ring->state, 0, sizeof(ring->state)); in k3_ringacc_ring_reset()
495 struct k3_ringacc *ringacc = ring->parent; in k3_ringacc_ring_reconfig_qmode_sci()
498 ring_cfg.nav_id = ringacc->tisci_dev_id; in k3_ringacc_ring_reconfig_qmode_sci()
499 ring_cfg.index = ring->ring_id; in k3_ringacc_ring_reconfig_qmode_sci()
503 ret = ringacc->tisci_ring_ops->set_cfg(ringacc->tisci, &ring_cfg); in k3_ringacc_ring_reconfig_qmode_sci()
505 dev_err(ringacc->dev, "TISCI reconf qmode fail (%d) ring_idx %d\n", in k3_ringacc_ring_reconfig_qmode_sci()
506 ret, ring->ring_id); in k3_ringacc_ring_reconfig_qmode_sci()
511 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_reset_dma()
514 if (!ring->parent->dma_ring_reset_quirk) in k3_ringacc_ring_reset_dma()
523 dev_dbg(ring->parent->dev, "%s %u occ: %u\n", __func__, in k3_ringacc_ring_reset_dma()
524 ring->ring_id, occ); in k3_ringacc_ring_reset_dma()
525 /* TI-SCI ring reset */ in k3_ringacc_ring_reset_dma()
532 if (ring->mode != K3_RINGACC_RING_MODE_RING) in k3_ringacc_ring_reset_dma()
538 * counter (which is 21-bits wide) to 0. in k3_ringacc_ring_reset_dma()
540 db_ring_cnt = (1U << 22) - occ; in k3_ringacc_ring_reset_dma()
553 writel(db_ring_cnt_cur, &ring->rt->db); in k3_ringacc_ring_reset_dma()
554 db_ring_cnt -= db_ring_cnt_cur; in k3_ringacc_ring_reset_dma()
558 if (ring->mode != K3_RINGACC_RING_MODE_RING) in k3_ringacc_ring_reset_dma()
559 k3_ringacc_ring_reconfig_qmode_sci(ring, ring->mode); in k3_ringacc_ring_reset_dma()
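Piecing the fragments above together: for a RING-mode ring the reset quirk rings the doorbell enough times to wrap the hardware occupancy counter back to zero, in chunks. A sketch under the assumption that each doorbell write is capped at some maximum count (max_db_cnt below stands in for that cap, which is not shown in this listing):

	u32 db_ring_cnt = (1U << 22) - occ;	/* writes needed to wrap the counter */

	while (db_ring_cnt) {
		u32 db_ring_cnt_cur = min(db_ring_cnt, max_db_cnt);

		writel(db_ring_cnt_cur, &ring->rt->db);	/* ring doorbell in chunks */
		db_ring_cnt -= db_ring_cnt_cur;
	}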
571 struct k3_ringacc *ringacc = ring->parent; in k3_ringacc_ring_free_sci()
574 ring_cfg.nav_id = ringacc->tisci_dev_id; in k3_ringacc_ring_free_sci()
575 ring_cfg.index = ring->ring_id; in k3_ringacc_ring_free_sci()
578 ret = ringacc->tisci_ring_ops->set_cfg(ringacc->tisci, &ring_cfg); in k3_ringacc_ring_free_sci()
580 dev_err(ringacc->dev, "TISCI ring free fail (%d) ring_idx %d\n", in k3_ringacc_ring_free_sci()
581 ret, ring->ring_id); in k3_ringacc_ring_free_sci()
589 return -EINVAL; in k3_ringacc_ring_free()
591 ringacc = ring->parent; in k3_ringacc_ring_free()
594 * DMA rings: rings share memory and configuration, only the forward ring in k3_ringacc_ring_free()
597 if (ringacc->dma_rings && (ring->flags & K3_RING_FLAG_REVERSE)) in k3_ringacc_ring_free()
600 dev_dbg(ring->parent->dev, "flags: 0x%08x\n", ring->flags); in k3_ringacc_ring_free()
602 if (!test_bit(ring->ring_id, ringacc->rings_inuse)) in k3_ringacc_ring_free()
603 return -EINVAL; in k3_ringacc_ring_free()
605 mutex_lock(&ringacc->req_lock); in k3_ringacc_ring_free()
607 if (--ring->use_count) in k3_ringacc_ring_free()
610 if (!(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_free()
615 dma_free_coherent(ring->dma_dev, in k3_ringacc_ring_free()
616 ring->size * (4 << ring->elm_size), in k3_ringacc_ring_free()
617 ring->ring_mem_virt, ring->ring_mem_dma); in k3_ringacc_ring_free()
618 ring->flags = 0; in k3_ringacc_ring_free()
619 ring->ops = NULL; in k3_ringacc_ring_free()
620 ring->dma_dev = NULL; in k3_ringacc_ring_free()
621 ring->asel = 0; in k3_ringacc_ring_free()
623 if (ring->proxy_id != K3_RINGACC_PROXY_NOT_USED) { in k3_ringacc_ring_free()
624 clear_bit(ring->proxy_id, ringacc->proxy_inuse); in k3_ringacc_ring_free()
625 ring->proxy = NULL; in k3_ringacc_ring_free()
626 ring->proxy_id = K3_RINGACC_PROXY_NOT_USED; in k3_ringacc_ring_free()
630 clear_bit(ring->ring_id, ringacc->rings_inuse); in k3_ringacc_ring_free()
632 module_put(ringacc->dev->driver->owner); in k3_ringacc_ring_free()
635 mutex_unlock(&ringacc->req_lock); in k3_ringacc_ring_free()
643 return -EINVAL; in k3_ringacc_get_ring_id()
645 return ring->ring_id; in k3_ringacc_get_ring_id()
652 return -EINVAL; in k3_ringacc_get_tisci_dev_id()
654 return ring->parent->tisci_dev_id; in k3_ringacc_get_tisci_dev_id()
663 return -EINVAL; in k3_ringacc_get_ring_irq_num()
665 irq_num = msi_get_virq(ring->parent->dev, ring->ring_id); in k3_ringacc_get_ring_irq_num()
667 irq_num = -EINVAL; in k3_ringacc_get_ring_irq_num()
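A client usually resolves the ring's MSI-mapped interrupt with the helper above and installs a handler; a hedged sketch (my_ring_irq_handler and priv are hypothetical, and the trigger flag mirrors what the K3 DMA/networking drivers typically use):

	int irq, ret;

	irq = k3_ringacc_get_ring_irq_num(compl_ring);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(dev, irq, my_ring_irq_handler, IRQF_TRIGGER_HIGH,
			       dev_name(dev), priv);
	if (ret)
		return ret;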
675 struct k3_ringacc *ringacc = ring->parent; in k3_ringacc_ring_cfg_sci()
678 if (!ringacc->tisci) in k3_ringacc_ring_cfg_sci()
679 return -EINVAL; in k3_ringacc_ring_cfg_sci()
681 ring_cfg.nav_id = ringacc->tisci_dev_id; in k3_ringacc_ring_cfg_sci()
682 ring_cfg.index = ring->ring_id; in k3_ringacc_ring_cfg_sci()
684 ring_cfg.addr_lo = lower_32_bits(ring->ring_mem_dma); in k3_ringacc_ring_cfg_sci()
685 ring_cfg.addr_hi = upper_32_bits(ring->ring_mem_dma); in k3_ringacc_ring_cfg_sci()
686 ring_cfg.count = ring->size; in k3_ringacc_ring_cfg_sci()
687 ring_cfg.mode = ring->mode; in k3_ringacc_ring_cfg_sci()
688 ring_cfg.size = ring->elm_size; in k3_ringacc_ring_cfg_sci()
689 ring_cfg.asel = ring->asel; in k3_ringacc_ring_cfg_sci()
691 ret = ringacc->tisci_ring_ops->set_cfg(ringacc->tisci, &ring_cfg); in k3_ringacc_ring_cfg_sci()
693 dev_err(ringacc->dev, "TISCI config ring fail (%d) ring_idx %d\n", in k3_ringacc_ring_cfg_sci()
694 ret, ring->ring_id); in k3_ringacc_ring_cfg_sci()
705 if (cfg->elm_size != K3_RINGACC_RING_ELSIZE_8 || in k3_dmaring_cfg()
706 cfg->mode != K3_RINGACC_RING_MODE_RING || in k3_dmaring_cfg()
707 cfg->size & ~K3_DMARING_CFG_RING_SIZE_ELCNT_MASK) in k3_dmaring_cfg()
708 return -EINVAL; in k3_dmaring_cfg()
710 ringacc = ring->parent; in k3_dmaring_cfg()
713 * DMA rings: rings share memory and configuration, only the forward ring in k3_dmaring_cfg()
716 if (ringacc->dma_rings && (ring->flags & K3_RING_FLAG_REVERSE)) in k3_dmaring_cfg()
719 if (!test_bit(ring->ring_id, ringacc->rings_inuse)) in k3_dmaring_cfg()
720 return -EINVAL; in k3_dmaring_cfg()
722 ring->size = cfg->size; in k3_dmaring_cfg()
723 ring->elm_size = cfg->elm_size; in k3_dmaring_cfg()
724 ring->mode = cfg->mode; in k3_dmaring_cfg()
725 ring->asel = cfg->asel; in k3_dmaring_cfg()
726 ring->dma_dev = cfg->dma_dev; in k3_dmaring_cfg()
727 if (!ring->dma_dev) { in k3_dmaring_cfg()
728 dev_warn(ringacc->dev, "dma_dev is not provided for ring%d\n", in k3_dmaring_cfg()
729 ring->ring_id); in k3_dmaring_cfg()
730 ring->dma_dev = ringacc->dev; in k3_dmaring_cfg()
733 memset(&ring->state, 0, sizeof(ring->state)); in k3_dmaring_cfg()
735 ring->ops = &k3_dmaring_fwd_ops; in k3_dmaring_cfg()
737 ring->ring_mem_virt = dma_alloc_coherent(ring->dma_dev, in k3_dmaring_cfg()
738 ring->size * (4 << ring->elm_size), in k3_dmaring_cfg()
739 &ring->ring_mem_dma, GFP_KERNEL); in k3_dmaring_cfg()
740 if (!ring->ring_mem_virt) { in k3_dmaring_cfg()
741 dev_err(ringacc->dev, "Failed to alloc ring mem\n"); in k3_dmaring_cfg()
742 ret = -ENOMEM; in k3_dmaring_cfg()
750 ring->flags |= K3_RING_FLAG_BUSY; in k3_dmaring_cfg()
754 /* DMA rings: configure reverse ring */ in k3_dmaring_cfg()
755 reverse_ring = &ringacc->rings[ring->ring_id + ringacc->num_rings]; in k3_dmaring_cfg()
756 reverse_ring->size = cfg->size; in k3_dmaring_cfg()
757 reverse_ring->elm_size = cfg->elm_size; in k3_dmaring_cfg()
758 reverse_ring->mode = cfg->mode; in k3_dmaring_cfg()
759 reverse_ring->asel = cfg->asel; in k3_dmaring_cfg()
760 memset(&reverse_ring->state, 0, sizeof(reverse_ring->state)); in k3_dmaring_cfg()
761 reverse_ring->ops = &k3_dmaring_reverse_ops; in k3_dmaring_cfg()
763 reverse_ring->ring_mem_virt = ring->ring_mem_virt; in k3_dmaring_cfg()
764 reverse_ring->ring_mem_dma = ring->ring_mem_dma; in k3_dmaring_cfg()
765 reverse_ring->flags |= K3_RING_FLAG_BUSY; in k3_dmaring_cfg()
771 dma_free_coherent(ring->dma_dev, in k3_dmaring_cfg()
772 ring->size * (4 << ring->elm_size), in k3_dmaring_cfg()
773 ring->ring_mem_virt, in k3_dmaring_cfg()
774 ring->ring_mem_dma); in k3_dmaring_cfg()
776 ring->ops = NULL; in k3_dmaring_cfg()
777 ring->proxy = NULL; in k3_dmaring_cfg()
778 ring->dma_dev = NULL; in k3_dmaring_cfg()
779 ring->asel = 0; in k3_dmaring_cfg()
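As the fragments above show, only the forward ring of a DMA pair carries the configuration and the backing memory; the reverse ring is filled in from it and marked busy without its own allocation. A hedged consumer-side sketch (assumes fwd_ring came from k3_ringacc_request_rings_pair() and ring_cfg is an already-filled struct k3_ring_cfg):

	/* configuring the forward ring also sets up the paired reverse ring,
	 * which re-uses the same ring memory
	 */
	ret = k3_ringacc_ring_cfg(fwd_ring, &ring_cfg);
	if (ret)
		return ret;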
789 return -EINVAL; in k3_ringacc_ring_cfg()
791 ringacc = ring->parent; in k3_ringacc_ring_cfg()
793 if (ringacc->dma_rings) in k3_ringacc_ring_cfg()
796 if (cfg->elm_size > K3_RINGACC_RING_ELSIZE_256 || in k3_ringacc_ring_cfg()
797 cfg->mode >= K3_RINGACC_RING_MODE_INVALID || in k3_ringacc_ring_cfg()
798 cfg->size & ~K3_RINGACC_CFG_RING_SIZE_ELCNT_MASK || in k3_ringacc_ring_cfg()
799 !test_bit(ring->ring_id, ringacc->rings_inuse)) in k3_ringacc_ring_cfg()
800 return -EINVAL; in k3_ringacc_ring_cfg()
802 if (cfg->mode == K3_RINGACC_RING_MODE_MESSAGE && in k3_ringacc_ring_cfg()
803 ring->proxy_id == K3_RINGACC_PROXY_NOT_USED && in k3_ringacc_ring_cfg()
804 cfg->elm_size > K3_RINGACC_RING_ELSIZE_8) { in k3_ringacc_ring_cfg()
805 dev_err(ringacc->dev, in k3_ringacc_ring_cfg()
807 4 << ring->elm_size); in k3_ringacc_ring_cfg()
808 return -EINVAL; in k3_ringacc_ring_cfg()
819 if (ring->use_count != 1) in k3_ringacc_ring_cfg()
822 ring->size = cfg->size; in k3_ringacc_ring_cfg()
823 ring->elm_size = cfg->elm_size; in k3_ringacc_ring_cfg()
824 ring->mode = cfg->mode; in k3_ringacc_ring_cfg()
825 memset(&ring->state, 0, sizeof(ring->state)); in k3_ringacc_ring_cfg()
827 if (ring->proxy_id != K3_RINGACC_PROXY_NOT_USED) in k3_ringacc_ring_cfg()
828 ring->proxy = ringacc->proxy_target_base + in k3_ringacc_ring_cfg()
829 ring->proxy_id * K3_RINGACC_PROXY_TARGET_STEP; in k3_ringacc_ring_cfg()
831 switch (ring->mode) { in k3_ringacc_ring_cfg()
833 ring->ops = &k3_ring_mode_ring_ops; in k3_ringacc_ring_cfg()
834 ring->dma_dev = cfg->dma_dev; in k3_ringacc_ring_cfg()
835 if (!ring->dma_dev) in k3_ringacc_ring_cfg()
836 ring->dma_dev = ringacc->dev; in k3_ringacc_ring_cfg()
839 ring->dma_dev = ringacc->dev; in k3_ringacc_ring_cfg()
840 if (ring->proxy) in k3_ringacc_ring_cfg()
841 ring->ops = &k3_ring_mode_proxy_ops; in k3_ringacc_ring_cfg()
843 ring->ops = &k3_ring_mode_msg_ops; in k3_ringacc_ring_cfg()
846 ring->ops = NULL; in k3_ringacc_ring_cfg()
847 ret = -EINVAL; in k3_ringacc_ring_cfg()
851 ring->ring_mem_virt = dma_alloc_coherent(ring->dma_dev, in k3_ringacc_ring_cfg()
852 ring->size * (4 << ring->elm_size), in k3_ringacc_ring_cfg()
853 &ring->ring_mem_dma, GFP_KERNEL); in k3_ringacc_ring_cfg()
854 if (!ring->ring_mem_virt) { in k3_ringacc_ring_cfg()
855 dev_err(ringacc->dev, "Failed to alloc ring mem\n"); in k3_ringacc_ring_cfg()
856 ret = -ENOMEM; in k3_ringacc_ring_cfg()
865 ring->flags |= K3_RING_FLAG_BUSY; in k3_ringacc_ring_cfg()
866 ring->flags |= (cfg->flags & K3_RINGACC_RING_SHARED) ? in k3_ringacc_ring_cfg()
874 dma_free_coherent(ring->dma_dev, in k3_ringacc_ring_cfg()
875 ring->size * (4 << ring->elm_size), in k3_ringacc_ring_cfg()
876 ring->ring_mem_virt, in k3_ringacc_ring_cfg()
877 ring->ring_mem_dma); in k3_ringacc_ring_cfg()
879 ring->ops = NULL; in k3_ringacc_ring_cfg()
880 ring->dma_dev = NULL; in k3_ringacc_ring_cfg()
882 ring->proxy = NULL; in k3_ringacc_ring_cfg()
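For a general-purpose ring, the caller fills a struct k3_ring_cfg and passes it to the function above; a usage sketch with illustrative values (128 elements of 8 bytes, exposed-ring mode):

	struct k3_ring_cfg cfg = {
		.size = 128,
		.elm_size = K3_RINGACC_RING_ELSIZE_8,
		.mode = K3_RINGACC_RING_MODE_RING,
		.flags = 0,
	};
	int ret;

	ret = k3_ringacc_ring_cfg(ring, &cfg);
	if (ret)
		return ret;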
889 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_get_size()
890 return -EINVAL; in k3_ringacc_ring_get_size()
892 return ring->size; in k3_ringacc_ring_get_size()
898 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_get_free()
899 return -EINVAL; in k3_ringacc_ring_get_free()
901 if (!ring->state.free) in k3_ringacc_ring_get_free()
902 ring->state.free = ring->size - k3_ringacc_ring_read_occ(ring); in k3_ringacc_ring_get_free()
904 return ring->state.free; in k3_ringacc_ring_get_free()
910 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_get_occ()
911 return -EINVAL; in k3_ringacc_ring_get_occ()
939 val = ring->ring_id; in k3_ringacc_ring_cfg_proxy()
941 val |= K3_RINGACC_PROXY_ELSIZE(ring->elm_size); in k3_ringacc_ring_cfg_proxy()
942 writel(val, &ring->proxy->control); in k3_ringacc_ring_cfg_proxy()
951 ptr = (void __iomem *)&ring->proxy->data; in k3_ringacc_ring_access_proxy()
963 return -EINVAL; in k3_ringacc_ring_access_proxy()
971 dev_dbg(ring->parent->dev, in k3_ringacc_ring_access_proxy()
972 "proxy:memcpy_fromio(x): --> ptr(%p), mode:%d\n", ptr, in k3_ringacc_ring_access_proxy()
974 memcpy_fromio(elem, ptr, (4 << ring->elm_size)); in k3_ringacc_ring_access_proxy()
975 ring->state.occ--; in k3_ringacc_ring_access_proxy()
979 dev_dbg(ring->parent->dev, in k3_ringacc_ring_access_proxy()
980 "proxy:memcpy_toio(x): --> ptr(%p), mode:%d\n", ptr, in k3_ringacc_ring_access_proxy()
982 memcpy_toio(ptr, elem, (4 << ring->elm_size)); in k3_ringacc_ring_access_proxy()
983 ring->state.free--; in k3_ringacc_ring_access_proxy()
986 return -EINVAL; in k3_ringacc_ring_access_proxy()
989 dev_dbg(ring->parent->dev, "proxy: free%d occ%d\n", ring->state.free, in k3_ringacc_ring_access_proxy()
990 ring->state.occ); in k3_ringacc_ring_access_proxy()
1026 ptr = (void __iomem *)&ring->fifos->head_data; in k3_ringacc_ring_access_io()
1030 ptr = (void __iomem *)&ring->fifos->tail_data; in k3_ringacc_ring_access_io()
1033 return -EINVAL; in k3_ringacc_ring_access_io()
1041 dev_dbg(ring->parent->dev, in k3_ringacc_ring_access_io()
1042 "memcpy_fromio(x): --> ptr(%p), mode:%d\n", ptr, in k3_ringacc_ring_access_io()
1044 memcpy_fromio(elem, ptr, (4 << ring->elm_size)); in k3_ringacc_ring_access_io()
1045 ring->state.occ--; in k3_ringacc_ring_access_io()
1049 dev_dbg(ring->parent->dev, in k3_ringacc_ring_access_io()
1050 "memcpy_toio(x): --> ptr(%p), mode:%d\n", ptr, in k3_ringacc_ring_access_io()
1052 memcpy_toio(ptr, elem, (4 << ring->elm_size)); in k3_ringacc_ring_access_io()
1053 ring->state.free--; in k3_ringacc_ring_access_io()
1056 return -EINVAL; in k3_ringacc_ring_access_io()
1059 dev_dbg(ring->parent->dev, "free%d index%d occ%d index%d\n", in k3_ringacc_ring_access_io()
1060 ring->state.free, ring->state.windex, ring->state.occ, in k3_ringacc_ring_access_io()
1061 ring->state.rindex); in k3_ringacc_ring_access_io()
1096 *elem &= GENMASK_ULL(K3_ADDRESS_ASEL_SHIFT - 1, 0); in k3_dmaring_remove_asel_from_elem()
1105 * DMA rings: forward ring is always tied to a DMA channel and HW does not in k3_dmaring_fwd_pop()
1110 ring->state.occ = k3_ringacc_ring_read_occ(ring); in k3_dmaring_fwd_pop()
1111 if (ring->state.windex >= ring->state.occ) in k3_dmaring_fwd_pop()
1112 elem_idx = ring->state.windex - ring->state.occ; in k3_dmaring_fwd_pop()
1114 elem_idx = ring->size - (ring->state.occ - ring->state.windex); in k3_dmaring_fwd_pop()
1117 memcpy(elem, elem_ptr, (4 << ring->elm_size)); in k3_dmaring_fwd_pop()
1120 ring->state.occ--; in k3_dmaring_fwd_pop()
1121 writel(-1, &ring->rt->db); in k3_dmaring_fwd_pop()
1123 dev_dbg(ring->parent->dev, "%s: occ%d Windex%d Rindex%d pos_ptr%px\n", in k3_dmaring_fwd_pop()
1124 __func__, ring->state.occ, ring->state.windex, elem_idx, in k3_dmaring_fwd_pop()
1133 elem_ptr = k3_ringacc_get_elm_addr(ring, ring->state.rindex); in k3_dmaring_reverse_pop()
1135 if (ring->state.occ) { in k3_dmaring_reverse_pop()
1136 memcpy(elem, elem_ptr, (4 << ring->elm_size)); in k3_dmaring_reverse_pop()
1139 ring->state.rindex = (ring->state.rindex + 1) % ring->size; in k3_dmaring_reverse_pop()
1140 ring->state.occ--; in k3_dmaring_reverse_pop()
1141 writel(-1 & K3_DMARING_RT_DB_ENTRY_MASK, &ring->rt->db); in k3_dmaring_reverse_pop()
1142 } else if (ring->state.tdown_complete) { in k3_dmaring_reverse_pop()
1146 writel(K3_DMARING_RT_DB_TDOWN_ACK, &ring->rt->db); in k3_dmaring_reverse_pop()
1147 ring->state.tdown_complete = false; in k3_dmaring_reverse_pop()
1150 dev_dbg(ring->parent->dev, "%s: occ%d index%d pos_ptr%px\n", in k3_dmaring_reverse_pop()
1151 __func__, ring->state.occ, ring->state.rindex, elem_ptr); in k3_dmaring_reverse_pop()
1159 elem_ptr = k3_ringacc_get_elm_addr(ring, ring->state.windex); in k3_ringacc_ring_push_mem()
1161 memcpy(elem_ptr, elem, (4 << ring->elm_size)); in k3_ringacc_ring_push_mem()
1162 if (ring->parent->dma_rings) { in k3_ringacc_ring_push_mem()
1165 *addr |= ((u64)ring->asel << K3_ADDRESS_ASEL_SHIFT); in k3_ringacc_ring_push_mem()
1168 ring->state.windex = (ring->state.windex + 1) % ring->size; in k3_ringacc_ring_push_mem()
1169 ring->state.free--; in k3_ringacc_ring_push_mem()
1170 writel(1, &ring->rt->db); in k3_ringacc_ring_push_mem()
1172 dev_dbg(ring->parent->dev, "ring_push_mem: free%d index%d\n", in k3_ringacc_ring_push_mem()
1173 ring->state.free, ring->state.windex); in k3_ringacc_ring_push_mem()
1182 elem_ptr = k3_ringacc_get_elm_addr(ring, ring->state.rindex); in k3_ringacc_ring_pop_mem()
1184 memcpy(elem, elem_ptr, (4 << ring->elm_size)); in k3_ringacc_ring_pop_mem()
1186 ring->state.rindex = (ring->state.rindex + 1) % ring->size; in k3_ringacc_ring_pop_mem()
1187 ring->state.occ--; in k3_ringacc_ring_pop_mem()
1188 writel(-1, &ring->rt->db); in k3_ringacc_ring_pop_mem()
1190 dev_dbg(ring->parent->dev, "ring_pop_mem: occ%d index%d pos_ptr%p\n", in k3_ringacc_ring_pop_mem()
1191 ring->state.occ, ring->state.rindex, elem_ptr); in k3_ringacc_ring_pop_mem()
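The memory-mode push/pop paths above keep a conventional circular-buffer state in software: windex advances modulo the ring size on push, rindex on pop, and the doorbell is written +1 or -1 to inform the hardware. A minimal standalone model of just the index bookkeeping (hypothetical, for illustration only):

	struct ring_sw_state { u32 windex, rindex, free, occ, size; };

	static void model_push(struct ring_sw_state *s)
	{
		s->windex = (s->windex + 1) % s->size;	/* wrap write index */
		s->free--;
	}

	static void model_pop(struct ring_sw_state *s)
	{
		s->rindex = (s->rindex + 1) % s->size;	/* wrap read index */
		s->occ--;
	}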
1197 int ret = -EOPNOTSUPP; in k3_ringacc_ring_push()
1199 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_push()
1200 return -EINVAL; in k3_ringacc_ring_push()
1202 dev_dbg(ring->parent->dev, "ring_push: free%d index%d\n", in k3_ringacc_ring_push()
1203 ring->state.free, ring->state.windex); in k3_ringacc_ring_push()
1206 return -ENOMEM; in k3_ringacc_ring_push()
1208 if (ring->ops && ring->ops->push_tail) in k3_ringacc_ring_push()
1209 ret = ring->ops->push_tail(ring, elem); in k3_ringacc_ring_push()
1217 int ret = -EOPNOTSUPP; in k3_ringacc_ring_push_head()
1219 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_push_head()
1220 return -EINVAL; in k3_ringacc_ring_push_head()
1222 dev_dbg(ring->parent->dev, "ring_push_head: free%d index%d\n", in k3_ringacc_ring_push_head()
1223 ring->state.free, ring->state.windex); in k3_ringacc_ring_push_head()
1226 return -ENOMEM; in k3_ringacc_ring_push_head()
1228 if (ring->ops && ring->ops->push_head) in k3_ringacc_ring_push_head()
1229 ret = ring->ops->push_head(ring, elem); in k3_ringacc_ring_push_head()
1237 int ret = -EOPNOTSUPP; in k3_ringacc_ring_pop()
1239 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_pop()
1240 return -EINVAL; in k3_ringacc_ring_pop()
1242 if (!ring->state.occ) in k3_ringacc_ring_pop()
1245 dev_dbg(ring->parent->dev, "ring_pop: occ%d index%d\n", ring->state.occ, in k3_ringacc_ring_pop()
1246 ring->state.rindex); in k3_ringacc_ring_pop()
1248 if (!ring->state.occ && !ring->state.tdown_complete) in k3_ringacc_ring_pop()
1249 return -ENODATA; in k3_ringacc_ring_pop()
1251 if (ring->ops && ring->ops->pop_head) in k3_ringacc_ring_pop()
1252 ret = ring->ops->pop_head(ring, elem); in k3_ringacc_ring_pop()
1260 int ret = -EOPNOTSUPP; in k3_ringacc_ring_pop_tail()
1262 if (!ring || !(ring->flags & K3_RING_FLAG_BUSY)) in k3_ringacc_ring_pop_tail()
1263 return -EINVAL; in k3_ringacc_ring_pop_tail()
1265 if (!ring->state.occ) in k3_ringacc_ring_pop_tail()
1268 dev_dbg(ring->parent->dev, "ring_pop_tail: occ%d index%d\n", in k3_ringacc_ring_pop_tail()
1269 ring->state.occ, ring->state.rindex); in k3_ringacc_ring_pop_tail()
1271 if (!ring->state.occ) in k3_ringacc_ring_pop_tail()
1272 return -ENODATA; in k3_ringacc_ring_pop_tail()
1274 if (ring->ops && ring->ops->pop_tail) in k3_ringacc_ring_pop_tail()
1275 ret = ring->ops->pop_tail(ring, elem); in k3_ringacc_ring_pop_tail()
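Typical consumer usage of the wrappers above: push a descriptor's DMA address to the forward ring, then pop completions until the ring reports empty (-ENODATA). A hedged sketch (desc_dma, fwd_ring and compl_ring are placeholders from the earlier steps):

	int ret;

	ret = k3_ringacc_ring_push(fwd_ring, &desc_dma);	/* queue one element */
	if (ret)
		return ret;

	/* later, e.g. from the completion interrupt handler */
	while (k3_ringacc_ring_pop(compl_ring, &desc_dma) == 0) {
		/* process the descriptor at desc_dma */
	}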
1285 struct k3_ringacc *ringacc = ERR_PTR(-EPROBE_DEFER); in of_k3_ringacc_get_by_phandle()
1290 return ERR_PTR(-ENODEV); in of_k3_ringacc_get_by_phandle()
1294 if (entry->dev->of_node == ringacc_np) { in of_k3_ringacc_get_by_phandle()
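A client driver looks up its ring accelerator instance through the phandle helper above; a sketch, assuming a "ti,ringacc" phandle property as used by the K3 DMA drivers:

	struct k3_ringacc *ringacc;

	ringacc = of_k3_ringacc_get_by_phandle(dev->of_node, "ti,ringacc");
	if (IS_ERR(ringacc))
		return PTR_ERR(ringacc);	/* may be -EPROBE_DEFER */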
1307 struct device_node *node = ringacc->dev->of_node; in k3_ringacc_probe_dt()
1308 struct device *dev = ringacc->dev; in k3_ringacc_probe_dt()
1314 return -ENODEV; in k3_ringacc_probe_dt()
1317 ret = of_property_read_u32(node, "ti,num-rings", &ringacc->num_rings); in k3_ringacc_probe_dt()
1319 dev_err(dev, "ti,num-rings read failure %d\n", ret); in k3_ringacc_probe_dt()
1323 ringacc->tisci = ti_sci_get_by_phandle(node, "ti,sci"); in k3_ringacc_probe_dt()
1324 if (IS_ERR(ringacc->tisci)) { in k3_ringacc_probe_dt()
1325 ret = PTR_ERR(ringacc->tisci); in k3_ringacc_probe_dt()
1326 if (ret != -EPROBE_DEFER) in k3_ringacc_probe_dt()
1328 ringacc->tisci = NULL; in k3_ringacc_probe_dt()
1332 ret = of_property_read_u32(node, "ti,sci-dev-id", in k3_ringacc_probe_dt()
1333 &ringacc->tisci_dev_id); in k3_ringacc_probe_dt()
1335 dev_err(dev, "ti,sci-dev-id read fail %d\n", ret); in k3_ringacc_probe_dt()
1339 pdev->id = ringacc->tisci_dev_id; in k3_ringacc_probe_dt()
1341 ringacc->rm_gp_range = devm_ti_sci_get_of_resource(ringacc->tisci, dev, in k3_ringacc_probe_dt()
1342 ringacc->tisci_dev_id, in k3_ringacc_probe_dt()
1343 "ti,sci-rm-range-gp-rings"); in k3_ringacc_probe_dt()
1344 if (IS_ERR(ringacc->rm_gp_range)) { in k3_ringacc_probe_dt()
1346 return PTR_ERR(ringacc->rm_gp_range); in k3_ringacc_probe_dt()
1349 return ti_sci_inta_msi_domain_alloc_irqs(ringacc->dev, in k3_ringacc_probe_dt()
1350 ringacc->rm_gp_range); in k3_ringacc_probe_dt()
1370 struct device *dev = &pdev->dev; in k3_ringacc_init()
1373 dev->msi.domain = of_msi_get_domain(dev, dev->of_node, in k3_ringacc_init()
1375 if (!dev->msi.domain) in k3_ringacc_init()
1376 return -EPROBE_DEFER; in k3_ringacc_init()
1383 if (soc && soc->data) { in k3_ringacc_init()
1384 const struct k3_ringacc_soc_data *soc_data = soc->data; in k3_ringacc_init()
1386 ringacc->dma_ring_reset_quirk = soc_data->dma_ring_reset_quirk; in k3_ringacc_init()
1397 ringacc->proxy_gcfg = devm_platform_ioremap_resource_byname(pdev, "proxy_gcfg"); in k3_ringacc_init()
1398 if (IS_ERR(ringacc->proxy_gcfg)) in k3_ringacc_init()
1399 return PTR_ERR(ringacc->proxy_gcfg); in k3_ringacc_init()
1401 ringacc->proxy_target_base = devm_platform_ioremap_resource_byname(pdev, in k3_ringacc_init()
1403 if (IS_ERR(ringacc->proxy_target_base)) in k3_ringacc_init()
1404 return PTR_ERR(ringacc->proxy_target_base); in k3_ringacc_init()
1406 ringacc->num_proxies = readl(&ringacc->proxy_gcfg->config) & in k3_ringacc_init()
1409 ringacc->rings = devm_kzalloc(dev, in k3_ringacc_init()
1410 sizeof(*ringacc->rings) * in k3_ringacc_init()
1411 ringacc->num_rings, in k3_ringacc_init()
1413 ringacc->rings_inuse = devm_bitmap_zalloc(dev, ringacc->num_rings, in k3_ringacc_init()
1415 ringacc->proxy_inuse = devm_bitmap_zalloc(dev, ringacc->num_proxies, in k3_ringacc_init()
1418 if (!ringacc->rings || !ringacc->rings_inuse || !ringacc->proxy_inuse) in k3_ringacc_init()
1419 return -ENOMEM; in k3_ringacc_init()
1421 for (i = 0; i < ringacc->num_rings; i++) { in k3_ringacc_init()
1422 ringacc->rings[i].rt = base_rt + in k3_ringacc_init()
1424 ringacc->rings[i].fifos = base_fifo + in k3_ringacc_init()
1426 ringacc->rings[i].parent = ringacc; in k3_ringacc_init()
1427 ringacc->rings[i].ring_id = i; in k3_ringacc_init()
1428 ringacc->rings[i].proxy_id = K3_RINGACC_PROXY_NOT_USED; in k3_ringacc_init()
1431 ringacc->tisci_ring_ops = &ringacc->tisci->ops.rm_ring_ops; in k3_ringacc_init()
1433 dev_info(dev, "Ring Accelerator probed rings:%u, gp-rings[%u,%u] sci-dev-id:%u\n", in k3_ringacc_init()
1434 ringacc->num_rings, in k3_ringacc_init()
1435 ringacc->rm_gp_range->desc[0].start, in k3_ringacc_init()
1436 ringacc->rm_gp_range->desc[0].num, in k3_ringacc_init()
1437 ringacc->tisci_dev_id); in k3_ringacc_init()
1438 dev_info(dev, "dma-ring-reset-quirk: %s\n", in k3_ringacc_init()
1439 ringacc->dma_ring_reset_quirk ? "enabled" : "disabled"); in k3_ringacc_init()
1441 readl(&ringacc->proxy_gcfg->revision), ringacc->num_proxies); in k3_ringacc_init()
1458 { .compatible = "ti,am654-navss-ringacc", .data = &k3_ringacc_data, },
1466 struct device *dev = &pdev->dev; in k3_ringacc_dmarings_init()
1473 return ERR_PTR(-ENOMEM); in k3_ringacc_dmarings_init()
1475 ringacc->dev = dev; in k3_ringacc_dmarings_init()
1476 ringacc->dma_rings = true; in k3_ringacc_dmarings_init()
1477 ringacc->num_rings = data->num_rings; in k3_ringacc_dmarings_init()
1478 ringacc->tisci = data->tisci; in k3_ringacc_dmarings_init()
1479 ringacc->tisci_dev_id = data->tisci_dev_id; in k3_ringacc_dmarings_init()
1481 mutex_init(&ringacc->req_lock); in k3_ringacc_dmarings_init()
1487 ringacc->rings = devm_kzalloc(dev, in k3_ringacc_dmarings_init()
1488 sizeof(*ringacc->rings) * in k3_ringacc_dmarings_init()
1489 ringacc->num_rings * 2, in k3_ringacc_dmarings_init()
1491 ringacc->rings_inuse = devm_bitmap_zalloc(dev, ringacc->num_rings, in k3_ringacc_dmarings_init()
1494 if (!ringacc->rings || !ringacc->rings_inuse) in k3_ringacc_dmarings_init()
1495 return ERR_PTR(-ENOMEM); in k3_ringacc_dmarings_init()
1497 for (i = 0; i < ringacc->num_rings; i++) { in k3_ringacc_dmarings_init()
1498 struct k3_ring *ring = &ringacc->rings[i]; in k3_ringacc_dmarings_init()
1500 ring->rt = base_rt + K3_DMARING_RT_REGS_STEP * i; in k3_ringacc_dmarings_init()
1501 ring->parent = ringacc; in k3_ringacc_dmarings_init()
1502 ring->ring_id = i; in k3_ringacc_dmarings_init()
1503 ring->proxy_id = K3_RINGACC_PROXY_NOT_USED; in k3_ringacc_dmarings_init()
1505 ring = &ringacc->rings[ringacc->num_rings + i]; in k3_ringacc_dmarings_init()
1506 ring->rt = base_rt + K3_DMARING_RT_REGS_STEP * i + in k3_ringacc_dmarings_init()
1508 ring->parent = ringacc; in k3_ringacc_dmarings_init()
1509 ring->ring_id = i; in k3_ringacc_dmarings_init()
1510 ring->proxy_id = K3_RINGACC_PROXY_NOT_USED; in k3_ringacc_dmarings_init()
1511 ring->flags = K3_RING_FLAG_REVERSE; in k3_ringacc_dmarings_init()
1514 ringacc->tisci_ring_ops = &ringacc->tisci->ops.rm_ring_ops; in k3_ringacc_dmarings_init()
1516 dev_info(dev, "Number of rings: %u\n", ringacc->num_rings); in k3_ringacc_dmarings_init()
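The loop above lays the dual rings out as two halves of one array: forward ring i at rings[i] and its reverse (completion) counterpart at rings[num_rings + i], matching the lookup done in k3_dmaring_request_dual_ring(). A hypothetical helper restating that mapping:

	static struct k3_ring *dmaring_reverse_of(struct k3_ringacc *ringacc, u32 fwd_id)
	{
		/* reverse counterpart of forward ring fwd_id */
		return &ringacc->rings[ringacc->num_rings + fwd_id];
	}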
1525 struct device *dev = &pdev->dev; in k3_ringacc_probe()
1529 match_data = of_device_get_match_data(&pdev->dev); in k3_ringacc_probe()
1531 return -ENODEV; in k3_ringacc_probe()
1535 return -ENOMEM; in k3_ringacc_probe()
1537 ringacc->dev = dev; in k3_ringacc_probe()
1538 mutex_init(&ringacc->req_lock); in k3_ringacc_probe()
1539 ringacc->ops = &match_data->ops; in k3_ringacc_probe()
1541 ret = ringacc->ops->init(pdev, ringacc); in k3_ringacc_probe()
1548 list_add_tail(&ringacc->list, &k3_ringacc_list); in k3_ringacc_probe()
1556 struct k3_ringacc *ringacc = dev_get_drvdata(&pdev->dev); in k3_ringacc_remove()
1559 list_del(&ringacc->list); in k3_ringacc_remove()
1567 .name = "k3-ringacc",