Lines matching identifier "ud" (query: full:ud) — TI K3 UDMA driver, drivers/dma/ti/k3-udma.c
280 struct udma_dev *ud; member
402 static int navss_psil_pair(struct udma_dev *ud, u32 src_thread, u32 dst_thread) in navss_psil_pair() argument
404 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in navss_psil_pair()
412 static int navss_psil_unpair(struct udma_dev *ud, u32 src_thread, in navss_psil_unpair() argument
415 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in navss_psil_unpair()
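The two helpers above forward PSI-L thread pair/unpair requests to the TISCI resource manager through tisci_rm. A minimal userspace model of the pairing convention; the 0x8000 destination marker mirrors the driver's K3_PSIL_DST_THREAD_ID_OFFSET constant and should be treated as an assumption here:

#include <stdint.h>
#include <stdio.h>

/* Assumed destination marker; mirrors K3_PSIL_DST_THREAD_ID_OFFSET. */
#define PSIL_DST_THREAD_OFFSET 0x8000u

/* Model of what navss_psil_pair() hands to the TISCI resource manager:
 * a source thread ID and a destination thread ID with the marker set. */
static void psil_pair_model(uint32_t src_thread, uint32_t dst_thread)
{
    printf("pair 0x%04x -> 0x%04x\n", src_thread,
           dst_thread | PSIL_DST_THREAD_OFFSET);
}

int main(void)
{
    psil_pair_model(0x1003, 0x1003); /* made-up thread IDs */
    return 0;
}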
470 struct device *dev = uc->ud->dev; in udma_dump_chan_stdata()
557 struct udma_dev *ud = container_of(work, typeof(*ud), purge_work); in udma_purge_desc_work() local
562 spin_lock_irqsave(&ud->lock, flags); in udma_purge_desc_work()
563 list_splice_tail_init(&ud->desc_to_purge, &head); in udma_purge_desc_work()
564 spin_unlock_irqrestore(&ud->lock, flags); in udma_purge_desc_work()
576 if (!list_empty(&ud->desc_to_purge)) in udma_purge_desc_work()
577 schedule_work(&ud->purge_work); in udma_purge_desc_work()
582 struct udma_dev *ud = to_udma_dev(vd->tx.chan->device); in udma_desc_free() local
596 spin_lock_irqsave(&ud->lock, flags); in udma_desc_free()
597 list_add_tail(&vd->node, &ud->desc_to_purge); in udma_desc_free()
598 spin_unlock_irqrestore(&ud->lock, flags); in udma_desc_free()
600 schedule_work(&ud->purge_work); in udma_desc_free()
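udma_desc_free() and udma_purge_desc_work() implement a deferred-free pattern: completed descriptors are queued on ud->desc_to_purge under ud->lock, and a workqueue item splices the list off and frees the entries outside atomic context. A stripped-down model of the splice-then-free step, with a pthread mutex standing in for the spinlock and a direct call standing in for schedule_work() (all names illustrative):

#include <pthread.h>
#include <stdlib.h>

struct desc { struct desc *next; };

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static struct desc *to_purge; /* models ud->desc_to_purge */

/* Models udma_purge_desc_work(): detach the whole list under the lock,
 * then free each entry with the lock dropped. */
static void purge_work(void)
{
    pthread_mutex_lock(&lock);
    struct desc *head = to_purge;     /* list_splice_tail_init() analogue */
    to_purge = NULL;
    pthread_mutex_unlock(&lock);

    while (head) {
        struct desc *next = head->next;
        free(head);                   /* the actual descriptor teardown */
        head = next;
    }
}

int main(void)
{
    struct desc *d = calloc(1, sizeof(*d));
    pthread_mutex_lock(&lock);
    d->next = to_purge;               /* list_add_tail() analogue */
    to_purge = d;
    pthread_mutex_unlock(&lock);
    purge_work();                     /* schedule_work() analogue */
    return 0;
}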
648 return uc->ud->rx_flush.hwdescs[uc->config.pkt_mode].cppi5_desc_paddr; in udma_get_rx_flush_hwdesc_paddr()
840 uc->ud->ddev.device_free_chan_resources(&uc->vc.chan); in udma_reset_chan()
844 ret = uc->ud->ddev.device_alloc_chan_resources(&uc->vc.chan); in udma_reset_chan()
867 if (uc->ud->match_data->type == DMA_TYPE_UDMA && ucc->pkt_mode && in udma_start_desc()
929 uc->ud->match_data; in udma_start()
1187 dev_err(uc->ud->dev, "not matching descriptors!\n"); in udma_ring_irq_handler()
1247 * @ud: UDMA device
1261 static int __udma_alloc_gp_rflow_range(struct udma_dev *ud, int from, int cnt) in __udma_alloc_gp_rflow_range() argument
1268 tmp_from = ud->rchan_cnt; in __udma_alloc_gp_rflow_range()
1270 if (tmp_from < ud->rchan_cnt) in __udma_alloc_gp_rflow_range()
1273 if (tmp_from + cnt > ud->rflow_cnt) in __udma_alloc_gp_rflow_range()
1276 bitmap_or(tmp, ud->rflow_gp_map, ud->rflow_gp_map_allocated, in __udma_alloc_gp_rflow_range()
1277 ud->rflow_cnt); in __udma_alloc_gp_rflow_range()
1280 ud->rflow_cnt, in __udma_alloc_gp_rflow_range()
1282 if (start >= ud->rflow_cnt) in __udma_alloc_gp_rflow_range()
1288 bitmap_set(ud->rflow_gp_map_allocated, start, cnt); in __udma_alloc_gp_rflow_range()
1292 static int __udma_free_gp_rflow_range(struct udma_dev *ud, int from, int cnt) in __udma_free_gp_rflow_range() argument
1294 if (from < ud->rchan_cnt) in __udma_free_gp_rflow_range()
1296 if (from + cnt > ud->rflow_cnt) in __udma_free_gp_rflow_range()
1299 bitmap_clear(ud->rflow_gp_map_allocated, from, cnt); in __udma_free_gp_rflow_range()
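__udma_alloc_gp_rflow_range() ORs the hardware-owned map with the already-allocated map and searches the combined bitmap for a free window of cnt flows above rchan_cnt; the free path range-checks against rchan_cnt/rflow_cnt and clears the allocated bits. The same arithmetic in a self-contained sketch, with the bitmaps simplified to bool arrays:

#include <stdbool.h>
#include <stdio.h>

#define RFLOW_CNT 16
static bool gp_map[RFLOW_CNT];        /* models ud->rflow_gp_map */
static bool gp_allocated[RFLOW_CNT];  /* models ud->rflow_gp_map_allocated */

/* Find @cnt consecutive flows free in *both* maps, starting at @from. */
static int alloc_gp_range(int from, int cnt)
{
    for (int start = from; start + cnt <= RFLOW_CNT; start++) {
        int i;
        for (i = 0; i < cnt; i++)
            if (gp_map[start + i] || gp_allocated[start + i])
                break;
        if (i == cnt) {
            for (i = 0; i < cnt; i++)
                gp_allocated[start + i] = true;
            return start;
        }
    }
    return -1; /* -EBUSY in the driver */
}

int main(void)
{
    gp_map[4] = true; /* pretend flow 4 is owned elsewhere */
    printf("got range at %d\n", alloc_gp_range(2, 3)); /* prints 5 */
    return 0;
}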
1303 static struct udma_rflow *__udma_get_rflow(struct udma_dev *ud, int id) in __udma_get_rflow() argument
1312 if (id < 0 || id >= ud->rflow_cnt) in __udma_get_rflow()
1315 if (test_bit(id, ud->rflow_in_use)) in __udma_get_rflow()
1318 if (ud->rflow_gp_map) { in __udma_get_rflow()
1320 if (!test_bit(id, ud->rflow_gp_map) && in __udma_get_rflow()
1321 !test_bit(id, ud->rflow_gp_map_allocated)) in __udma_get_rflow()
1325 dev_dbg(ud->dev, "get rflow%d\n", id); in __udma_get_rflow()
1326 set_bit(id, ud->rflow_in_use); in __udma_get_rflow()
1327 return &ud->rflows[id]; in __udma_get_rflow()
1330 static void __udma_put_rflow(struct udma_dev *ud, struct udma_rflow *rflow) in __udma_put_rflow() argument
1332 if (!test_bit(rflow->id, ud->rflow_in_use)) { in __udma_put_rflow()
1333 dev_err(ud->dev, "attempt to put unused rflow%d\n", rflow->id); in __udma_put_rflow()
1337 dev_dbg(ud->dev, "put rflow%d\n", rflow->id); in __udma_put_rflow()
1338 clear_bit(rflow->id, ud->rflow_in_use); in __udma_put_rflow()
1342 static struct udma_##res *__udma_reserve_##res(struct udma_dev *ud, \
1347 if (test_bit(id, ud->res##_map)) { \
1348 dev_err(ud->dev, "res##%d is in use\n", id); \
1354 if (tpl >= ud->res##_tpl.levels) \
1355 tpl = ud->res##_tpl.levels - 1; \
1357 start = ud->res##_tpl.start_idx[tpl]; \
1359 id = find_next_zero_bit(ud->res##_map, ud->res##_cnt, \
1361 if (id == ud->res##_cnt) { \
1366 set_bit(id, ud->res##_map); \
1367 return &ud->res##s[id]; \
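The __udma_reserve_##res() macro above (source lines 1342–1367) serves two cases: an explicit-id request, which fails if the bit is already set, and an id of -1, which scans from the start index of the requested throughput level after clamping tpl to the highest available level. A sketch with the macro expanded by hand for one resource type; the channel count and level layout are invented for the example:

#include <stdbool.h>
#include <stdio.h>

#define TCHAN_CNT 16
static bool tchan_map[TCHAN_CNT];
/* start_idx[level]: first channel index for that level. Higher-throughput
 * channels sit at lower indices, so a normal (level 0) request skips them. */
static const int tpl_start_idx[] = { 4, 2, 0 }; /* invented layout */
static const int tpl_levels = 3;

static int reserve_tchan(int tpl, int id)
{
    if (id >= 0) {                        /* explicit request */
        if (tchan_map[id])
            return -1;                    /* "in use" */
    } else {
        if (tpl >= tpl_levels)
            tpl = tpl_levels - 1;         /* clamp, as the macro does */
        for (id = tpl_start_idx[tpl]; id < TCHAN_CNT; id++)
            if (!tchan_map[id])
                break;
        if (id == TCHAN_CNT)
            return -1;                    /* no free channel */
    }
    tchan_map[id] = true;
    return id;
}

int main(void)
{
    printf("normal-tpl channel: %d\n", reserve_tchan(0, -1)); /* 4 */
    printf("explicit channel 1: %d\n", reserve_tchan(0, 1));  /* 1 */
    return 0;
}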
1376 struct udma_dev *ud = uc->ud; in bcdma_get_bchan() local
1381 dev_dbg(ud->dev, "chan%d: already have bchan%d allocated\n", in bcdma_get_bchan()
1393 tpl = ud->bchan_tpl.levels - 1; in bcdma_get_bchan()
1395 uc->bchan = __udma_reserve_bchan(ud, tpl, -1); in bcdma_get_bchan()
1409 struct udma_dev *ud = uc->ud; in udma_get_tchan() local
1413 dev_dbg(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_tchan()
1423 uc->tchan = __udma_reserve_tchan(ud, uc->config.channel_tpl, in udma_get_tchan()
1431 if (ud->tflow_cnt) { in udma_get_tchan()
1440 if (test_bit(tflow_id, ud->tflow_map)) { in udma_get_tchan()
1441 dev_err(ud->dev, "tflow%d is in use\n", tflow_id); in udma_get_tchan()
1442 clear_bit(uc->tchan->id, ud->tchan_map); in udma_get_tchan()
1448 set_bit(tflow_id, ud->tflow_map); in udma_get_tchan()
1458 struct udma_dev *ud = uc->ud; in udma_get_rchan() local
1462 dev_dbg(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_rchan()
1472 uc->rchan = __udma_reserve_rchan(ud, uc->config.channel_tpl, in udma_get_rchan()
1485 struct udma_dev *ud = uc->ud; in udma_get_chan_pair() local
1489 dev_info(ud->dev, "chan%d: already have %d pair allocated\n", in udma_get_chan_pair()
1495 dev_err(ud->dev, "chan%d: already have tchan%d allocated\n", in udma_get_chan_pair()
1499 dev_err(ud->dev, "chan%d: already have rchan%d allocated\n", in udma_get_chan_pair()
1505 end = min(ud->tchan_cnt, ud->rchan_cnt); in udma_get_chan_pair()
1510 chan_id = ud->tchan_tpl.start_idx[ud->tchan_tpl.levels - 1]; in udma_get_chan_pair()
1512 if (!test_bit(chan_id, ud->tchan_map) && in udma_get_chan_pair()
1513 !test_bit(chan_id, ud->rchan_map)) in udma_get_chan_pair()
1520 set_bit(chan_id, ud->tchan_map); in udma_get_chan_pair()
1521 set_bit(chan_id, ud->rchan_map); in udma_get_chan_pair()
1522 uc->tchan = &ud->tchans[chan_id]; in udma_get_chan_pair()
1523 uc->rchan = &ud->rchans[chan_id]; in udma_get_chan_pair()
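udma_get_chan_pair() needs a tchan and an rchan with the same index for MEM_TO_MEM transfers, so it scans both maps in lockstep from the highest level's start index and claims the id in both maps on success. The core of that scan, modeled standalone:

#include <stdbool.h>
#include <stdio.h>

#define CHAN_CNT 8
static bool tchan_map[CHAN_CNT], rchan_map[CHAN_CNT];

/* Models udma_get_chan_pair(): the same index must be free in both
 * maps so one channel pair can serve both directions. */
static int get_chan_pair(int start)
{
    for (int id = start; id < CHAN_CNT; id++) {
        if (!tchan_map[id] && !rchan_map[id]) {
            tchan_map[id] = rchan_map[id] = true;
            return id;
        }
    }
    return -1; /* -ENOENT in the driver */
}

int main(void)
{
    tchan_map[2] = true;                       /* tchan2 already taken */
    printf("pair id: %d\n", get_chan_pair(2)); /* prints 3 */
    return 0;
}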
1533 struct udma_dev *ud = uc->ud; in udma_get_rflow() local
1537 dev_err(ud->dev, "chan%d: does not have rchan??\n", uc->id); in udma_get_rflow()
1542 dev_dbg(ud->dev, "chan%d: already have rflow%d allocated\n", in udma_get_rflow()
1547 uc->rflow = __udma_get_rflow(ud, flow_id); in udma_get_rflow()
1559 struct udma_dev *ud = uc->ud; in bcdma_put_bchan() local
1562 dev_dbg(ud->dev, "chan%d: put bchan%d\n", uc->id, in bcdma_put_bchan()
1564 clear_bit(uc->bchan->id, ud->bchan_map); in bcdma_put_bchan()
1572 struct udma_dev *ud = uc->ud; in udma_put_rchan() local
1575 dev_dbg(ud->dev, "chan%d: put rchan%d\n", uc->id, in udma_put_rchan()
1577 clear_bit(uc->rchan->id, ud->rchan_map); in udma_put_rchan()
1584 struct udma_dev *ud = uc->ud; in udma_put_tchan() local
1587 dev_dbg(ud->dev, "chan%d: put tchan%d\n", uc->id, in udma_put_tchan()
1589 clear_bit(uc->tchan->id, ud->tchan_map); in udma_put_tchan()
1592 clear_bit(uc->tchan->tflow_id, ud->tflow_map); in udma_put_tchan()
1600 struct udma_dev *ud = uc->ud; in udma_put_rflow() local
1603 dev_dbg(ud->dev, "chan%d: put rflow%d\n", uc->id, in udma_put_rflow()
1605 __udma_put_rflow(ud, uc->rflow); in udma_put_rflow()
1627 struct udma_dev *ud = uc->ud; in bcdma_alloc_bchan_resources() local
1634 ret = k3_ringacc_request_rings_pair(ud->ringacc, uc->bchan->id, -1, in bcdma_alloc_bchan_resources()
1647 k3_configure_chan_coherency(&uc->vc.chan, ud->asel); in bcdma_alloc_bchan_resources()
1648 ring_cfg.asel = ud->asel; in bcdma_alloc_bchan_resources()
1685 struct udma_dev *ud = uc->ud; in udma_alloc_tx_resources() local
1697 ring_idx = ud->bchan_cnt + tchan->id; in udma_alloc_tx_resources()
1699 ret = k3_ringacc_request_rings_pair(ud->ringacc, ring_idx, -1, in udma_alloc_tx_resources()
1710 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_alloc_tx_resources()
1760 struct udma_dev *ud = uc->ud; in udma_alloc_rx_resources() local
1785 if (ud->tflow_cnt) in udma_alloc_rx_resources()
1786 fd_ring_id = ud->tflow_cnt + rflow->id; in udma_alloc_rx_resources()
1788 fd_ring_id = ud->bchan_cnt + ud->tchan_cnt + ud->echan_cnt + in udma_alloc_rx_resources()
1791 ret = k3_ringacc_request_rings_pair(ud->ringacc, fd_ring_id, -1, in udma_alloc_rx_resources()
1801 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_alloc_rx_resources()
1874 struct udma_dev *ud = uc->ud; in udma_tisci_m2m_channel_config() local
1875 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_m2m_channel_config()
1888 if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE) { in udma_tisci_m2m_channel_config()
1889 tpl = udma_get_chan_tpl_index(&ud->tchan_tpl, tchan->id); in udma_tisci_m2m_channel_config()
1891 burst_size = ud->match_data->burst_size[tpl]; in udma_tisci_m2m_channel_config()
1900 req_tx.tx_atype = ud->atype; in udma_tisci_m2m_channel_config()
1908 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in udma_tisci_m2m_channel_config()
1918 req_rx.rx_atype = ud->atype; in udma_tisci_m2m_channel_config()
1926 dev_err(ud->dev, "rchan%d alloc failed %d\n", rchan->id, ret); in udma_tisci_m2m_channel_config()
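When the SoC sets UDMA_FLAG_BURST_SIZE, udma_tisci_m2m_channel_config() derives a burst size from the channel's throughput level via udma_get_chan_tpl_index() before issuing the separate tx/rx TISCI config requests. A hedged sketch of that lookup; the level-index logic is reconstructed from the driver's conventions (first level whose start index is at or below the channel id) and the table values are placeholders, not real SoC data:

#include <stdio.h>

/* Placeholder per-TPL burst sizes; real values come from match_data. */
static const unsigned int burst_size[] = { 64, 128, 256 };

/* Models udma_get_chan_tpl_index(): level arrays put the highest-
 * throughput channels at the lowest ids, so return the first level
 * whose start index is <= the channel id. */
static int chan_tpl_index(const int *start_idx, int levels, int chan_id)
{
    for (int i = 0; i < levels; i++)
        if (chan_id >= start_idx[i])
            return i;
    return 0;
}

int main(void)
{
    /* invented layout: normal from id 4, high from 2, ultra-high from 0 */
    static const int start_idx[] = { 4, 2, 0 };
    int tpl = chan_tpl_index(start_idx, 3, 5); /* id 5 -> level 0 */
    printf("tchan5: tpl %d, burst %u bytes\n", tpl, burst_size[tpl]);
    return 0;
}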
1933 struct udma_dev *ud = uc->ud; in bcdma_tisci_m2m_channel_config() local
1934 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_m2m_channel_config()
1942 if (ud->match_data->flags & UDMA_FLAG_BURST_SIZE) { in bcdma_tisci_m2m_channel_config()
1943 tpl = udma_get_chan_tpl_index(&ud->bchan_tpl, bchan->id); in bcdma_tisci_m2m_channel_config()
1945 burst_size = ud->match_data->burst_size[tpl]; in bcdma_tisci_m2m_channel_config()
1959 dev_err(ud->dev, "bchan%d cfg failed %d\n", bchan->id, ret); in bcdma_tisci_m2m_channel_config()
1966 struct udma_dev *ud = uc->ud; in udma_tisci_tx_channel_config() local
1967 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_tx_channel_config()
1993 ud->match_data->flags & UDMA_FLAG_TDTYPE) { in udma_tisci_tx_channel_config()
2002 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in udma_tisci_tx_channel_config()
2009 struct udma_dev *ud = uc->ud; in bcdma_tisci_tx_channel_config() local
2010 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_tx_channel_config()
2020 if (ud->match_data->flags & UDMA_FLAG_TDTYPE) { in bcdma_tisci_tx_channel_config()
2029 dev_err(ud->dev, "tchan%d cfg failed %d\n", tchan->id, ret); in bcdma_tisci_tx_channel_config()
2038 struct udma_dev *ud = uc->ud; in udma_tisci_rx_channel_config() local
2039 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_tisci_rx_channel_config()
2068 dev_err(ud->dev, "rchan%d cfg failed %d\n", rchan->id, ret); in udma_tisci_rx_channel_config()
2112 dev_err(ud->dev, "flow%d config failed: %d\n", rchan->id, ret); in udma_tisci_rx_channel_config()
2119 struct udma_dev *ud = uc->ud; in bcdma_tisci_rx_channel_config() local
2120 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_tisci_rx_channel_config()
2132 dev_err(ud->dev, "rchan%d cfg failed %d\n", rchan->id, ret); in bcdma_tisci_rx_channel_config()
2139 struct udma_dev *ud = uc->ud; in pktdma_tisci_rx_channel_config() local
2140 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in pktdma_tisci_rx_channel_config()
2152 dev_err(ud->dev, "rchan%d cfg failed %d\n", uc->rchan->id, ret); in pktdma_tisci_rx_channel_config()
2177 dev_err(ud->dev, "flow%d config failed: %d\n", uc->rflow->id, in pktdma_tisci_rx_channel_config()
2186 struct udma_dev *ud = to_udma_dev(chan->device); in udma_alloc_chan_resources() local
2187 const struct udma_soc_data *soc_data = ud->soc_data; in udma_alloc_chan_resources()
2192 uc->dma_dev = ud->dev; in udma_alloc_chan_resources()
2205 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in udma_alloc_chan_resources()
2207 ud->desc_align, in udma_alloc_chan_resources()
2210 dev_err(ud->ddev.dev, in udma_alloc_chan_resources()
2229 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in udma_alloc_chan_resources()
2248 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2249 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2259 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in udma_alloc_chan_resources()
2266 uc->config.src_thread = ud->psil_base + uc->tchan->id; in udma_alloc_chan_resources()
2277 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in udma_alloc_chan_resources()
2285 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in udma_alloc_chan_resources()
2295 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in udma_alloc_chan_resources()
2307 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in udma_alloc_chan_resources()
2310 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in udma_alloc_chan_resources()
2317 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
2319 dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n", in udma_alloc_chan_resources()
2328 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in udma_alloc_chan_resources()
2337 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in udma_alloc_chan_resources()
2343 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in udma_alloc_chan_resources()
2345 dev_err(ud->dev, "Failed to get udma irq (index: %u)\n", in udma_alloc_chan_resources()
2355 dev_err(ud->dev, "chan%d: UDMA irq request failed\n", in udma_alloc_chan_resources()
2372 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in udma_alloc_chan_resources()
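udma_alloc_chan_resources() acquires resources in a fixed order (channels, PSI-L pairing, ring IRQ, UDMA IRQ) and, as the unpair call above shows, releases them in reverse on any later failure. A schematic of that goto-based unwind; every function here is a hypothetical stub standing in for the real calls (navss_psil_pair(), msi_get_virq() plus request_irq(), and so on):

#include <stdio.h>

static int pair_threads(void)     { return 0; }
static void unpair_threads(void)  { }
static int request_ring_irq(void) { return 0; }
static void free_ring_irq(void)   { }
static int request_udma_irq(void) { return -1; } /* force the unwind */

/* Models the error unwind in udma_alloc_chan_resources(): resources
 * acquired in order are released in reverse order on failure. */
static int alloc_chan_model(void)
{
    if (pair_threads())
        return -1;
    if (request_ring_irq())
        goto err_unpair;
    if (request_udma_irq())
        goto err_free_ring_irq;
    return 0;

err_free_ring_irq:
    free_ring_irq();
err_unpair:
    unpair_threads();
    return -1;
}

int main(void)
{
    printf("alloc: %d\n", alloc_chan_model()); /* prints -1 */
    return 0;
}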
2391 struct udma_dev *ud = to_udma_dev(chan->device); in bcdma_alloc_chan_resources() local
2392 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in bcdma_alloc_chan_resources()
2410 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2424 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in bcdma_alloc_chan_resources()
2433 uc->config.src_thread = ud->psil_base + uc->tchan->id; in bcdma_alloc_chan_resources()
2444 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in bcdma_alloc_chan_resources()
2454 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in bcdma_alloc_chan_resources()
2464 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in bcdma_alloc_chan_resources()
2474 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in bcdma_alloc_chan_resources()
2477 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in bcdma_alloc_chan_resources()
2488 uc->hdesc_pool = dma_pool_create(uc->name, ud->ddev.dev, in bcdma_alloc_chan_resources()
2490 ud->desc_align, in bcdma_alloc_chan_resources()
2493 dev_err(ud->ddev.dev, in bcdma_alloc_chan_resources()
2503 ret = navss_psil_pair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2506 dev_err(ud->dev, in bcdma_alloc_chan_resources()
2515 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in bcdma_alloc_chan_resources()
2517 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in bcdma_alloc_chan_resources()
2526 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in bcdma_alloc_chan_resources()
2532 uc->irq_num_udma = msi_get_virq(ud->dev, irq_udma_idx); in bcdma_alloc_chan_resources()
2534 dev_err(ud->dev, "Failed to get bcdma irq (index: %u)\n", in bcdma_alloc_chan_resources()
2544 dev_err(ud->dev, "chan%d: BCDMA irq request failed\n", in bcdma_alloc_chan_resources()
2564 navss_psil_unpair(ud, uc->config.src_thread, in bcdma_alloc_chan_resources()
2594 trigger_event = uc->ud->soc_data->bcdma_trigger_event_offset; in bcdma_router_config()
2603 struct udma_dev *ud = to_udma_dev(chan->device); in pktdma_alloc_chan_resources() local
2604 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in pktdma_alloc_chan_resources()
2619 dev_dbg(uc->ud->dev, "%s: chan%d as MEM-to-DEV\n", __func__, in pktdma_alloc_chan_resources()
2628 uc->config.src_thread = ud->psil_base + uc->tchan->id; in pktdma_alloc_chan_resources()
2638 dev_dbg(uc->ud->dev, "%s: chan%d as DEV-to-MEM\n", __func__, in pktdma_alloc_chan_resources()
2648 uc->config.dst_thread = (ud->psil_base + uc->rchan->id) | in pktdma_alloc_chan_resources()
2657 dev_err(uc->ud->dev, "%s: chan%d invalid direction (%u)\n", in pktdma_alloc_chan_resources()
2667 dev_warn(ud->dev, "chan%d: is running!\n", uc->id); in pktdma_alloc_chan_resources()
2670 dev_err(ud->dev, "chan%d: won't stop!\n", uc->id); in pktdma_alloc_chan_resources()
2678 uc->config.hdesc_size, ud->desc_align, in pktdma_alloc_chan_resources()
2681 dev_err(ud->ddev.dev, in pktdma_alloc_chan_resources()
2691 ret = navss_psil_pair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2693 dev_err(ud->dev, "PSI-L pairing failed: 0x%04x -> 0x%04x\n", in pktdma_alloc_chan_resources()
2700 uc->irq_num_ring = msi_get_virq(ud->dev, irq_ring_idx); in pktdma_alloc_chan_resources()
2702 dev_err(ud->dev, "Failed to get ring irq (index: %u)\n", in pktdma_alloc_chan_resources()
2711 dev_err(ud->dev, "chan%d: ring irq request failed\n", uc->id); in pktdma_alloc_chan_resources()
2723 dev_dbg(ud->dev, in pktdma_alloc_chan_resources()
2728 dev_dbg(ud->dev, in pktdma_alloc_chan_resources()
2737 navss_psil_unpair(ud, uc->config.src_thread, uc->config.dst_thread); in pktdma_alloc_chan_resources()
2778 dev_err(uc->ud->dev, "Unsupported TR size of %zu\n", tr_size); in udma_alloc_tr_desc()
2802 uc->ud->desc_align); in udma_alloc_tr_desc()
2803 hwdesc->cppi5_desc_vaddr = dma_alloc_coherent(uc->ud->dev, in udma_alloc_tr_desc()
2913 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_tr()
2925 dev_err(uc->ud->dev, "size %u is not supported\n", in udma_prep_slave_sg_tr()
2997 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_slave_sg_triggered_tr()
3006 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3024 dev_err(uc->ud->dev, in udma_prep_slave_sg_triggered_tr()
3044 if (uc->ud->match_data->type == DMA_TYPE_UDMA) { in udma_prep_slave_sg_triggered_tr()
3061 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_slave_sg_triggered_tr()
3208 } else if (uc->ud->match_data->type == DMA_TYPE_BCDMA && in udma_configure_statictr()
3226 d->static_tr.bstcnt > uc->ud->match_data->statictr_z_mask) in udma_configure_statictr()
3256 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_slave_sg_pkt()
3271 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3303 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA || in udma_prep_slave_sg_pkt()
3309 dev_err(uc->ud->dev, in udma_prep_slave_sg_pkt()
3472 dev_err(uc->ud->dev, in udma_prep_slave_sg()
3474 __func__, uc->ud->match_data->statictr_z_mask, in udma_prep_slave_sg()
3505 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_cyclic_tr()
3517 if (uc->ud->match_data->type == DMA_TYPE_UDMA) in udma_prep_dma_cyclic_tr()
3533 uc->ud->match_data->type == DMA_TYPE_BCDMA) { in udma_prep_dma_cyclic_tr()
3603 if (uc->ud->match_data->type != DMA_TYPE_UDMA) in udma_prep_dma_cyclic_pkt()
3615 dev_err(uc->ud->dev, in udma_prep_dma_cyclic_pkt()
3671 dev_err(uc->ud->dev, "%s: bad direction?\n", __func__); in udma_prep_dma_cyclic()
3695 dev_err(uc->ud->dev, in udma_prep_dma_cyclic()
3697 __func__, uc->ud->match_data->statictr_z_mask, in udma_prep_dma_cyclic()
3735 dev_err(uc->ud->dev, "size %zu is not supported\n", in udma_prep_dma_memcpy()
3749 if (uc->ud->match_data->type != DMA_TYPE_UDMA) { in udma_prep_dma_memcpy()
3750 src |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
3751 dest |= (u64)uc->ud->asel << K3_ADDRESS_ASEL_SHIFT; in udma_prep_dma_memcpy()
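For the non-UDMA (BCDMA/PKTDMA) types, udma_prep_dma_memcpy() tags both bus addresses with the device's ASEL value in the top address bits. A sketch of the tagging; the shift value 48 is carried over from the driver's K3_ADDRESS_ASEL_SHIFT and should be treated as an assumption here:

#include <stdint.h>
#include <stdio.h>

#define ADDRESS_ASEL_SHIFT 48 /* assumed; mirrors K3_ADDRESS_ASEL_SHIFT */

/* Fold the ASEL (address-select / coherency routing) field into the
 * upper bits of a DMA address, as the memcpy prep does for non-UDMA. */
static uint64_t asel_tag(uint64_t addr, unsigned int asel)
{
    return addr | ((uint64_t)asel << ADDRESS_ASEL_SHIFT);
}

int main(void)
{
    printf("0x%016llx\n",
           (unsigned long long)asel_tag(0x80000000ull, 14));
    return 0;
}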
3989 dev_warn(uc->ud->dev, "chan%d teardown timeout!\n", in udma_synchronize()
3998 dev_warn(uc->ud->dev, "chan%d refused to stop!\n", uc->id); in udma_synchronize()
4086 struct udma_dev *ud = to_udma_dev(chan->device); in udma_free_chan_resources() local
4109 navss_psil_unpair(ud, uc->config.src_thread, in udma_free_chan_resources()
4145 struct udma_dev *ud; in udma_dma_filter_fn() local
4154 ud = uc->ud; in udma_dma_filter_fn()
4158 dev_err(ud->dev, "Invalid channel atype: %u\n", in udma_dma_filter_fn()
4164 dev_err(ud->dev, "Invalid channel asel: %u\n", in udma_dma_filter_fn()
4185 dev_err(ud->dev, "No configuration for psi-l thread 0x%04x\n", in udma_dma_filter_fn()
4194 if (ud->match_data->type == DMA_TYPE_BCDMA && in udma_dma_filter_fn()
4196 dev_err(ud->dev, in udma_dma_filter_fn()
4211 if (ud->match_data->type == DMA_TYPE_PKTDMA && in udma_dma_filter_fn()
4221 const struct udma_match_data *match_data = ud->match_data; in udma_dma_filter_fn()
4237 ucc->metadata_size, ud->desc_align); in udma_dma_filter_fn()
4239 dev_dbg(ud->dev, "chan%d: Remote thread: 0x%04x (%s)\n", uc->id, in udma_dma_filter_fn()
4245 dev_dbg(ud->dev, "chan%d: triggered channel (type: %u)\n", uc->id, in udma_dma_filter_fn()
4255 struct udma_dev *ud = ofdma->of_dma_data; in udma_of_xlate() local
4259 if (ud->match_data->type == DMA_TYPE_BCDMA) { in udma_of_xlate()
4274 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_of_xlate()
4287 chan = __dma_request_channel(&ud->ddev.cap_mask, udma_dma_filter_fn, &filter_param, in udma_of_xlate()
4290 dev_err(ud->dev, "get channel fail in %s.\n", __func__); in udma_of_xlate()
4510 static int udma_get_mmrs(struct platform_device *pdev, struct udma_dev *ud) in udma_get_mmrs() argument
4515 ud->mmrs[MMR_GCFG] = devm_platform_ioremap_resource_byname(pdev, mmr_names[MMR_GCFG]); in udma_get_mmrs()
4516 if (IS_ERR(ud->mmrs[MMR_GCFG])) in udma_get_mmrs()
4517 return PTR_ERR(ud->mmrs[MMR_GCFG]); in udma_get_mmrs()
4519 cap2 = udma_read(ud->mmrs[MMR_GCFG], 0x28); in udma_get_mmrs()
4520 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_get_mmrs()
4522 switch (ud->match_data->type) { in udma_get_mmrs()
4524 ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3); in udma_get_mmrs()
4525 ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4526 ud->echan_cnt = UDMA_CAP2_ECHAN_CNT(cap2); in udma_get_mmrs()
4527 ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4530 ud->bchan_cnt = BCDMA_CAP2_BCHAN_CNT(cap2) + in udma_get_mmrs()
4533 ud->tchan_cnt = BCDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4534 ud->rchan_cnt = BCDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4535 ud->rflow_cnt = ud->rchan_cnt; in udma_get_mmrs()
4538 cap4 = udma_read(ud->mmrs[MMR_GCFG], 0x30); in udma_get_mmrs()
4539 ud->tchan_cnt = UDMA_CAP2_TCHAN_CNT(cap2); in udma_get_mmrs()
4540 ud->rchan_cnt = UDMA_CAP2_RCHAN_CNT(cap2); in udma_get_mmrs()
4541 ud->rflow_cnt = UDMA_CAP3_RFLOW_CNT(cap3); in udma_get_mmrs()
4542 ud->tflow_cnt = PKTDMA_CAP4_TFLOW_CNT(cap4); in udma_get_mmrs()
4549 if (i == MMR_BCHANRT && ud->bchan_cnt == 0) in udma_get_mmrs()
4551 if (i == MMR_TCHANRT && ud->tchan_cnt == 0) in udma_get_mmrs()
4553 if (i == MMR_RCHANRT && ud->rchan_cnt == 0) in udma_get_mmrs()
4556 ud->mmrs[i] = devm_platform_ioremap_resource_byname(pdev, mmr_names[i]); in udma_get_mmrs()
4557 if (IS_ERR(ud->mmrs[i])) in udma_get_mmrs()
4558 return PTR_ERR(ud->mmrs[i]); in udma_get_mmrs()
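udma_get_mmrs() reads the GCFG capability words at fixed offsets (0x28, 0x2c, 0x30) and extracts the per-type channel counts through the CAP field macros, then skips mapping the RT register regions of any resource whose count is zero. The extraction style in miniature; the field positions below are invented for illustration, the real ones live in the UDMA_CAP2_x / UDMA_CAP3_x macros:

#include <stdint.h>
#include <stdio.h>

/* Invented field layout; real positions are in the CAP macros. */
#define CAP_TCHAN_CNT(x)  ((x) & 0x1ff)
#define CAP_RCHAN_CNT(x)  (((x) >> 9) & 0x1ff)

int main(void)
{
    uint32_t cap2 = (20u << 9) | 16u; /* pretend register readback */
    printf("tchan_cnt=%u rchan_cnt=%u\n",
           CAP_TCHAN_CNT(cap2), CAP_RCHAN_CNT(cap2));
    return 0;
}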
4564 static void udma_mark_resource_ranges(struct udma_dev *ud, unsigned long *map, in udma_mark_resource_ranges() argument
4570 dev_dbg(ud->dev, "ti_sci resource range for %s: %d:%d | %d:%d\n", name, in udma_mark_resource_ranges()
4583 static int udma_setup_resources(struct udma_dev *ud) in udma_setup_resources() argument
4586 struct device *dev = ud->dev; in udma_setup_resources()
4588 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in udma_setup_resources()
4592 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in udma_setup_resources()
4595 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4596 ud->tchan_tpl.start_idx[0] = 8; in udma_setup_resources()
4599 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4600 ud->tchan_tpl.start_idx[0] = 2; in udma_setup_resources()
4602 ud->tchan_tpl.levels = 3; in udma_setup_resources()
4603 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in udma_setup_resources()
4604 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4606 ud->tchan_tpl.levels = 2; in udma_setup_resources()
4607 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4609 ud->tchan_tpl.levels = 1; in udma_setup_resources()
4612 ud->rchan_tpl.levels = ud->tchan_tpl.levels; in udma_setup_resources()
4613 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in udma_setup_resources()
4614 ud->rchan_tpl.start_idx[1] = ud->tchan_tpl.start_idx[1]; in udma_setup_resources()
4616 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in udma_setup_resources()
4618 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in udma_setup_resources()
4620 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in udma_setup_resources()
4622 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in udma_setup_resources()
4624 ud->rflow_gp_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4627 ud->rflow_gp_map_allocated = devm_kcalloc(dev, in udma_setup_resources()
4628 BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4631 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in udma_setup_resources()
4634 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, sizeof(*ud->rflows), in udma_setup_resources()
4637 if (!ud->tchan_map || !ud->rchan_map || !ud->rflow_gp_map || in udma_setup_resources()
4638 !ud->rflow_gp_map_allocated || !ud->tchans || !ud->rchans || in udma_setup_resources()
4639 !ud->rflows || !ud->rflow_in_use) in udma_setup_resources()
4647 bitmap_set(ud->rflow_gp_map_allocated, 0, ud->rchan_cnt); in udma_setup_resources()
4650 bitmap_set(ud->rflow_gp_map, 0, ud->rflow_cnt); in udma_setup_resources()
4666 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in udma_setup_resources()
4669 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in udma_setup_resources()
4671 udma_mark_resource_ranges(ud, ud->tchan_map, in udma_setup_resources()
4679 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in udma_setup_resources()
4682 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in udma_setup_resources()
4684 udma_mark_resource_ranges(ud, ud->rchan_map, in udma_setup_resources()
4695 irq_res.desc[0].num = ud->tchan_cnt; in udma_setup_resources()
4708 irq_res.desc[i].num = ud->rchan_cnt; in udma_setup_resources()
4713 ud->soc_data->oes.udma_rchan; in udma_setup_resources()
4718 ud->soc_data->oes.udma_rchan; in udma_setup_resources()
4723 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in udma_setup_resources()
4726 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in udma_setup_resources()
4734 bitmap_clear(ud->rflow_gp_map, ud->rchan_cnt, in udma_setup_resources()
4735 ud->rflow_cnt - ud->rchan_cnt); in udma_setup_resources()
4738 udma_mark_resource_ranges(ud, ud->rflow_gp_map, in udma_setup_resources()
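udma_setup_resources() asks TISCI for the resource ranges this host owns. If the query fails, it keeps every channel (bitmap_zero, all free); otherwise it marks everything busy (bitmap_fill) and re-opens only the granted windows via udma_mark_resource_ranges(), which clears the bits of each granted range. The open-the-granted-window step, modeled:

#include <stdbool.h>
#include <stdio.h>

#define CHAN_CNT 12
static bool chan_map[CHAN_CNT]; /* true = unavailable to this host */

/* Models udma_mark_resource_ranges(): clear (free) the bits of a
 * range granted by the system firmware. */
static void mark_range_free(int start, int num)
{
    for (int i = start; i < start + num && i < CHAN_CNT; i++)
        chan_map[i] = false;
}

int main(void)
{
    for (int i = 0; i < CHAN_CNT; i++)
        chan_map[i] = true;   /* bitmap_fill(): assume nothing is ours */
    mark_range_free(4, 3);    /* firmware grants channels 4..6 */
    for (int i = 0; i < CHAN_CNT; i++)
        printf("%d", chan_map[i] ? 1 : 0);
    printf("\n");             /* prints 111100011111 */
    return 0;
}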
4745 static int bcdma_setup_resources(struct udma_dev *ud) in bcdma_setup_resources() argument
4748 struct device *dev = ud->dev; in bcdma_setup_resources()
4750 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in bcdma_setup_resources()
4751 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in bcdma_setup_resources()
4755 cap = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in bcdma_setup_resources()
4757 ud->bchan_tpl.levels = 3; in bcdma_setup_resources()
4758 ud->bchan_tpl.start_idx[1] = BCDMA_CAP3_UBCHAN_CNT(cap); in bcdma_setup_resources()
4759 ud->bchan_tpl.start_idx[0] = BCDMA_CAP3_HBCHAN_CNT(cap); in bcdma_setup_resources()
4761 ud->bchan_tpl.levels = 2; in bcdma_setup_resources()
4762 ud->bchan_tpl.start_idx[0] = BCDMA_CAP3_HBCHAN_CNT(cap); in bcdma_setup_resources()
4764 ud->bchan_tpl.levels = 1; in bcdma_setup_resources()
4767 cap = udma_read(ud->mmrs[MMR_GCFG], 0x30); in bcdma_setup_resources()
4769 ud->rchan_tpl.levels = 3; in bcdma_setup_resources()
4770 ud->rchan_tpl.start_idx[1] = BCDMA_CAP4_URCHAN_CNT(cap); in bcdma_setup_resources()
4771 ud->rchan_tpl.start_idx[0] = BCDMA_CAP4_HRCHAN_CNT(cap); in bcdma_setup_resources()
4773 ud->rchan_tpl.levels = 2; in bcdma_setup_resources()
4774 ud->rchan_tpl.start_idx[0] = BCDMA_CAP4_HRCHAN_CNT(cap); in bcdma_setup_resources()
4776 ud->rchan_tpl.levels = 1; in bcdma_setup_resources()
4780 ud->tchan_tpl.levels = 3; in bcdma_setup_resources()
4781 ud->tchan_tpl.start_idx[1] = BCDMA_CAP4_UTCHAN_CNT(cap); in bcdma_setup_resources()
4782 ud->tchan_tpl.start_idx[0] = BCDMA_CAP4_HTCHAN_CNT(cap); in bcdma_setup_resources()
4784 ud->tchan_tpl.levels = 2; in bcdma_setup_resources()
4785 ud->tchan_tpl.start_idx[0] = BCDMA_CAP4_HTCHAN_CNT(cap); in bcdma_setup_resources()
4787 ud->tchan_tpl.levels = 1; in bcdma_setup_resources()
4790 ud->bchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->bchan_cnt), in bcdma_setup_resources()
4792 ud->bchans = devm_kcalloc(dev, ud->bchan_cnt, sizeof(*ud->bchans), in bcdma_setup_resources()
4794 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in bcdma_setup_resources()
4796 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in bcdma_setup_resources()
4798 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in bcdma_setup_resources()
4800 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in bcdma_setup_resources()
4803 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rchan_cnt), in bcdma_setup_resources()
4806 ud->rflows = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rflows), in bcdma_setup_resources()
4809 if (!ud->bchan_map || !ud->tchan_map || !ud->rchan_map || in bcdma_setup_resources()
4810 !ud->rflow_in_use || !ud->bchans || !ud->tchans || !ud->rchans || in bcdma_setup_resources()
4811 !ud->rflows) in bcdma_setup_resources()
4818 if (i == RM_RANGE_BCHAN && ud->bchan_cnt == 0) in bcdma_setup_resources()
4820 if (i == RM_RANGE_TCHAN && ud->tchan_cnt == 0) in bcdma_setup_resources()
4822 if (i == RM_RANGE_RCHAN && ud->rchan_cnt == 0) in bcdma_setup_resources()
4834 if (ud->bchan_cnt) { in bcdma_setup_resources()
4837 bitmap_zero(ud->bchan_map, ud->bchan_cnt); in bcdma_setup_resources()
4840 bitmap_fill(ud->bchan_map, ud->bchan_cnt); in bcdma_setup_resources()
4842 udma_mark_resource_ranges(ud, ud->bchan_map, in bcdma_setup_resources()
4850 if (ud->tchan_cnt) { in bcdma_setup_resources()
4853 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in bcdma_setup_resources()
4856 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in bcdma_setup_resources()
4858 udma_mark_resource_ranges(ud, ud->tchan_map, in bcdma_setup_resources()
4866 if (ud->rchan_cnt) { in bcdma_setup_resources()
4869 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in bcdma_setup_resources()
4872 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in bcdma_setup_resources()
4874 udma_mark_resource_ranges(ud, ud->rchan_map, in bcdma_setup_resources()
4884 if (ud->bchan_cnt) { in bcdma_setup_resources()
4888 irq_res.desc[0].num = ud->bchan_cnt; in bcdma_setup_resources()
4907 if (ud->tchan_cnt) { in bcdma_setup_resources()
4911 irq_res.desc[i].num = ud->tchan_cnt; in bcdma_setup_resources()
4913 irq_res.desc[i + 1].num = ud->tchan_cnt; in bcdma_setup_resources()
4936 if (ud->rchan_cnt) { in bcdma_setup_resources()
4940 irq_res.desc[i].num = ud->rchan_cnt; in bcdma_setup_resources()
4942 irq_res.desc[i + 1].num = ud->rchan_cnt; in bcdma_setup_resources()
4966 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in bcdma_setup_resources()
4969 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in bcdma_setup_resources()
4976 static int pktdma_setup_resources(struct udma_dev *ud) in pktdma_setup_resources() argument
4979 struct device *dev = ud->dev; in pktdma_setup_resources()
4981 struct udma_tisci_rm *tisci_rm = &ud->tisci_rm; in pktdma_setup_resources()
4982 const struct udma_oes_offsets *oes = &ud->soc_data->oes; in pktdma_setup_resources()
4986 cap3 = udma_read(ud->mmrs[MMR_GCFG], 0x2c); in pktdma_setup_resources()
4988 ud->tchan_tpl.levels = 3; in pktdma_setup_resources()
4989 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in pktdma_setup_resources()
4990 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in pktdma_setup_resources()
4992 ud->tchan_tpl.levels = 2; in pktdma_setup_resources()
4993 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in pktdma_setup_resources()
4995 ud->tchan_tpl.levels = 1; in pktdma_setup_resources()
4998 ud->rchan_tpl.levels = ud->tchan_tpl.levels; in pktdma_setup_resources()
4999 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in pktdma_setup_resources()
5000 ud->rchan_tpl.start_idx[1] = ud->tchan_tpl.start_idx[1]; in pktdma_setup_resources()
5002 ud->tchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tchan_cnt), in pktdma_setup_resources()
5004 ud->tchans = devm_kcalloc(dev, ud->tchan_cnt, sizeof(*ud->tchans), in pktdma_setup_resources()
5006 ud->rchan_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->rchan_cnt), in pktdma_setup_resources()
5008 ud->rchans = devm_kcalloc(dev, ud->rchan_cnt, sizeof(*ud->rchans), in pktdma_setup_resources()
5010 ud->rflow_in_use = devm_kcalloc(dev, BITS_TO_LONGS(ud->rflow_cnt), in pktdma_setup_resources()
5013 ud->rflows = devm_kcalloc(dev, ud->rflow_cnt, sizeof(*ud->rflows), in pktdma_setup_resources()
5015 ud->tflow_map = devm_kmalloc_array(dev, BITS_TO_LONGS(ud->tflow_cnt), in pktdma_setup_resources()
5018 if (!ud->tchan_map || !ud->rchan_map || !ud->tflow_map || !ud->tchans || in pktdma_setup_resources()
5019 !ud->rchans || !ud->rflows || !ud->rflow_in_use) in pktdma_setup_resources()
5036 bitmap_zero(ud->tchan_map, ud->tchan_cnt); in pktdma_setup_resources()
5038 bitmap_fill(ud->tchan_map, ud->tchan_cnt); in pktdma_setup_resources()
5040 udma_mark_resource_ranges(ud, ud->tchan_map, in pktdma_setup_resources()
5047 bitmap_zero(ud->rchan_map, ud->rchan_cnt); in pktdma_setup_resources()
5049 bitmap_fill(ud->rchan_map, ud->rchan_cnt); in pktdma_setup_resources()
5051 udma_mark_resource_ranges(ud, ud->rchan_map, in pktdma_setup_resources()
5059 bitmap_zero(ud->rflow_in_use, ud->rflow_cnt); in pktdma_setup_resources()
5062 bitmap_fill(ud->rflow_in_use, ud->rflow_cnt); in pktdma_setup_resources()
5064 udma_mark_resource_ranges(ud, ud->rflow_in_use, in pktdma_setup_resources()
5073 bitmap_zero(ud->tflow_map, ud->tflow_cnt); in pktdma_setup_resources()
5076 bitmap_fill(ud->tflow_map, ud->tflow_cnt); in pktdma_setup_resources()
5078 udma_mark_resource_ranges(ud, ud->tflow_map, in pktdma_setup_resources()
5089 irq_res.desc[0].num = ud->tflow_cnt; in pktdma_setup_resources()
5107 irq_res.desc[i].num = ud->rflow_cnt; in pktdma_setup_resources()
5121 ret = ti_sci_inta_msi_domain_alloc_irqs(ud->dev, &irq_res); in pktdma_setup_resources()
5124 dev_err(ud->dev, "Failed to allocate MSI interrupts\n"); in pktdma_setup_resources()
5131 static int setup_resources(struct udma_dev *ud) in setup_resources() argument
5133 struct device *dev = ud->dev; in setup_resources()
5136 switch (ud->match_data->type) { in setup_resources()
5138 ret = udma_setup_resources(ud); in setup_resources()
5141 ret = bcdma_setup_resources(ud); in setup_resources()
5144 ret = pktdma_setup_resources(ud); in setup_resources()
5153 ch_count = ud->bchan_cnt + ud->tchan_cnt + ud->rchan_cnt; in setup_resources()
5154 if (ud->bchan_cnt) in setup_resources()
5155 ch_count -= bitmap_weight(ud->bchan_map, ud->bchan_cnt); in setup_resources()
5156 ch_count -= bitmap_weight(ud->tchan_map, ud->tchan_cnt); in setup_resources()
5157 ch_count -= bitmap_weight(ud->rchan_map, ud->rchan_cnt); in setup_resources()
5161 ud->channels = devm_kcalloc(dev, ch_count, sizeof(*ud->channels), in setup_resources()
5163 if (!ud->channels) in setup_resources()
5166 switch (ud->match_data->type) { in setup_resources()
5171 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5172 ud->tchan_cnt), in setup_resources()
5173 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5174 ud->rchan_cnt), in setup_resources()
5175 ud->rflow_cnt - bitmap_weight(ud->rflow_gp_map, in setup_resources()
5176 ud->rflow_cnt)); in setup_resources()
5182 ud->bchan_cnt - bitmap_weight(ud->bchan_map, in setup_resources()
5183 ud->bchan_cnt), in setup_resources()
5184 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5185 ud->tchan_cnt), in setup_resources()
5186 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5187 ud->rchan_cnt)); in setup_resources()
5193 ud->tchan_cnt - bitmap_weight(ud->tchan_map, in setup_resources()
5194 ud->tchan_cnt), in setup_resources()
5195 ud->rchan_cnt - bitmap_weight(ud->rchan_map, in setup_resources()
5196 ud->rchan_cnt)); in setup_resources()
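setup_resources() sizes ud->channels by subtracting the reserved channels from the raw totals; bitmap_weight() counts the set bits, i.e. the channels this host may not use. The same count in standalone form:

#include <stdbool.h>
#include <stdio.h>

static int weight(const bool *map, int cnt)
{
    int n = 0;
    for (int i = 0; i < cnt; i++)
        n += map[i];
    return n;               /* bitmap_weight() analogue */
}

int main(void)
{
    bool tchan_map[8] = { 1, 1, 0, 0, 0, 0, 0, 0 };
    bool rchan_map[8] = { 1, 0, 0, 0, 0, 0, 0, 0 };
    int ch_count = 8 + 8;   /* tchan_cnt + rchan_cnt */
    ch_count -= weight(tchan_map, 8);
    ch_count -= weight(rchan_map, 8);
    printf("usable channels: %d\n", ch_count); /* prints 13 */
    return 0;
}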
5205 static int udma_setup_rx_flush(struct udma_dev *ud) in udma_setup_rx_flush() argument
5207 struct udma_rx_flush *rx_flush = &ud->rx_flush; in udma_setup_rx_flush()
5211 struct device *dev = ud->dev; in udma_setup_rx_flush()
5233 ud->desc_align); in udma_setup_rx_flush()
5273 ud->desc_align); in udma_setup_rx_flush()
5317 if (uc->ud->match_data->type == DMA_TYPE_BCDMA) { in udma_dbg_summary_show_chan()
5328 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5334 if (uc->ud->match_data->type == DMA_TYPE_PKTDMA) in udma_dbg_summary_show_chan()
5373 static enum dmaengine_alignment udma_get_copy_align(struct udma_dev *ud) in udma_get_copy_align() argument
5375 const struct udma_match_data *match_data = ud->match_data; in udma_get_copy_align()
5382 if (ud->bchan_cnt) in udma_get_copy_align()
5383 tpl = udma_get_chan_tpl_index(&ud->bchan_tpl, 0); in udma_get_copy_align()
5384 else if (ud->tchan_cnt) in udma_get_copy_align()
5385 tpl = udma_get_chan_tpl_index(&ud->tchan_tpl, 0); in udma_get_copy_align()
5412 struct udma_dev *ud; in udma_probe() local
5421 ud = devm_kzalloc(dev, sizeof(*ud), GFP_KERNEL); in udma_probe()
5422 if (!ud) in udma_probe()
5430 ud->match_data = match->data; in udma_probe()
5432 ud->soc_data = ud->match_data->soc_data; in udma_probe()
5433 if (!ud->soc_data) { in udma_probe()
5439 ud->soc_data = soc->data; in udma_probe()
5442 ret = udma_get_mmrs(pdev, ud); in udma_probe()
5446 ud->tisci_rm.tisci = ti_sci_get_by_phandle(dev->of_node, "ti,sci"); in udma_probe()
5447 if (IS_ERR(ud->tisci_rm.tisci)) in udma_probe()
5448 return PTR_ERR(ud->tisci_rm.tisci); in udma_probe()
5451 &ud->tisci_rm.tisci_dev_id); in udma_probe()
5456 pdev->id = ud->tisci_rm.tisci_dev_id; in udma_probe()
5459 &ud->tisci_rm.tisci_navss_dev_id); in udma_probe()
5465 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_probe()
5467 &ud->atype); in udma_probe()
5468 if (!ret && ud->atype > 2) { in udma_probe()
5469 dev_err(dev, "Invalid atype: %u\n", ud->atype); in udma_probe()
5474 &ud->asel); in udma_probe()
5475 if (!ret && ud->asel > 15) { in udma_probe()
5476 dev_err(dev, "Invalid asel: %u\n", ud->asel); in udma_probe()
5481 ud->tisci_rm.tisci_udmap_ops = &ud->tisci_rm.tisci->ops.rm_udmap_ops; in udma_probe()
5482 ud->tisci_rm.tisci_psil_ops = &ud->tisci_rm.tisci->ops.rm_psil_ops; in udma_probe()
5484 if (ud->match_data->type == DMA_TYPE_UDMA) { in udma_probe()
5485 ud->ringacc = of_k3_ringacc_get_by_phandle(dev->of_node, "ti,ringacc"); in udma_probe()
5489 ring_init_data.tisci = ud->tisci_rm.tisci; in udma_probe()
5490 ring_init_data.tisci_dev_id = ud->tisci_rm.tisci_dev_id; in udma_probe()
5491 if (ud->match_data->type == DMA_TYPE_BCDMA) { in udma_probe()
5492 ring_init_data.num_rings = ud->bchan_cnt + in udma_probe()
5493 ud->tchan_cnt + in udma_probe()
5494 ud->rchan_cnt; in udma_probe()
5496 ring_init_data.num_rings = ud->rflow_cnt + in udma_probe()
5497 ud->tflow_cnt; in udma_probe()
5500 ud->ringacc = k3_ringacc_dmarings_init(pdev, &ring_init_data); in udma_probe()
5503 if (IS_ERR(ud->ringacc)) in udma_probe()
5504 return PTR_ERR(ud->ringacc); in udma_probe()
5512 dma_cap_set(DMA_SLAVE, ud->ddev.cap_mask); in udma_probe()
5514 if (ud->match_data->type != DMA_TYPE_PKTDMA) { in udma_probe()
5515 dma_cap_set(DMA_CYCLIC, ud->ddev.cap_mask); in udma_probe()
5516 ud->ddev.device_prep_dma_cyclic = udma_prep_dma_cyclic; in udma_probe()
5519 ud->ddev.device_config = udma_slave_config; in udma_probe()
5520 ud->ddev.device_prep_slave_sg = udma_prep_slave_sg; in udma_probe()
5521 ud->ddev.device_issue_pending = udma_issue_pending; in udma_probe()
5522 ud->ddev.device_tx_status = udma_tx_status; in udma_probe()
5523 ud->ddev.device_pause = udma_pause; in udma_probe()
5524 ud->ddev.device_resume = udma_resume; in udma_probe()
5525 ud->ddev.device_terminate_all = udma_terminate_all; in udma_probe()
5526 ud->ddev.device_synchronize = udma_synchronize; in udma_probe()
5528 ud->ddev.dbg_summary_show = udma_dbg_summary_show; in udma_probe()
5531 switch (ud->match_data->type) { in udma_probe()
5533 ud->ddev.device_alloc_chan_resources = in udma_probe()
5537 ud->ddev.device_alloc_chan_resources = in udma_probe()
5539 ud->ddev.device_router_config = bcdma_router_config; in udma_probe()
5542 ud->ddev.device_alloc_chan_resources = in udma_probe()
5548 ud->ddev.device_free_chan_resources = udma_free_chan_resources; in udma_probe()
5550 ud->ddev.src_addr_widths = TI_UDMAC_BUSWIDTHS; in udma_probe()
5551 ud->ddev.dst_addr_widths = TI_UDMAC_BUSWIDTHS; in udma_probe()
5552 ud->ddev.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV); in udma_probe()
5553 ud->ddev.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST; in udma_probe()
5554 ud->ddev.desc_metadata_modes = DESC_METADATA_CLIENT | in udma_probe()
5556 if (ud->match_data->enable_memcpy_support && in udma_probe()
5557 !(ud->match_data->type == DMA_TYPE_BCDMA && ud->bchan_cnt == 0)) { in udma_probe()
5558 dma_cap_set(DMA_MEMCPY, ud->ddev.cap_mask); in udma_probe()
5559 ud->ddev.device_prep_dma_memcpy = udma_prep_dma_memcpy; in udma_probe()
5560 ud->ddev.directions |= BIT(DMA_MEM_TO_MEM); in udma_probe()
5563 ud->ddev.dev = dev; in udma_probe()
5564 ud->dev = dev; in udma_probe()
5565 ud->psil_base = ud->match_data->psil_base; in udma_probe()
5567 INIT_LIST_HEAD(&ud->ddev.channels); in udma_probe()
5568 INIT_LIST_HEAD(&ud->desc_to_purge); in udma_probe()
5570 ch_count = setup_resources(ud); in udma_probe()
5574 spin_lock_init(&ud->lock); in udma_probe()
5575 INIT_WORK(&ud->purge_work, udma_purge_desc_work); in udma_probe()
5577 ud->desc_align = 64; in udma_probe()
5578 if (ud->desc_align < dma_get_cache_alignment()) in udma_probe()
5579 ud->desc_align = dma_get_cache_alignment(); in udma_probe()
5581 ret = udma_setup_rx_flush(ud); in udma_probe()
5585 for (i = 0; i < ud->bchan_cnt; i++) { in udma_probe()
5586 struct udma_bchan *bchan = &ud->bchans[i]; in udma_probe()
5589 bchan->reg_rt = ud->mmrs[MMR_BCHANRT] + i * 0x1000; in udma_probe()
5592 for (i = 0; i < ud->tchan_cnt; i++) { in udma_probe()
5593 struct udma_tchan *tchan = &ud->tchans[i]; in udma_probe()
5596 tchan->reg_rt = ud->mmrs[MMR_TCHANRT] + i * 0x1000; in udma_probe()
5599 for (i = 0; i < ud->rchan_cnt; i++) { in udma_probe()
5600 struct udma_rchan *rchan = &ud->rchans[i]; in udma_probe()
5603 rchan->reg_rt = ud->mmrs[MMR_RCHANRT] + i * 0x1000; in udma_probe()
5606 for (i = 0; i < ud->rflow_cnt; i++) { in udma_probe()
5607 struct udma_rflow *rflow = &ud->rflows[i]; in udma_probe()
5613 struct udma_chan *uc = &ud->channels[i]; in udma_probe()
5615 uc->ud = ud; in udma_probe()
5629 vchan_init(&uc->vc, &ud->ddev); in udma_probe()
5637 ud->ddev.copy_align = udma_get_copy_align(ud); in udma_probe()
5639 ret = dma_async_device_register(&ud->ddev); in udma_probe()
5645 platform_set_drvdata(pdev, ud); in udma_probe()
5647 ret = of_dma_controller_register(dev->of_node, udma_of_xlate, ud); in udma_probe()
5650 dma_async_device_unregister(&ud->ddev); in udma_probe()
5658 struct udma_dev *ud = dev_get_drvdata(dev); in udma_pm_suspend() local
5659 struct dma_device *dma_dev = &ud->ddev; in udma_pm_suspend()
5671 ud->ddev.device_free_chan_resources(chan); in udma_pm_suspend()
5680 struct udma_dev *ud = dev_get_drvdata(dev); in udma_pm_resume() local
5681 struct dma_device *dma_dev = &ud->ddev; in udma_pm_resume()
5694 ret = ud->ddev.device_alloc_chan_resources(chan); in udma_pm_resume()
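The suspend/resume hooks walk the DMA device's channel list and recycle every client-held channel through the same free/alloc callbacks used at runtime, so channel state is rebuilt on resume rather than saved and restored. A schematic of that loop; the dmaengine types are elided and the two stubs stand in for ud->ddev.device_free_chan_resources and device_alloc_chan_resources:

#include <stdbool.h>
#include <stdio.h>

struct chan { bool in_use; };

static void free_chan(struct chan *c)  { (void)c; printf("free chan\n"); }
static int  alloc_chan(struct chan *c) { (void)c; printf("alloc chan\n"); return 0; }

/* Suspend: tear down every client-held channel; resume: rebuild it. */
static void pm_cycle(struct chan *chans, int n, bool suspend)
{
    for (int i = 0; i < n; i++) {
        if (!chans[i].in_use)
            continue;
        if (suspend)
            free_chan(&chans[i]);
        else
            alloc_chan(&chans[i]);
    }
}

int main(void)
{
    struct chan chans[2] = { { true }, { false } };
    pm_cycle(chans, 2, true);   /* udma_pm_suspend() analogue */
    pm_cycle(chans, 2, false);  /* udma_pm_resume() analogue */
    return 0;
}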