Lines Matching +full:wed +full:- +full:pcie

1 // SPDX-License-Identifier: GPL-2.0-only
9 #include <linux/dma-mapping.h>
102 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
138 return readl(dev->wlan.base + reg); in wifi_r32()
144 writel(val, dev->wlan.base + reg); in wifi_w32()
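
These accessors show the driver's split register model: WED's own registers sit behind a regmap (so wed_m32() can fold its set-bits into the update mask), while the WLAN core is a plain MMIO window driven with readl()/writel(). A minimal sketch of the same split; the ex_* names are hypothetical, only the regmap and io APIs are real:

#include <linux/io.h>
#include <linux/regmap.h>

struct ex_dev {
        struct regmap *regs;            /* WED side, behind a regmap */
        void __iomem *wlan_base;        /* WLAN side, direct MMIO */
};

static void ex_wed_m32(struct ex_dev *dev, u32 reg, u32 mask, u32 val)
{
        /* clear "mask" bits and set "val" bits in one update; passing
         * mask | val as the mask mirrors what wed_m32() does above
         */
        regmap_update_bits(dev->regs, reg, mask | val, val);
}

static u32 ex_wifi_r32(struct ex_dev *dev, u32 reg)
{
        return readl(dev->wlan_base + reg);
}
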
164 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_rx_reset()
173 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
178 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
186 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
191 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
253 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
259 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
260 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
292 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_tx_reset()
301 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
306 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
314 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
319 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
374 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
380 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
405 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset()
421 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
426 switch (dev->hw->index) { in mtk_wed_wo_reset()
459 dev = hw->wed_dev; in mtk_wed_fe_reset()
460 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
464 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
466 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
485 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
486 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
489 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
501 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
502 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
506 if (!hw->wed_dev) in mtk_wed_assign()
512 /* MT7986 WED devices do not have any pcie slot restrictions */ in mtk_wed_assign()
514 /* MT7986 PCIE or AXI */ in mtk_wed_assign()
517 if (hw && !hw->wed_dev) in mtk_wed_assign()
524 hw->wed_dev = dev; in mtk_wed_assign()
531 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_amsdu_buffer_alloc()
538 wed_amsdu = devm_kcalloc(hw->dev, MTK_WED_AMSDU_NPAGES, in mtk_wed_amsdu_buffer_alloc()
541 return -ENOMEM; in mtk_wed_amsdu_buffer_alloc()
555 wed_amsdu[i].txd_phy = dma_map_single(hw->dev, ptr, in mtk_wed_amsdu_buffer_alloc()
558 if (dma_mapping_error(hw->dev, wed_amsdu[i].txd_phy)) in mtk_wed_amsdu_buffer_alloc()
561 dev->hw->wed_amsdu = wed_amsdu; in mtk_wed_amsdu_buffer_alloc()
566 for (i--; i >= 0; i--) in mtk_wed_amsdu_buffer_alloc()
567 dma_unmap_single(hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_buffer_alloc()
569 return -ENOMEM; in mtk_wed_amsdu_buffer_alloc()
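
The tail of mtk_wed_amsdu_buffer_alloc() is the canonical streaming-DMA unwind: map each buffer with dma_map_single(), validate the handle with dma_mapping_error(), and on failure unmap everything mapped so far before returning -ENOMEM. A self-contained sketch of that pattern (ex_map_bufs() and its parameters are hypothetical):

#include <linux/dma-mapping.h>

static int ex_map_bufs(struct device *dev, void **bufs, dma_addr_t *phys,
                       int n, size_t len)
{
        int i;

        for (i = 0; i < n; i++) {
                phys[i] = dma_map_single(dev, bufs[i], len, DMA_TO_DEVICE);
                if (dma_mapping_error(dev, phys[i]))
                        goto unwind;
        }

        return 0;

unwind:
        while (--i >= 0)        /* entries 0..i-1 were mapped */
                dma_unmap_single(dev, phys[i], len, DMA_TO_DEVICE);

        return -ENOMEM;
}
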
575 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_free_buffer()
582 dma_unmap_single(dev->hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_free_buffer()
592 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_init()
606 dev->wlan.amsdu_max_len >> 8) | in mtk_wed_amsdu_init()
608 dev->wlan.amsdu_max_subframes)); in mtk_wed_amsdu_init()
615 dev_err(dev->hw->dev, "amsdu initialization failed\n"); in mtk_wed_amsdu_init()
621 FIELD_PREP(MTK_WED_AMSDU_HIFTXD_SRC, dev->hw->index)); in mtk_wed_amsdu_init()
632 if (dev->wlan.id == 0x7991 || dev->wlan.id == 0x7992) in mtk_wed_amsdu_init()
643 u32 desc_size = dev->hw->soc->tx_ring_desc_size; in mtk_wed_tx_buffer_alloc()
645 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
650 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
651 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
652 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
654 dev->tx_buf_ring.size = MTK_WED_TX_BM_DMA_SIZE; in mtk_wed_tx_buffer_alloc()
657 n_pages = dev->tx_buf_ring.size / MTK_WED_BUF_PER_PAGE; in mtk_wed_tx_buffer_alloc()
661 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
663 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
665 desc_ptr = dma_alloc_coherent(dev->hw->dev, in mtk_wed_tx_buffer_alloc()
666 dev->tx_buf_ring.size * desc_size, in mtk_wed_tx_buffer_alloc()
669 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
671 dev->tx_buf_ring.desc = desc_ptr; in mtk_wed_tx_buffer_alloc()
672 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
682 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
684 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
686 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
688 return -ENOMEM; in mtk_wed_tx_buffer_alloc()
693 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
703 desc->buf0 = cpu_to_le32(buf_phys); in mtk_wed_tx_buffer_alloc()
704 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
707 txd_size = dev->wlan.init_buf(buf, buf_phys, in mtk_wed_tx_buffer_alloc()
709 desc->buf1 = cpu_to_le32(buf_phys + txd_size); in mtk_wed_tx_buffer_alloc()
711 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_tx_buffer_alloc()
714 MTK_WED_BUF_SIZE - txd_size); in mtk_wed_tx_buffer_alloc()
718 MTK_WED_BUF_SIZE - txd_size); in mtk_wed_tx_buffer_alloc()
719 desc->info = 0; in mtk_wed_tx_buffer_alloc()
723 desc->ctrl = cpu_to_le32(ctrl); in mtk_wed_tx_buffer_alloc()
730 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
740 struct mtk_wed_buf *page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
741 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_free_tx_buffer()
747 if (!dev->tx_buf_ring.desc) in mtk_wed_free_tx_buffer()
750 for (i = 0; i < dev->tx_buf_ring.size; i += MTK_WED_BUF_PER_PAGE) { in mtk_wed_free_tx_buffer()
757 dma_unmap_page(dev->hw->dev, page_phy, PAGE_SIZE, in mtk_wed_free_tx_buffer()
762 dma_free_coherent(dev->hw->dev, in mtk_wed_free_tx_buffer()
763 dev->tx_buf_ring.size * hw->soc->tx_ring_desc_size, in mtk_wed_free_tx_buffer()
764 dev->tx_buf_ring.desc, in mtk_wed_free_tx_buffer()
765 dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
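
mtk_wed_tx_buffer_alloc()/mtk_wed_free_tx_buffer() pair two DMA styles: the descriptor array lives in coherent memory (dma_alloc_coherent(), no explicit sync needed), while packet pages are streaming mappings bracketed by dma_sync_single_for_cpu()/_for_device() around the CPU writes. A sketch of the coherent half, sized size * desc_size as in the listing; the ex_ring type is hypothetical:

#include <linux/dma-mapping.h>

struct ex_ring {
        void *desc;
        dma_addr_t desc_phys;   /* what gets programmed into hardware */
        int size;
};

static int ex_ring_alloc(struct device *dev, struct ex_ring *ring,
                         int n, size_t desc_size)
{
        ring->desc = dma_alloc_coherent(dev, n * desc_size,
                                        &ring->desc_phys, GFP_KERNEL);
        if (!ring->desc)
                return -ENOMEM;

        ring->size = n;
        return 0;
}

static void ex_ring_free(struct device *dev, struct ex_ring *ring,
                         size_t desc_size)
{
        if (!ring->desc)        /* same guard as mtk_wed_free_ring() */
                return;

        dma_free_coherent(dev, ring->size * desc_size, ring->desc,
                          ring->desc_phys);
}
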
780 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_buffer_alloc()
785 return -ENOMEM; in mtk_wed_hwrro_buffer_alloc()
787 dev->hw_rro.size = dev->wlan.rx_nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_hwrro_buffer_alloc()
788 dev->hw_rro.pages = page_list; in mtk_wed_hwrro_buffer_alloc()
789 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_hwrro_buffer_alloc()
790 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_hwrro_buffer_alloc()
793 return -ENOMEM; in mtk_wed_hwrro_buffer_alloc()
795 dev->hw_rro.desc = desc; in mtk_wed_hwrro_buffer_alloc()
796 dev->hw_rro.desc_phys = desc_phys; in mtk_wed_hwrro_buffer_alloc()
805 return -ENOMEM; in mtk_wed_hwrro_buffer_alloc()
807 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
809 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_hwrro_buffer_alloc()
811 return -ENOMEM; in mtk_wed_hwrro_buffer_alloc()
816 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
821 desc->buf0 = cpu_to_le32(buf_phys); in mtk_wed_hwrro_buffer_alloc()
822 desc->token = cpu_to_le32(RX_DMA_PREP_ADDR64(buf_phys)); in mtk_wed_hwrro_buffer_alloc()
827 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
840 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
841 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
842 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
845 return -ENOMEM; in mtk_wed_rx_buffer_alloc()
847 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
848 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
849 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
857 struct mtk_wed_buf *page_list = dev->hw_rro.pages; in mtk_wed_hwrro_free_buffer()
858 struct mtk_wed_bm_desc *desc = dev->hw_rro.desc; in mtk_wed_hwrro_free_buffer()
861 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_free_buffer()
877 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_hwrro_free_buffer()
882 dma_free_coherent(dev->hw->dev, dev->hw_rro.size * sizeof(*desc), in mtk_wed_hwrro_free_buffer()
883 desc, dev->hw_rro.desc_phys); in mtk_wed_hwrro_free_buffer()
892 struct mtk_wed_bm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
897 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
898 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
899 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
907 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_hwrro_init()
913 wed_w32(dev, MTK_WED_RRO_PG_BM_BASE, dev->hw_rro.desc_phys); in mtk_wed_hwrro_init()
928 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
929 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
931 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
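
The FIELD_PREP() calls here pack values into register fields described by their masks; FIELD_GET() is the inverse. A tiny sketch with made-up field definitions (only <linux/bitfield.h> and GENMASK() are the real API):

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/types.h>

#define EX_RX_BM_SDL0           GENMASK(13, 0)  /* hypothetical length field */
#define EX_RX_BM_SW_TAIL        GENMASK(31, 16) /* hypothetical tail field */

static u32 ex_pack_bm(u32 rx_size, u32 rx_npkt)
{
        /* shift each value into place under its mask */
        return FIELD_PREP(EX_RX_BM_SDL0, rx_size) |
               FIELD_PREP(EX_RX_BM_SW_TAIL, rx_npkt);
}
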
942 if (!ring->desc) in mtk_wed_free_ring()
945 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
946 ring->desc, ring->desc_phys); in mtk_wed_free_ring()
953 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
961 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
962 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
963 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
964 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
972 switch (dev->hw->version) { in mtk_wed_set_ext_int()
990 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
1000 if (!mtk_wed_is_v2(dev->hw)) in mtk_wed_set_512_support()
1021 u32 cur_idx = readl(ring->wpdma + MTK_WED_RING_OFS_CPU_IDX); in mtk_wed_check_wfdma_rx_fill()
1023 if (cur_idx == MTK_WED_RX_RING_SIZE - 1) in mtk_wed_check_wfdma_rx_fill()
1030 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
1031 return -ETIMEDOUT; in mtk_wed_check_wfdma_rx_fill()
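
mtk_wed_check_wfdma_rx_fill() is a bounded poll: re-read a ring index until it reaches the expected value, else log and return -ETIMEDOUT. Loops like this are often written with the iopoll helpers; a hedged equivalent, where the EX_* constants stand in for the real offsets and sizes:

#include <linux/iopoll.h>

#define EX_RING_OFS_CPU_IDX     0x08    /* hypothetical register offset */
#define EX_RING_SIZE            1024    /* hypothetical ring size */

static int ex_wait_rx_fill(void __iomem *ring_base)
{
        u32 cur;

        /* poll every 50us, give up after 100ms */
        return readl_poll_timeout(ring_base + EX_RING_OFS_CPU_IDX, cur,
                                  cur == EX_RING_SIZE - 1,
                                  50, 100 * USEC_PER_MSEC);
}
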
1055 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_disable()
1056 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
1069 if (mtk_wed_is_v3_or_greater(dev->hw) && in mtk_wed_dma_disable()
1110 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_deinit()
1118 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_deinit()
1130 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
1141 if (hw->wed_wo) in __mtk_wed_detach()
1144 if (hw->wed_wo) in __mtk_wed_detach()
1148 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
1151 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
1152 if (of_dma_is_coherent(wlan_node) && hw->hifsys) in __mtk_wed_detach()
1153 regmap_update_bits(hw->hifsys, HIFSYS_DMA_AG_MAP, in __mtk_wed_detach()
1154 BIT(hw->index), BIT(hw->index)); in __mtk_wed_detach()
1157 if ((!hw_list[!hw->index] || !hw_list[!hw->index]->wed_dev) && in __mtk_wed_detach()
1158 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
1159 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
1164 hw->wed_dev = NULL; in __mtk_wed_detach()
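
In __mtk_wed_detach(), hw_list[!hw->index] works because at most two WED instances exist, so !index names the peer slot; the ethernet DMA device is handed back only once the peer is idle too. The condition pulled out into a hypothetical helper, assuming the driver's own hw_list[] and types:

static bool ex_peer_idle(struct mtk_wed_hw *hw)
{
        struct mtk_wed_hw *peer = hw_list[!hw->index];  /* the other slot */

        return !peer || !peer->wed_dev;
}
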
1178 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
1180 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
1182 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_bus_init()
1186 "mediatek,wed-pcie"); in mtk_wed_bus_init()
1193 if (dev->wlan.msi) { in mtk_wed_bus_init()
1195 dev->hw->pcie_base | 0xc08); in mtk_wed_bus_init()
1197 dev->hw->pcie_base | 0xc04); in mtk_wed_bus_init()
1201 dev->hw->pcie_base | 0x180); in mtk_wed_bus_init()
1203 dev->hw->pcie_base | 0x184); in mtk_wed_bus_init()
1210 /* pcie interrupt control: pola/source selection */ in mtk_wed_bus_init()
1215 dev->hw->index)); in mtk_wed_bus_init()
1233 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_set_wpdma()
1234 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
1240 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
1241 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
1242 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
1243 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
1248 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
1249 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring[0], in mtk_wed_set_wpdma()
1250 dev->wlan.wpdma_rx[0]); in mtk_wed_set_wpdma()
1251 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_set_wpdma()
1252 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring[1], in mtk_wed_set_wpdma()
1253 dev->wlan.wpdma_rx[1]); in mtk_wed_set_wpdma()
1255 if (!dev->wlan.hw_rro) in mtk_wed_set_wpdma()
1258 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(0), dev->wlan.wpdma_rx_rro[0]); in mtk_wed_set_wpdma()
1259 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(1), dev->wlan.wpdma_rx_rro[1]); in mtk_wed_set_wpdma()
1262 dev->wlan.wpdma_rx_pg + i * 0x10); in mtk_wed_set_wpdma()
1275 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init_early()
1283 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init_early()
1284 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
1294 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
1296 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
1316 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
1317 size * sizeof(*ring->desc), in mtk_wed_rro_ring_alloc()
1318 &ring->desc_phys, GFP_KERNEL); in mtk_wed_rro_ring_alloc()
1319 if (!ring->desc) in mtk_wed_rro_ring_alloc()
1320 return -ENOMEM; in mtk_wed_rro_ring_alloc()
1322 ring->desc_size = sizeof(*ring->desc); in mtk_wed_rro_ring_alloc()
1323 ring->size = size; in mtk_wed_rro_ring_alloc()
1335 ret = of_reserved_mem_region_to_resource_byname(dev->hw->node, "wo-dlm", &res); in mtk_wed_rro_alloc()
1339 dev->rro.miod_phys = res.start; in mtk_wed_rro_alloc()
1340 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
1342 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
1349 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg()
1356 __le32 wed; in mtk_wed_rro_cfg() member
1386 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
1389 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
1393 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
1400 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
1416 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_route_qm_hw_init()
1421 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1426 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1435 if (dev->init_done) in mtk_wed_hw_init()
1438 dev->init_done = true; in mtk_wed_hw_init()
1441 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
1444 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1448 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1454 } else if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_hw_init()
1458 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1470 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1472 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
1475 wed_w32(dev, dev->hw->soc->regmap.tx_bm_tkid, in mtk_wed_hw_init()
1476 FIELD_PREP(MTK_WED_TX_BM_TKID_START, dev->wlan.token_start) | in mtk_wed_hw_init()
1478 dev->wlan.token_start + dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
1482 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init()
1490 dev->wlan.nbuf / 128) | in mtk_wed_hw_init()
1492 dev->wlan.nbuf / 128)); in mtk_wed_hw_init()
1502 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1536 if (!mtk_wed_is_v1(dev->hw)) in mtk_wed_hw_init()
1543 void *head = (void *)ring->desc; in mtk_wed_ring_reset()
1549 desc = (struct mtk_wdma_desc *)(head + i * ring->desc_size); in mtk_wed_ring_reset()
1550 desc->buf0 = 0; in mtk_wed_ring_reset()
1552 desc->ctrl = cpu_to_le32(MTK_WDMA_DESC_CTRL_DMA_DONE); in mtk_wed_ring_reset()
1554 desc->ctrl = cpu_to_le32(MTK_WFDMA_DESC_CTRL_TO_HOST); in mtk_wed_ring_reset()
1555 desc->buf1 = 0; in mtk_wed_ring_reset()
1556 desc->info = 0; in mtk_wed_ring_reset()
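
mtk_wed_ring_reset() walks the descriptor array by byte stride because desc_size differs per SoC generation, and it presets the DMA_DONE bit so the hardware treats every entry as already consumed. The addressing idiom in isolation, as a hypothetical helper over the driver's own types:

static struct mtk_wdma_desc *ex_desc(struct mtk_wed_ring *ring, int i)
{
        void *head = (void *)ring->desc;

        /* byte-granular stride: desc_size is only known at run time */
        return head + i * ring->desc_size;
}
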
1563 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset()
1573 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1583 if (!ret && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1590 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1627 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1654 } else if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1667 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1675 /* reset wed rx dma */ in mtk_wed_rx_reset()
1683 dev->hw->soc->regmap.reset_idx_rx_mask); in mtk_wed_rx_reset()
1693 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1710 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1711 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1714 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1730 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1731 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1734 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1738 /* 1. reset WED tx DMA */ in mtk_wed_reset_dma()
1746 dev->hw->soc->regmap.reset_idx_tx_mask); in mtk_wed_reset_dma()
1752 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1765 if (!busy && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1773 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1798 /* 3. reset WED WPDMA tx */ in mtk_wed_reset_dma()
1802 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1816 /* 4. reset WED WPDMA tx */ in mtk_wed_reset_dma()
1830 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1839 dev->init_done = false; in mtk_wed_reset_dma()
1840 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1848 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1862 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1863 &ring->desc_phys, GFP_KERNEL); in mtk_wed_ring_alloc()
1864 if (!ring->desc) in mtk_wed_ring_alloc()
1865 return -ENOMEM; in mtk_wed_ring_alloc()
1867 ring->desc_size = desc_size; in mtk_wed_ring_alloc()
1868 ring->size = size; in mtk_wed_ring_alloc()
1880 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1881 return -EINVAL; in mtk_wed_wdma_rx_ring_setup()
1883 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1885 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_rx_ring_setup()
1886 return -ENOMEM; in mtk_wed_wdma_rx_ring_setup()
1889 wdma->desc_phys); in mtk_wed_wdma_rx_ring_setup()
1895 wdma->desc_phys); in mtk_wed_wdma_rx_ring_setup()
1908 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1909 return -EINVAL; in mtk_wed_wdma_tx_ring_setup()
1911 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1913 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_tx_ring_setup()
1914 return -ENOMEM; in mtk_wed_wdma_tx_ring_setup()
1916 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_wdma_tx_ring_setup()
1917 struct mtk_wdma_desc *desc = wdma->desc; in mtk_wed_wdma_tx_ring_setup()
1921 desc->buf0 = 0; in mtk_wed_wdma_tx_ring_setup()
1922 desc->ctrl = cpu_to_le32(MTK_WDMA_DESC_CTRL_DMA_DONE); in mtk_wed_wdma_tx_ring_setup()
1923 desc->buf1 = 0; in mtk_wed_wdma_tx_ring_setup()
1924 desc->info = cpu_to_le32(MTK_WDMA_TXD0_DESC_INFO_DMA_DONE); in mtk_wed_wdma_tx_ring_setup()
1926 desc->buf0 = 0; in mtk_wed_wdma_tx_ring_setup()
1927 desc->ctrl = cpu_to_le32(MTK_WDMA_DESC_CTRL_DMA_DONE); in mtk_wed_wdma_tx_ring_setup()
1928 desc->buf1 = 0; in mtk_wed_wdma_tx_ring_setup()
1929 desc->info = cpu_to_le32(MTK_WDMA_TXD1_DESC_INFO_DMA_DONE); in mtk_wed_wdma_tx_ring_setup()
1935 wdma->desc_phys); in mtk_wed_wdma_tx_ring_setup()
1946 wdma->desc_phys); in mtk_wed_wdma_tx_ring_setup()
1962 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1973 skb->protocol = eh->h_proto; in mtk_wed_ppe_check()
1974 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1982 /* wed control cr set */ in mtk_wed_configure_irq()
1989 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_configure_irq()
1999 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_configure_irq()
2009 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
2011 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
2018 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
2027 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
2029 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
2038 dev->wdma_idx)); in mtk_wed_configure_irq()
2055 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2081 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_enable()
2091 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2127 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2139 struct mtk_wed_ring *ring = &dev->rx_ring[i]; in mtk_wed_dma_enable()
2142 if (!(ring->flags & MTK_WED_RING_CONFIGURED)) in mtk_wed_dma_enable()
2146 dev_err(dev->hw->dev, in mtk_wed_dma_enable()
2152 dev->wlan.wpdma_rx_glo - in mtk_wed_dma_enable()
2153 dev->wlan.phy_base) | MTK_WFMDA_RX_DMA_EN; in mtk_wed_dma_enable()
2155 dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, in mtk_wed_dma_enable()
2168 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_start_hw_rro()
2187 dev->wlan.rro_rx_tbit[0]) | in mtk_wed_start_hw_rro()
2189 dev->wlan.rro_rx_tbit[1])); in mtk_wed_start_hw_rro()
2199 dev->wlan.rx_pg_tbit[0]) | in mtk_wed_start_hw_rro()
2201 dev->wlan.rx_pg_tbit[1]) | in mtk_wed_start_hw_rro()
2203 dev->wlan.rx_pg_tbit[2])); in mtk_wed_start_hw_rro()
2212 struct mtk_wed_ring *ring = &dev->rx_rro_ring[i]; in mtk_wed_start_hw_rro()
2214 if (!(ring->flags & MTK_WED_RING_CONFIGURED)) in mtk_wed_start_hw_rro()
2218 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2223 struct mtk_wed_ring *ring = &dev->rx_page_ring[i]; in mtk_wed_start_hw_rro()
2225 if (!(ring->flags & MTK_WED_RING_CONFIGURED)) in mtk_wed_start_hw_rro()
2229 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2238 struct mtk_wed_ring *ring = &dev->rx_rro_ring[idx]; in mtk_wed_rro_rx_ring_setup()
2240 ring->wpdma = regs; in mtk_wed_rro_rx_ring_setup()
2245 ring->flags |= MTK_WED_RING_CONFIGURED; in mtk_wed_rro_rx_ring_setup()
2251 struct mtk_wed_ring *ring = &dev->rx_page_ring[idx]; in mtk_wed_msdu_pg_rx_ring_setup()
2253 ring->wpdma = regs; in mtk_wed_msdu_pg_rx_ring_setup()
2258 ring->flags |= MTK_WED_RING_CONFIGURED; in mtk_wed_msdu_pg_rx_ring_setup()
2264 struct mtk_wed_ring *ring = &dev->ind_cmd_ring; in mtk_wed_ind_rx_ring_setup()
2268 ring->wpdma = regs; in mtk_wed_ind_rx_ring_setup()
2276 wed_w32(dev, MTK_WED_RRO_CFG0, dev->wlan.phy_base + in mtk_wed_ind_rx_ring_setup()
2277 dev->wlan.ind_cmd.ack_sn_addr); in mtk_wed_ind_rx_ring_setup()
2280 dev->wlan.ind_cmd.win_size) | in mtk_wed_ind_rx_ring_setup()
2282 dev->wlan.ind_cmd.particular_sid)); in mtk_wed_ind_rx_ring_setup()
2286 dev->wlan.ind_cmd.particular_se_phys); in mtk_wed_ind_rx_ring_setup()
2288 for (i = 0; i < dev->wlan.ind_cmd.se_group_nums; i++) { in mtk_wed_ind_rx_ring_setup()
2290 dev->wlan.ind_cmd.addr_elem_phys[i] >> 4); in mtk_wed_ind_rx_ring_setup()
2298 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2303 for (i = 0; i < dev->wlan.ind_cmd.particular_sid; i++) { in mtk_wed_ind_rx_ring_setup()
2315 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2333 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
2334 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
2337 if (dev->wlan.hw_rro) { in mtk_wed_start()
2352 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_start()
2353 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
2355 dev->hw->index); in mtk_wed_start()
2357 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
2358 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
2369 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_start()
2379 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
2383 dev->running = true; in mtk_wed_start()
2397 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
2398 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
2400 ret = -ENODEV; in mtk_wed_attach()
2412 ret = -ENODEV; in mtk_wed_attach()
2416 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
2417 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
2418 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
2419 dev_info(device, "attaching wed device %d version %d\n", in mtk_wed_attach()
2420 hw->index, hw->version); in mtk_wed_attach()
2422 dev->hw = hw; in mtk_wed_attach()
2423 dev->dev = hw->dev; in mtk_wed_attach()
2424 dev->irq = hw->irq; in mtk_wed_attach()
2425 dev->wdma_idx = hw->index; in mtk_wed_attach()
2426 dev->version = hw->version; in mtk_wed_attach()
2427 dev->hw->pcie_base = mtk_wed_get_pcie_base(dev); in mtk_wed_attach()
2429 ret = dma_set_mask_and_coherent(hw->dev, DMA_BIT_MASK(32)); in mtk_wed_attach()
2433 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
2434 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
2435 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
2453 regmap_update_bits(hw->hifsys, HIFSYS_DMA_AG_MAP, in mtk_wed_attach()
2454 BIT(hw->index), 0); in mtk_wed_attach()
2456 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
2462 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
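
mtk_wed_attach() calls dma_set_mask_and_coherent(hw->dev, DMA_BIT_MASK(32)) before any ring or buffer allocation, so every handle later returned by dma_alloc_coherent()/dma_map_page() fits the hardware's 32-bit addressing. The usual shape of that guard (the function name is hypothetical):

#include <linux/dma-mapping.h>

static int ex_dma_prepare(struct device *dev)
{
        int ret;

        /* must precede the first DMA allocation or mapping */
        ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
        if (ret)
                dev_err(dev, "no usable 32-bit DMA configuration\n");

        return ret;
}
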
2475 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
2480 * driver allocated DMA ring gets configured into WED MTK_WED_RING_TX(n) in mtk_wed_tx_ring_setup()
2483 * WED driver posts its own DMA ring as WLAN PDMA TX and configures it in mtk_wed_tx_ring_setup()
2485 * It gets filled with packets picked up from WED TX ring and from in mtk_wed_tx_ring_setup()
2489 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
2490 return -EINVAL; in mtk_wed_tx_ring_setup()
2493 sizeof(*ring->desc), true)) in mtk_wed_tx_ring_setup()
2494 return -ENOMEM; in mtk_wed_tx_ring_setup()
2498 return -ENOMEM; in mtk_wed_tx_ring_setup()
2500 ring->reg_base = MTK_WED_RING_TX(idx); in mtk_wed_tx_ring_setup()
2501 ring->wpdma = regs; in mtk_wed_tx_ring_setup()
2503 if (mtk_wed_is_v3_or_greater(dev->hw) && idx == 1) { in mtk_wed_tx_ring_setup()
2520 /* WED -> WPDMA */ in mtk_wed_tx_ring_setup()
2521 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
2526 ring->desc_phys); in mtk_wed_tx_ring_setup()
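
mtk_wed_tx_ring_setup() programs the same descriptor ring into two register files, the WED view and the WPDMA view (the "WED -> WPDMA" comment), by writing base, count, and CPU index at fixed offsets from each ring's register base. A generic sketch; the EX_* offsets are assumptions standing in for MTK_WED_RING_OFS_*:

#include <linux/io.h>
#include <linux/kernel.h>

#define EX_RING_OFS_BASE        0x00    /* hypothetical offsets */
#define EX_RING_OFS_COUNT       0x04
#define EX_RING_OFS_CPU_IDX     0x08

static void ex_ring_program(void __iomem *base, struct mtk_wed_ring *ring)
{
        writel(lower_32_bits(ring->desc_phys), base + EX_RING_OFS_BASE);
        writel(ring->size, base + EX_RING_OFS_COUNT);
        writel(0, base + EX_RING_OFS_CPU_IDX);  /* ring starts empty */
}
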
2537 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
2538 int i, index = mtk_wed_is_v1(dev->hw); in mtk_wed_txfree_ring_setup()
2541 * For txfree event handling, the same DMA ring is shared between WED in mtk_wed_txfree_ring_setup()
2543 * WED in mtk_wed_txfree_ring_setup()
2545 ring->reg_base = MTK_WED_RING_RX(index); in mtk_wed_txfree_ring_setup()
2546 ring->wpdma = regs; in mtk_wed_txfree_ring_setup()
2562 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
2564 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
2565 return -EINVAL; in mtk_wed_rx_ring_setup()
2568 sizeof(*ring->desc), false)) in mtk_wed_rx_ring_setup()
2569 return -ENOMEM; in mtk_wed_rx_ring_setup()
2573 return -ENOMEM; in mtk_wed_rx_ring_setup()
2575 ring->reg_base = MTK_WED_RING_RX_DATA(idx); in mtk_wed_rx_ring_setup()
2576 ring->wpdma = regs; in mtk_wed_rx_ring_setup()
2577 ring->flags |= MTK_WED_RING_CONFIGURED; in mtk_wed_rx_ring_setup()
2579 /* WPDMA -> WED */ in mtk_wed_rx_ring_setup()
2580 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
2584 ring->desc_phys); in mtk_wed_rx_ring_setup()
2596 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_irq_get()
2605 if (!dev->hw->num_flows) in mtk_wed_irq_get()
2608 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
2631 if (!hw || !hw->wed_dev) { in mtk_wed_flow_add()
2632 ret = -ENODEV; in mtk_wed_flow_add()
2636 if (!hw->wed_dev->wlan.offload_enable) in mtk_wed_flow_add()
2639 if (hw->num_flows) { in mtk_wed_flow_add()
2640 hw->num_flows++; in mtk_wed_flow_add()
2644 ret = hw->wed_dev->wlan.offload_enable(hw->wed_dev); in mtk_wed_flow_add()
2646 hw->num_flows++; in mtk_wed_flow_add()
2647 mtk_wed_set_ext_int(hw->wed_dev, true); in mtk_wed_flow_add()
2661 if (!hw || !hw->wed_dev) in mtk_wed_flow_remove()
2664 if (!hw->wed_dev->wlan.offload_disable) in mtk_wed_flow_remove()
2667 if (--hw->num_flows) in mtk_wed_flow_remove()
2670 hw->wed_dev->wlan.offload_disable(hw->wed_dev); in mtk_wed_flow_remove()
2671 mtk_wed_set_ext_int(hw->wed_dev, true); in mtk_wed_flow_remove()
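
mtk_wed_flow_add()/mtk_wed_flow_remove() reference-count offload users: only the first add calls wlan.offload_enable() (and counts it only on success), and only the last remove calls wlan.offload_disable(). Reduced to its core, with a hypothetical ex_hw type and locking elided:

struct ex_hw {
        unsigned int num_flows;
        int (*offload_enable)(struct ex_hw *hw);
        void (*offload_disable)(struct ex_hw *hw);
};

static int ex_flow_add(struct ex_hw *hw)
{
        int ret;

        if (hw->num_flows) {
                hw->num_flows++;        /* offload already enabled */
                return 0;
        }

        ret = hw->offload_enable(hw);   /* first user enables it */
        if (!ret)
                hw->num_flows++;

        return ret;
}

static void ex_flow_remove(struct ex_hw *hw)
{
        if (--hw->num_flows)
                return;                 /* other users remain */

        hw->offload_disable(hw);        /* last user disables it */
}
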
2684 if (!priv || !tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
2685 return -EOPNOTSUPP; in mtk_wed_setup_tc_block_cb()
2688 return -EOPNOTSUPP; in mtk_wed_setup_tc_block_cb()
2690 hw = priv->hw; in mtk_wed_setup_tc_block_cb()
2691 return mtk_flow_offload_cmd(hw->eth, cls, hw->index); in mtk_wed_setup_tc_block_cb()
2701 struct mtk_eth *eth = hw->eth; in mtk_wed_setup_tc_block()
2704 if (!eth->soc->offload_version) in mtk_wed_setup_tc_block()
2705 return -EOPNOTSUPP; in mtk_wed_setup_tc_block()
2707 if (f->binder_type != FLOW_BLOCK_BINDER_TYPE_CLSACT_INGRESS) in mtk_wed_setup_tc_block()
2708 return -EOPNOTSUPP; in mtk_wed_setup_tc_block()
2711 f->driver_block_list = &block_cb_list; in mtk_wed_setup_tc_block()
2713 switch (f->command) { in mtk_wed_setup_tc_block()
2715 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2723 return -ENOMEM; in mtk_wed_setup_tc_block()
2725 priv->hw = hw; in mtk_wed_setup_tc_block()
2726 priv->dev = dev; in mtk_wed_setup_tc_block()
2735 list_add_tail(&block_cb->driver_list, &block_cb_list); in mtk_wed_setup_tc_block()
2738 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2740 return -ENOENT; in mtk_wed_setup_tc_block()
2744 list_del(&block_cb->driver_list); in mtk_wed_setup_tc_block()
2745 kfree(block_cb->cb_priv); in mtk_wed_setup_tc_block()
2746 block_cb->cb_priv = NULL; in mtk_wed_setup_tc_block()
2750 return -EOPNOTSUPP; in mtk_wed_setup_tc_block()
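
mtk_wed_setup_tc_block() follows the standard flow_block_cb lifecycle: on FLOW_BLOCK_BIND, reuse an existing callback found via flow_block_cb_lookup() or allocate and register a new one; on UNBIND, drop it and free cb_priv. A condensed sketch of the BIND half; ex_block_bind() and its priv argument are assumptions, while the flow_block_cb_* helpers are the real kernel API:

#include <net/flow_offload.h>

static int ex_block_bind(struct flow_block_offload *f, flow_setup_cb_t *cb,
                         struct net_device *dev, void *priv,
                         struct list_head *driver_list)
{
        struct flow_block_cb *block_cb;

        block_cb = flow_block_cb_lookup(f->block, cb, dev);
        if (block_cb) {
                flow_block_cb_incref(block_cb); /* reuse existing cb */
                return 0;
        }

        block_cb = flow_block_cb_alloc(cb, dev, priv, NULL);
        if (IS_ERR(block_cb))
                return PTR_ERR(block_cb);

        flow_block_cb_incref(block_cb);
        flow_block_cb_add(block_cb, f);
        list_add_tail(&block_cb->driver_list, driver_list);

        return 0;
}
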
2755 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev, in mtk_wed_setup_tc() argument
2758 struct mtk_wed_hw *hw = wed->hw; in mtk_wed_setup_tc()
2761 return -EOPNOTSUPP; in mtk_wed_setup_tc()
2768 return -EOPNOTSUPP; in mtk_wed_setup_tc()
2797 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
2829 hw->node = np; in mtk_wed_add_hw()
2830 hw->regs = regs; in mtk_wed_add_hw()
2831 hw->eth = eth; in mtk_wed_add_hw()
2832 hw->dev = &pdev->dev; in mtk_wed_add_hw()
2833 hw->wdma_phy = wdma_phy; in mtk_wed_add_hw()
2834 hw->wdma = wdma; in mtk_wed_add_hw()
2835 hw->index = index; in mtk_wed_add_hw()
2836 hw->irq = irq; in mtk_wed_add_hw()
2837 hw->version = eth->soc->version; in mtk_wed_add_hw()
2839 switch (hw->version) { in mtk_wed_add_hw()
2841 hw->soc = &mt7986_data; in mtk_wed_add_hw()
2844 hw->soc = &mt7988_data; in mtk_wed_add_hw()
2848 hw->mirror = syscon_regmap_lookup_by_phandle(eth_np, in mtk_wed_add_hw()
2849 "mediatek,pcie-mirror"); in mtk_wed_add_hw()
2850 hw->hifsys = syscon_regmap_lookup_by_phandle(eth_np, in mtk_wed_add_hw()
2852 if (IS_ERR(hw->mirror) || IS_ERR(hw->hifsys)) { in mtk_wed_add_hw()
2858 regmap_write(hw->mirror, 0, 0); in mtk_wed_add_hw()
2859 regmap_write(hw->mirror, 4, 0); in mtk_wed_add_hw()
2861 hw->soc = &mt7622_data; in mtk_wed_add_hw()
2876 put_device(&pdev->dev); in mtk_wed_add_hw()
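
For MT7622 (hw->soc = &mt7622_data), probe resolves two syscon phandles from the ethernet node and bails out if either lookup fails, then zeroes the PCIe mirror slots. A sketch of that lookup-and-clear step; the property name comes from the listing and the syscon/regmap calls are the real API, while the helper itself is hypothetical:

#include <linux/mfd/syscon.h>
#include <linux/of.h>
#include <linux/regmap.h>

static int ex_clear_pcie_mirror(struct device_node *eth_np)
{
        struct regmap *mirror;

        mirror = syscon_regmap_lookup_by_phandle(eth_np,
                                                 "mediatek,pcie-mirror");
        if (IS_ERR(mirror))
                return PTR_ERR(mirror); /* phandle missing or invalid */

        regmap_write(mirror, 0, 0);     /* clear mirror slot 0 */
        regmap_write(mirror, 4, 0);     /* clear mirror slot 1 */

        return 0;
}
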
2897 debugfs_remove(hw->debugfs_dir); in mtk_wed_exit()
2898 put_device(hw->dev); in mtk_wed_exit()
2899 of_node_put(hw->node); in mtk_wed_exit()