Lines matching full:dev — identifier search hits for "dev" in the MediaTek WED driver (mtk_wed.c). Each hit shows the kernel source line number, the matching code, and the enclosing function; only matching lines are shown, so multi-line statements appear truncated.
56 struct net_device *dev; member
100 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wed_m32() argument
102 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
106 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_set() argument
108 return wed_m32(dev, reg, 0, mask); in wed_set()
112 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_clr() argument
114 return wed_m32(dev, reg, mask, 0); in wed_clr()
118 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wdma_m32() argument
120 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val); in wdma_m32()
124 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_set() argument
126 wdma_m32(dev, reg, 0, mask); in wdma_set()
130 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_clr() argument
132 wdma_m32(dev, reg, mask, 0); in wdma_clr()
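The wed_m32()/wdma_m32() helpers above are the usual masked read-modify-write idiom: set() passes the bits as the value with an empty clear mask, clr() passes them as the clear mask with a zero value (wed_m32() additionally ORs val into the regmap mask so the value bits are always updated). A minimal userspace model of the idiom, in plain C with hypothetical reg32_* names, not part of the driver:

#include <stdint.h>

/* Model of a 32-bit register update; the real driver goes through
 * regmap (wed_m32) or MMIO (wdma_m32).
 */
static uint32_t reg32_m32(uint32_t old, uint32_t mask, uint32_t val)
{
	return (old & ~mask) | val;	/* clear 'mask' bits, OR in 'val' */
}

static uint32_t reg32_set(uint32_t old, uint32_t bits)
{
	return reg32_m32(old, 0, bits);	/* like wed_set(): only OR */
}

static uint32_t reg32_clr(uint32_t old, uint32_t bits)
{
	return reg32_m32(old, bits, 0);	/* like wed_clr(): only clear */
}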
136 wifi_r32(struct mtk_wed_device *dev, u32 reg) in wifi_r32() argument
138 return readl(dev->wlan.base + reg); in wifi_r32()
142 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val) in wifi_w32() argument
144 writel(val, dev->wlan.base + reg); in wifi_w32()
148 mtk_wed_read_reset(struct mtk_wed_device *dev) in mtk_wed_read_reset() argument
150 return wed_r32(dev, MTK_WED_RESET); in mtk_wed_read_reset()
154 mtk_wdma_read_reset(struct mtk_wed_device *dev) in mtk_wdma_read_reset() argument
156 return wdma_r32(dev, MTK_WDMA_GLO_CFG); in mtk_wdma_read_reset()
160 mtk_wdma_v3_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_v3_rx_reset() argument
164 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_rx_reset()
167 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wdma_v3_rx_reset()
168 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wdma_v3_rx_reset()
172 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG)) in mtk_wdma_v3_rx_reset()
173 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
177 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG)) in mtk_wdma_v3_rx_reset()
178 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
180 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wdma_v3_rx_reset()
181 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wdma_v3_rx_reset()
185 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG)) in mtk_wdma_v3_rx_reset()
186 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
190 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG)) in mtk_wdma_v3_rx_reset()
191 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
194 wdma_w32(dev, MTK_WDMA_PREF_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
197 wdma_clr(dev, MTK_WDMA_PREF_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
202 wdma_w32(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
210 wdma_clr(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
220 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), in mtk_wdma_v3_rx_reset()
222 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), in mtk_wdma_v3_rx_reset()
225 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), in mtk_wdma_v3_rx_reset()
227 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), in mtk_wdma_v3_rx_reset()
231 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_rx_reset()
233 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_rx_reset()
237 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_rx_reset()
239 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_rx_reset()
244 mtk_wdma_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_rx_reset() argument
249 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN); in mtk_wdma_rx_reset()
250 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_rx_reset()
253 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
255 mtk_wdma_v3_rx_reset(dev); in mtk_wdma_rx_reset()
256 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX); in mtk_wdma_rx_reset()
257 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_rx_reset()
259 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
260 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
263 wdma_w32(dev, in mtk_wdma_rx_reset()
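mtk_wdma_rx_reset() shows the other poll flavor, readx_poll_timeout(), which takes a reader function and a single argument, plus the write-then-release pulse on the RESET_IDX register. A sketch of both steps under the same illustrative names:

#include <linux/iopoll.h>

static u32 my_read_glo_cfg(struct my_dev *wdev)
{
	return my_r32(wdev, MY_GLO_CFG);
}

static int my_rx_reset(struct my_dev *wdev)
{
	u32 status;
	int ret;

	/* stop RX DMA, then wait for its busy flag to drop */
	my_w32(wdev, MY_GLO_CFG, my_r32(wdev, MY_GLO_CFG) & ~MY_RX_DMA_EN);
	ret = readx_poll_timeout(my_read_glo_cfg, wdev, status,
				 !(status & MY_RX_DMA_BUSY), 50, 10000);
	if (ret)
		dev_err(wdev->dev, "rx reset failed\n");

	/* pulse the ring-index reset bit, then release it */
	my_w32(wdev, MY_RESET_IDX, MY_RESET_IDX_RX);
	my_w32(wdev, MY_RESET_IDX, 0);

	return ret;
}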
271 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_check_busy() argument
273 return !!(wed_r32(dev, reg) & mask); in mtk_wed_check_busy()
277 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_poll_busy() argument
284 timeout, false, dev, reg, mask); in mtk_wed_poll_busy()
288 mtk_wdma_v3_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_v3_tx_reset() argument
292 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_tx_reset()
295 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wdma_v3_tx_reset()
296 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wdma_v3_tx_reset()
300 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG)) in mtk_wdma_v3_tx_reset()
301 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
305 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG)) in mtk_wdma_v3_tx_reset()
306 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
308 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wdma_v3_tx_reset()
309 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wdma_v3_tx_reset()
313 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG)) in mtk_wdma_v3_tx_reset()
314 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
318 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG)) in mtk_wdma_v3_tx_reset()
319 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
322 wdma_w32(dev, MTK_WDMA_PREF_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
325 wdma_clr(dev, MTK_WDMA_PREF_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
330 wdma_w32(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
335 wdma_clr(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
342 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), in mtk_wdma_v3_tx_reset()
344 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), in mtk_wdma_v3_tx_reset()
347 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), in mtk_wdma_v3_tx_reset()
349 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), in mtk_wdma_v3_tx_reset()
353 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_tx_reset()
355 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_tx_reset()
359 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_tx_reset()
361 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_tx_reset()
366 mtk_wdma_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_tx_reset() argument
371 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wdma_tx_reset()
372 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_tx_reset()
374 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
376 mtk_wdma_v3_tx_reset(dev); in mtk_wdma_tx_reset()
377 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX); in mtk_wdma_tx_reset()
378 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_tx_reset()
380 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
381 wdma_w32(dev, in mtk_wdma_tx_reset()
386 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask) in mtk_wed_reset() argument
390 wed_w32(dev, MTK_WED_RESET, mask); in mtk_wed_reset()
391 if (readx_poll_timeout(mtk_wed_read_reset, dev, status, in mtk_wed_reset()
397 mtk_wed_wo_read_status(struct mtk_wed_device *dev) in mtk_wed_wo_read_status() argument
399 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS); in mtk_wed_wo_read_status()
403 mtk_wed_wo_reset(struct mtk_wed_device *dev) in mtk_wed_wo_reset() argument
405 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset()
410 mtk_wdma_tx_reset(dev); in mtk_wed_wo_reset()
411 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_wo_reset()
418 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val, in mtk_wed_wo_reset()
421 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
426 switch (dev->hw->index) { in mtk_wed_wo_reset()
453 struct mtk_wed_device *dev; in mtk_wed_fe_reset() local
459 dev = hw->wed_dev; in mtk_wed_fe_reset()
460 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
464 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
466 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
480 struct mtk_wed_device *dev; in mtk_wed_fe_reset_complete() local
485 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
486 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
489 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
496 mtk_wed_assign(struct mtk_wed_device *dev) in mtk_wed_assign() argument
501 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
502 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
524 hw->wed_dev = dev; in mtk_wed_assign()
529 mtk_wed_amsdu_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_amsdu_buffer_alloc() argument
531 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_amsdu_buffer_alloc()
538 wed_amsdu = devm_kcalloc(hw->dev, MTK_WED_AMSDU_NPAGES, in mtk_wed_amsdu_buffer_alloc()
555 wed_amsdu[i].txd_phy = dma_map_single(hw->dev, ptr, in mtk_wed_amsdu_buffer_alloc()
558 if (dma_mapping_error(hw->dev, wed_amsdu[i].txd_phy)) in mtk_wed_amsdu_buffer_alloc()
561 dev->hw->wed_amsdu = wed_amsdu; in mtk_wed_amsdu_buffer_alloc()
567 dma_unmap_single(hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_buffer_alloc()
573 mtk_wed_amsdu_free_buffer(struct mtk_wed_device *dev) in mtk_wed_amsdu_free_buffer() argument
575 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_free_buffer()
582 dma_unmap_single(dev->hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_free_buffer()
590 mtk_wed_amsdu_init(struct mtk_wed_device *dev) in mtk_wed_amsdu_init() argument
592 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_init()
599 wed_w32(dev, MTK_WED_AMSDU_HIFTXD_BASE_L(i), in mtk_wed_amsdu_init()
603 wed_w32(dev, MTK_WED_AMSDU_STA_INFO_INIT, MTK_WED_AMSDU_STA_RMVL | in mtk_wed_amsdu_init()
606 dev->wlan.amsdu_max_len >> 8) | in mtk_wed_amsdu_init()
608 dev->wlan.amsdu_max_subframes)); in mtk_wed_amsdu_init()
610 wed_w32(dev, MTK_WED_AMSDU_STA_INFO, MTK_WED_AMSDU_STA_INFO_DO_INIT); in mtk_wed_amsdu_init()
612 ret = mtk_wed_poll_busy(dev, MTK_WED_AMSDU_STA_INFO, in mtk_wed_amsdu_init()
615 dev_err(dev->hw->dev, "amsdu initialization failed\n"); in mtk_wed_amsdu_init()
620 wed_set(dev, MTK_WED_AMSDU_HIFTXD_CFG, in mtk_wed_amsdu_init()
621 FIELD_PREP(MTK_WED_AMSDU_HIFTXD_SRC, dev->hw->index)); in mtk_wed_amsdu_init()
624 wed_set(dev, MTK_WED_AMSDU_PSE, MTK_WED_AMSDU_PSE_RESET); in mtk_wed_amsdu_init()
625 ret = mtk_wed_poll_busy(dev, MTK_WED_MON_AMSDU_QMEM_STS1, BIT(29)); in mtk_wed_amsdu_init()
632 if (dev->wlan.id == 0x7991 || dev->wlan.id == 0x7992) in mtk_wed_amsdu_init()
633 wed_clr(dev, MTK_WED_AMSDU_FIFO, MTK_WED_AMSDU_IS_PRIOR0_RING); in mtk_wed_amsdu_init()
635 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_amsdu_init()
641 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_tx_buffer_alloc() argument
643 u32 desc_size = dev->hw->soc->tx_ring_desc_size; in mtk_wed_tx_buffer_alloc()
645 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
650 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
651 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
652 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
654 dev->tx_buf_ring.size = MTK_WED_TX_BM_DMA_SIZE; in mtk_wed_tx_buffer_alloc()
657 n_pages = dev->tx_buf_ring.size / MTK_WED_BUF_PER_PAGE; in mtk_wed_tx_buffer_alloc()
663 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
665 desc_ptr = dma_alloc_coherent(dev->hw->dev, in mtk_wed_tx_buffer_alloc()
666 dev->tx_buf_ring.size * desc_size, in mtk_wed_tx_buffer_alloc()
671 dev->tx_buf_ring.desc = desc_ptr; in mtk_wed_tx_buffer_alloc()
672 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
684 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
686 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
693 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
704 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
707 txd_size = dev->wlan.init_buf(buf, buf_phys, in mtk_wed_tx_buffer_alloc()
711 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_tx_buffer_alloc()
730 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
738 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_tx_buffer() argument
740 struct mtk_wed_buf *page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
741 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_free_tx_buffer()
747 if (!dev->tx_buf_ring.desc) in mtk_wed_free_tx_buffer()
750 for (i = 0; i < dev->tx_buf_ring.size; i += MTK_WED_BUF_PER_PAGE) { in mtk_wed_free_tx_buffer()
757 dma_unmap_page(dev->hw->dev, page_phy, PAGE_SIZE, in mtk_wed_free_tx_buffer()
762 dma_free_coherent(dev->hw->dev, in mtk_wed_free_tx_buffer()
763 dev->tx_buf_ring.size * hw->soc->tx_ring_desc_size, in mtk_wed_free_tx_buffer()
764 dev->tx_buf_ring.desc, in mtk_wed_free_tx_buffer()
765 dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
772 mtk_wed_hwrro_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_hwrro_buffer_alloc() argument
780 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_buffer_alloc()
787 dev->hw_rro.size = dev->wlan.rx_nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_hwrro_buffer_alloc()
788 dev->hw_rro.pages = page_list; in mtk_wed_hwrro_buffer_alloc()
789 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_hwrro_buffer_alloc()
790 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_hwrro_buffer_alloc()
795 dev->hw_rro.desc = desc; in mtk_wed_hwrro_buffer_alloc()
796 dev->hw_rro.desc_phys = desc_phys; in mtk_wed_hwrro_buffer_alloc()
807 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
809 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_hwrro_buffer_alloc()
816 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
827 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
835 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_alloc() argument
840 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
841 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
842 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
847 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
848 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
849 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
851 return mtk_wed_hwrro_buffer_alloc(dev); in mtk_wed_rx_buffer_alloc()
855 mtk_wed_hwrro_free_buffer(struct mtk_wed_device *dev) in mtk_wed_hwrro_free_buffer() argument
857 struct mtk_wed_buf *page_list = dev->hw_rro.pages; in mtk_wed_hwrro_free_buffer()
858 struct mtk_wed_bm_desc *desc = dev->hw_rro.desc; in mtk_wed_hwrro_free_buffer()
861 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_free_buffer()
877 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_hwrro_free_buffer()
882 dma_free_coherent(dev->hw->dev, dev->hw_rro.size * sizeof(*desc), in mtk_wed_hwrro_free_buffer()
883 desc, dev->hw_rro.desc_phys); in mtk_wed_hwrro_free_buffer()
890 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_rx_buffer() argument
892 struct mtk_wed_bm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
897 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
898 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
899 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
901 mtk_wed_hwrro_free_buffer(dev); in mtk_wed_free_rx_buffer()
905 mtk_wed_hwrro_init(struct mtk_wed_device *dev) in mtk_wed_hwrro_init() argument
907 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_hwrro_init()
910 wed_set(dev, MTK_WED_RRO_PG_BM_RX_DMAM, in mtk_wed_hwrro_init()
913 wed_w32(dev, MTK_WED_RRO_PG_BM_BASE, dev->hw_rro.desc_phys); in mtk_wed_hwrro_init()
915 wed_w32(dev, MTK_WED_RRO_PG_BM_INIT_PTR, in mtk_wed_hwrro_init()
921 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN); in mtk_wed_hwrro_init()
925 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_hw_init() argument
927 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD, in mtk_wed_rx_buffer_hw_init()
928 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
929 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
930 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL | in mtk_wed_rx_buffer_hw_init()
931 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
932 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH, in mtk_wed_rx_buffer_hw_init()
934 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_buffer_hw_init()
936 mtk_wed_hwrro_init(dev); in mtk_wed_rx_buffer_hw_init()
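Register images like MTK_WED_RX_BM_INIT_PTR above are composed with FIELD_PREP(), which shifts a value into a GENMASK()-defined field without hand-coded shift constants. A sketch of the composition, reusing my_dev/my_w32 from the earlier sketch; the MY_* field layout is illustrative:

#include <linux/bitfield.h>
#include <linux/bits.h>

#define MY_RX_BM_SDL0	GENMASK(22, 16)	/* illustrative field positions */
#define MY_RX_BM_TAIL	GENMASK(15, 0)

static void my_rx_bm_hw_init(struct my_dev *wdev, u32 rx_size, u32 rx_npkt)
{
	u32 val = FIELD_PREP(MY_RX_BM_SDL0, rx_size) |
		  FIELD_PREP(MY_RX_BM_TAIL, rx_npkt);

	my_w32(wdev, MY_RX_BM_REG, val);
}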
940 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring) in mtk_wed_free_ring() argument
945 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
950 mtk_wed_free_rx_rings(struct mtk_wed_device *dev) in mtk_wed_free_rx_rings() argument
952 mtk_wed_free_rx_buffer(dev); in mtk_wed_free_rx_rings()
953 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
957 mtk_wed_free_tx_rings(struct mtk_wed_device *dev) in mtk_wed_free_tx_rings() argument
961 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
962 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
963 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
964 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
968 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en) in mtk_wed_set_ext_int() argument
972 switch (dev->hw->version) { in mtk_wed_set_ext_int()
990 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
993 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0); in mtk_wed_set_ext_int()
994 wed_r32(dev, MTK_WED_EXT_INT_MASK); in mtk_wed_set_ext_int()
998 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable) in mtk_wed_set_512_support() argument
1000 if (!mtk_wed_is_v2(dev->hw)) in mtk_wed_set_512_support()
1004 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
1005 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
1008 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
1010 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
1015 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev, in mtk_wed_check_wfdma_rx_fill() argument
1030 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
1038 mtk_wed_dma_disable(struct mtk_wed_device *dev) in mtk_wed_dma_disable() argument
1040 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
1044 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_dma_disable()
1046 wed_clr(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_disable()
1050 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1055 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_disable()
1056 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
1057 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1060 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
1064 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_disable()
1066 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1069 if (mtk_wed_is_v3_or_greater(dev->hw) && in mtk_wed_dma_disable()
1070 mtk_wed_get_rx_capa(dev)) { in mtk_wed_dma_disable()
1071 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, in mtk_wed_dma_disable()
1073 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, in mtk_wed_dma_disable()
1078 mtk_wed_set_512_support(dev, false); in mtk_wed_dma_disable()
1082 mtk_wed_stop(struct mtk_wed_device *dev) in mtk_wed_stop() argument
1084 mtk_wed_dma_disable(dev); in mtk_wed_stop()
1085 mtk_wed_set_ext_int(dev, false); in mtk_wed_stop()
1087 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0); in mtk_wed_stop()
1088 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0); in mtk_wed_stop()
1089 wdma_w32(dev, MTK_WDMA_INT_MASK, 0); in mtk_wed_stop()
1090 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0); in mtk_wed_stop()
1092 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_stop()
1095 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0); in mtk_wed_stop()
1096 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0); in mtk_wed_stop()
1100 mtk_wed_deinit(struct mtk_wed_device *dev) in mtk_wed_deinit() argument
1102 mtk_wed_stop(dev); in mtk_wed_deinit()
1104 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
1110 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_deinit()
1113 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
1118 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_deinit()
1119 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_deinit()
1120 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_TX_AMSDU); in mtk_wed_deinit()
1121 wed_clr(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_deinit()
1128 __mtk_wed_detach(struct mtk_wed_device *dev) in __mtk_wed_detach() argument
1130 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
1132 mtk_wed_deinit(dev); in __mtk_wed_detach()
1134 mtk_wdma_rx_reset(dev); in __mtk_wed_detach()
1135 mtk_wed_reset(dev, MTK_WED_RESET_WED); in __mtk_wed_detach()
1136 mtk_wed_amsdu_free_buffer(dev); in __mtk_wed_detach()
1137 mtk_wed_free_tx_buffer(dev); in __mtk_wed_detach()
1138 mtk_wed_free_tx_rings(dev); in __mtk_wed_detach()
1140 if (mtk_wed_get_rx_capa(dev)) { in __mtk_wed_detach()
1142 mtk_wed_wo_reset(dev); in __mtk_wed_detach()
1143 mtk_wed_free_rx_rings(dev); in __mtk_wed_detach()
1148 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
1151 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
1158 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
1159 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
1161 memset(dev, 0, sizeof(*dev)); in __mtk_wed_detach()
1168 mtk_wed_detach(struct mtk_wed_device *dev) in mtk_wed_detach() argument
1171 __mtk_wed_detach(dev); in mtk_wed_detach()
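The __mtk_wed_detach()/mtk_wed_detach() split is the kernel's double-underscore locking convention: the __ variant assumes the caller holds the lock, the public variant takes it (the lock lines themselves are not "dev" hits, so they do not appear above). A sketch of the shape, assuming a file-scope mutex as in the driver:

#include <linux/mutex.h>

static DEFINE_MUTEX(my_hw_lock);

static void __my_detach(struct my_dev *wdev)
{
	/* teardown that must not race a concurrent attach/detach */
}

void my_detach(struct my_dev *wdev)
{
	mutex_lock(&my_hw_lock);
	__my_detach(wdev);
	mutex_unlock(&my_hw_lock);
}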
1176 mtk_wed_bus_init(struct mtk_wed_device *dev) in mtk_wed_bus_init() argument
1178 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
1180 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
1182 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_bus_init()
1193 if (dev->wlan.msi) { in mtk_wed_bus_init()
1194 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, in mtk_wed_bus_init()
1195 dev->hw->pcie_base | 0xc08); in mtk_wed_bus_init()
1196 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_bus_init()
1197 dev->hw->pcie_base | 0xc04); in mtk_wed_bus_init()
1198 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(8)); in mtk_wed_bus_init()
1200 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, in mtk_wed_bus_init()
1201 dev->hw->pcie_base | 0x180); in mtk_wed_bus_init()
1202 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_bus_init()
1203 dev->hw->pcie_base | 0x184); in mtk_wed_bus_init()
1204 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24)); in mtk_wed_bus_init()
1207 wed_w32(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
1211 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
1215 dev->hw->index)); in mtk_wed_bus_init()
1219 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_bus_init()
1229 mtk_wed_set_wpdma(struct mtk_wed_device *dev) in mtk_wed_set_wpdma() argument
1233 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_set_wpdma()
1234 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
1238 mtk_wed_bus_init(dev); in mtk_wed_set_wpdma()
1240 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
1241 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
1242 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
1243 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
1245 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_set_wpdma()
1248 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
1249 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring[0], in mtk_wed_set_wpdma()
1250 dev->wlan.wpdma_rx[0]); in mtk_wed_set_wpdma()
1251 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_set_wpdma()
1252 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring[1], in mtk_wed_set_wpdma()
1253 dev->wlan.wpdma_rx[1]); in mtk_wed_set_wpdma()
1255 if (!dev->wlan.hw_rro) in mtk_wed_set_wpdma()
1258 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(0), dev->wlan.wpdma_rx_rro[0]); in mtk_wed_set_wpdma()
1259 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(1), dev->wlan.wpdma_rx_rro[1]); in mtk_wed_set_wpdma()
1261 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING_CFG(i), in mtk_wed_set_wpdma()
1262 dev->wlan.wpdma_rx_pg + i * 0x10); in mtk_wed_set_wpdma()
1266 mtk_wed_hw_init_early(struct mtk_wed_device *dev) in mtk_wed_hw_init_early() argument
1271 mtk_wed_deinit(dev); in mtk_wed_hw_init_early()
1272 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_hw_init_early()
1273 mtk_wed_set_wpdma(dev); in mtk_wed_hw_init_early()
1275 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init_early()
1281 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set); in mtk_wed_hw_init_early()
1283 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init_early()
1284 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
1286 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_hw_init_early()
1291 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset); in mtk_wed_hw_init_early()
1292 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset); in mtk_wed_hw_init_early()
1293 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_hw_init_early()
1294 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
1296 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
1297 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT); in mtk_wed_hw_init_early()
1298 wed_w32(dev, MTK_WED_WDMA_OFFSET0, in mtk_wed_hw_init_early()
1304 wed_w32(dev, MTK_WED_WDMA_OFFSET1, in mtk_wed_hw_init_early()
1313 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_rro_ring_alloc() argument
1316 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
1330 mtk_wed_rro_alloc(struct mtk_wed_device *dev) in mtk_wed_rro_alloc() argument
1335 ret = of_reserved_mem_region_to_resource_byname(dev->hw->node, "wo-dlm", &res); in mtk_wed_rro_alloc()
1339 dev->rro.miod_phys = res.start; in mtk_wed_rro_alloc()
1340 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
1342 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
1347 mtk_wed_rro_cfg(struct mtk_wed_device *dev) in mtk_wed_rro_cfg() argument
1349 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg()
1378 mtk_wed_rro_hw_init(struct mtk_wed_device *dev) in mtk_wed_rro_hw_init() argument
1380 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG, in mtk_wed_rro_hw_init()
1386 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
1387 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1, in mtk_wed_rro_hw_init()
1389 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
1390 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1, in mtk_wed_rro_hw_init()
1392 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0); in mtk_wed_rro_hw_init()
1393 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
1395 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rro_hw_init()
1399 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rro_hw_init()
1400 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
1401 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rro_hw_init()
1405 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev) in mtk_wed_route_qm_hw_init() argument
1407 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_route_qm_hw_init()
1411 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM)) in mtk_wed_route_qm_hw_init()
1416 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_route_qm_hw_init()
1417 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
1418 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT); in mtk_wed_route_qm_hw_init()
1419 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_route_qm_hw_init()
1421 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1422 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
1424 wed_set(dev, MTK_WED_RTQM_ENQ_CFG0, in mtk_wed_route_qm_hw_init()
1426 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1429 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_route_qm_hw_init()
1433 mtk_wed_hw_init(struct mtk_wed_device *dev) in mtk_wed_hw_init() argument
1435 if (dev->init_done) in mtk_wed_hw_init()
1438 dev->init_done = true; in mtk_wed_hw_init()
1439 mtk_wed_set_ext_int(dev, false); in mtk_wed_hw_init()
1441 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
1442 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE); in mtk_wed_hw_init()
1444 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1445 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1448 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1451 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
1454 } else if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_hw_init()
1455 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1458 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1461 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR, in mtk_wed_hw_init()
1464 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
1467 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1470 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1472 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
1475 wed_w32(dev, dev->hw->soc->regmap.tx_bm_tkid, in mtk_wed_hw_init()
1476 FIELD_PREP(MTK_WED_TX_BM_TKID_START, dev->wlan.token_start) | in mtk_wed_hw_init()
1478 dev->wlan.token_start + dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
1480 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_hw_init()
1482 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init()
1484 wed_clr(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1487 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1490 dev->wlan.nbuf / 128) | in mtk_wed_hw_init()
1492 dev->wlan.nbuf / 128)); in mtk_wed_hw_init()
1494 wed_set(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1497 wed_w32(dev, MTK_WED_TX_BM_INIT_PTR, in mtk_wed_hw_init()
1502 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1503 wed_set(dev, MTK_WED_CTRL, in mtk_wed_hw_init()
1506 } else if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_hw_init()
1508 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_hw_init()
1511 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_hw_init()
1514 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX, in mtk_wed_hw_init()
1516 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX, in mtk_wed_hw_init()
1519 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX, in mtk_wed_hw_init()
1521 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX, in mtk_wed_hw_init()
1525 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, in mtk_wed_hw_init()
1528 wed_w32(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, 0); in mtk_wed_hw_init()
1530 mtk_wed_rx_buffer_hw_init(dev); in mtk_wed_hw_init()
1531 mtk_wed_rro_hw_init(dev); in mtk_wed_hw_init()
1532 mtk_wed_route_qm_hw_init(dev); in mtk_wed_hw_init()
1535 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE); in mtk_wed_hw_init()
1536 if (!mtk_wed_is_v1(dev->hw)) in mtk_wed_hw_init()
1537 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE); in mtk_wed_hw_init()
1561 mtk_wed_rx_reset(struct mtk_wed_device *dev) in mtk_wed_rx_reset() argument
1563 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset()
1573 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1574 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN); in mtk_wed_rx_reset()
1575 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_HW_STS, in mtk_wed_rx_reset()
1577 mtk_wed_reset(dev, MTK_WED_RESET_RRO_RX_TO_PG); in mtk_wed_rx_reset()
1580 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN); in mtk_wed_rx_reset()
1581 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1583 if (!ret && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1584 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1587 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_rx_reset()
1588 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV); in mtk_wed_rx_reset()
1590 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1592 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1594 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1596 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1600 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1604 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1607 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1611 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_rx_reset()
1615 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rx_reset()
1616 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1619 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM); in mtk_wed_rx_reset()
1621 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rx_reset()
1624 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rx_reset()
1627 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1629 wed_clr(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1633 wed_clr(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN); in mtk_wed_rx_reset()
1636 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1638 mtk_wed_poll_busy(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1642 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(2), in mtk_wed_rx_reset()
1644 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_D_CFG(2), in mtk_wed_rx_reset()
1649 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_rx_reset()
1650 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1653 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1654 } else if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1655 wed_set(dev, MTK_WED_RTQM_RST, BIT(0)); in mtk_wed_rx_reset()
1656 wed_clr(dev, MTK_WED_RTQM_RST, BIT(0)); in mtk_wed_rx_reset()
1657 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1659 wed_set(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_rx_reset()
1663 mtk_wdma_tx_reset(dev); in mtk_wed_rx_reset()
1666 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN); in mtk_wed_rx_reset()
1667 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1668 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_STATUS, in mtk_wed_rx_reset()
1671 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1673 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV); in mtk_wed_rx_reset()
1676 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_rx_reset()
1678 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN); in mtk_wed_rx_reset()
1680 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA); in mtk_wed_rx_reset()
1682 wed_set(dev, MTK_WED_RESET_IDX, in mtk_wed_rx_reset()
1683 dev->hw->soc->regmap.reset_idx_rx_mask); in mtk_wed_rx_reset()
1684 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_rx_reset()
1688 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_reset()
1689 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1691 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM); in mtk_wed_rx_reset()
1693 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1694 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN); in mtk_wed_rx_reset()
1695 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1697 wed_set(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM); in mtk_wed_rx_reset()
1698 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM); in mtk_wed_rx_reset()
1710 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1711 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1714 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1717 mtk_wed_free_rx_buffer(dev); in mtk_wed_rx_reset()
1718 mtk_wed_hwrro_free_buffer(dev); in mtk_wed_rx_reset()
1724 mtk_wed_reset_dma(struct mtk_wed_device *dev) in mtk_wed_reset_dma() argument
1730 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1731 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1734 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1739 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN); in mtk_wed_reset_dma()
1740 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_reset_dma()
1743 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA); in mtk_wed_reset_dma()
1745 wed_w32(dev, MTK_WED_RESET_IDX, in mtk_wed_reset_dma()
1746 dev->hw->soc->regmap.reset_idx_tx_mask); in mtk_wed_reset_dma()
1747 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1751 busy = !!mtk_wdma_rx_reset(dev); in mtk_wed_reset_dma()
1752 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1754 wed_r32(dev, MTK_WED_WDMA_GLO_CFG); in mtk_wed_reset_dma()
1756 wed_w32(dev, MTK_WED_WDMA_GLO_CFG, val); in mtk_wed_reset_dma()
1758 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1763 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1765 if (!busy && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1766 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1770 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT); in mtk_wed_reset_dma()
1771 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV); in mtk_wed_reset_dma()
1773 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1775 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1777 mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1779 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1783 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1787 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1789 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1791 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1794 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1799 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN); in mtk_wed_reset_dma()
1802 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1804 wed_r32(dev, MTK_WED_TX_BM_INTF)); in mtk_wed_reset_dma()
1807 wed_r32(dev, MTK_WED_TX_TKID_INTF)); in mtk_wed_reset_dma()
1812 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT); in mtk_wed_reset_dma()
1813 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN); in mtk_wed_reset_dma()
1814 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_reset_dma()
1817 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1819 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1823 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1827 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_reset_dma()
1828 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV); in mtk_wed_reset_dma()
1829 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV); in mtk_wed_reset_dma()
1830 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1831 wed_w32(dev, MTK_WED_RX1_CTRL2, 0); in mtk_wed_reset_dma()
1833 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, in mtk_wed_reset_dma()
1836 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1839 dev->init_done = false; in mtk_wed_reset_dma()
1840 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1844 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX); in mtk_wed_reset_dma()
1845 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1848 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1850 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_reset_dma()
1851 mtk_wed_reset(dev, MTK_WED_RESET_TX_AMSDU); in mtk_wed_reset_dma()
1854 if (mtk_wed_get_rx_capa(dev)) in mtk_wed_reset_dma()
1855 mtk_wed_rx_reset(dev); in mtk_wed_reset_dma()
1859 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_ring_alloc() argument
1862 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1875 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_rx_ring_setup() argument
1880 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1883 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1884 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_rx_ring_setup()
1885 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_rx_ring_setup()
1888 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1890 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1892 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_rx_ring_setup()
1894 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1896 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1903 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_tx_ring_setup() argument
1908 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1911 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1912 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_tx_ring_setup()
1913 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_tx_ring_setup()
1916 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_wdma_tx_ring_setup()
1934 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1936 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1938 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1939 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1945 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1947 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1949 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX, in mtk_wed_wdma_tx_ring_setup()
1951 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX, in mtk_wed_wdma_tx_ring_setup()
1959 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb, in mtk_wed_ppe_check() argument
1962 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1974 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1978 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_configure_irq() argument
1983 wed_set(dev, MTK_WED_CTRL, in mtk_wed_configure_irq()
1989 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_configure_irq()
1990 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, in mtk_wed_configure_irq()
1993 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, in mtk_wed_configure_irq()
1997 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask); in mtk_wed_configure_irq()
1999 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_configure_irq()
2000 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_TKID_ALI_EN); in mtk_wed_configure_irq()
2003 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX, in mtk_wed_configure_irq()
2009 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
2011 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
2014 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE, in mtk_wed_configure_irq()
2018 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
2020 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_configure_irq()
2021 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX, in mtk_wed_configure_irq()
2027 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
2029 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
2035 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask); in mtk_wed_configure_irq()
2036 wed_set(dev, MTK_WED_WDMA_INT_CTRL, in mtk_wed_configure_irq()
2038 dev->wdma_idx)); in mtk_wed_configure_irq()
2041 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask); in mtk_wed_configure_irq()
2043 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask); in mtk_wed_configure_irq()
2044 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask); in mtk_wed_configure_irq()
2045 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_configure_irq()
2046 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_configure_irq()
2051 mtk_wed_dma_enable(struct mtk_wed_device *dev) in mtk_wed_dma_enable() argument
2055 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2056 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_dma_enable()
2058 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2061 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2065 wed_set(dev, MTK_WED_WPDMA_CTRL, MTK_WED_WPDMA_CTRL_SDL1_FIXED); in mtk_wed_dma_enable()
2067 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2071 wdma_set(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wed_dma_enable()
2074 wed_set(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_enable()
2078 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2081 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_enable()
2082 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2087 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2091 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2092 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_dma_enable()
2095 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_dma_enable()
2097 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, MTK_WED_WDMA_RX_PREF_EN); in mtk_wed_dma_enable()
2099 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2101 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2106 wdma_set(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wed_dma_enable()
2107 wdma_set(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wed_dma_enable()
2110 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2114 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_dma_enable()
2117 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2121 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RXD_READ_LEN); in mtk_wed_dma_enable()
2122 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_enable()
2127 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2128 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_dma_enable()
2133 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN); in mtk_wed_dma_enable()
2134 wdma_set(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wed_dma_enable()
2135 wdma_set(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wed_dma_enable()
2139 struct mtk_wed_ring *ring = &dev->rx_ring[i]; in mtk_wed_dma_enable()
2145 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) { in mtk_wed_dma_enable()
2146 dev_err(dev->hw->dev, in mtk_wed_dma_enable()
2151 val = wifi_r32(dev, in mtk_wed_dma_enable()
2152 dev->wlan.wpdma_rx_glo - in mtk_wed_dma_enable()
2153 dev->wlan.phy_base) | MTK_WFMDA_RX_DMA_EN; in mtk_wed_dma_enable()
2154 wifi_w32(dev, in mtk_wed_dma_enable()
2155 dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, in mtk_wed_dma_enable()
2161 mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask, bool reset) in mtk_wed_start_hw_rro() argument
2165 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_start_hw_rro()
2166 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_start_hw_rro()
2168 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_start_hw_rro()
2172 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2177 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_MSDU_PG_DRV_CLR); in mtk_wed_start_hw_rro()
2178 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2181 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_RX, in mtk_wed_start_hw_rro()
2187 dev->wlan.rro_rx_tbit[0]) | in mtk_wed_start_hw_rro()
2189 dev->wlan.rro_rx_tbit[1])); in mtk_wed_start_hw_rro()
2191 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_MSDU_PG, in mtk_wed_start_hw_rro()
2199 dev->wlan.rx_pg_tbit[0]) | in mtk_wed_start_hw_rro()
2201 dev->wlan.rx_pg_tbit[1]) | in mtk_wed_start_hw_rro()
2203 dev->wlan.rx_pg_tbit[2])); in mtk_wed_start_hw_rro()
2208 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2212 struct mtk_wed_ring *ring = &dev->rx_rro_ring[i]; in mtk_wed_start_hw_rro()
2217 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) in mtk_wed_start_hw_rro()
2218 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2223 struct mtk_wed_ring *ring = &dev->rx_page_ring[i]; in mtk_wed_start_hw_rro()
2228 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) in mtk_wed_start_hw_rro()
2229 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2235 mtk_wed_rro_rx_ring_setup(struct mtk_wed_device *dev, int idx, in mtk_wed_rro_rx_ring_setup() argument
2238 struct mtk_wed_ring *ring = &dev->rx_rro_ring[idx]; in mtk_wed_rro_rx_ring_setup()
2241 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rro_rx_ring_setup()
2243 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rro_rx_ring_setup()
2249 mtk_wed_msdu_pg_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs) in mtk_wed_msdu_pg_rx_ring_setup() argument
2251 struct mtk_wed_ring *ring = &dev->rx_page_ring[idx]; in mtk_wed_msdu_pg_rx_ring_setup()
2254 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_msdu_pg_rx_ring_setup()
2256 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_msdu_pg_rx_ring_setup()
2262 mtk_wed_ind_rx_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_ind_rx_ring_setup() argument
2264 struct mtk_wed_ring *ring = &dev->ind_cmd_ring; in mtk_wed_ind_rx_ring_setup()
2269 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_BASE, in mtk_wed_ind_rx_ring_setup()
2272 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_COUNT, in mtk_wed_ind_rx_ring_setup()
2276 wed_w32(dev, MTK_WED_RRO_CFG0, dev->wlan.phy_base + in mtk_wed_ind_rx_ring_setup()
2277 dev->wlan.ind_cmd.ack_sn_addr); in mtk_wed_ind_rx_ring_setup()
2278 wed_w32(dev, MTK_WED_RRO_CFG1, in mtk_wed_ind_rx_ring_setup()
2280 dev->wlan.ind_cmd.win_size) | in mtk_wed_ind_rx_ring_setup()
2282 dev->wlan.ind_cmd.particular_sid)); in mtk_wed_ind_rx_ring_setup()
2285 wed_w32(dev, MTK_WED_ADDR_ELEM_CFG0, in mtk_wed_ind_rx_ring_setup()
2286 dev->wlan.ind_cmd.particular_se_phys); in mtk_wed_ind_rx_ring_setup()
2288 for (i = 0; i < dev->wlan.ind_cmd.se_group_nums; i++) { in mtk_wed_ind_rx_ring_setup()
2289 wed_w32(dev, MTK_WED_RADDR_ELEM_TBL_WDATA, in mtk_wed_ind_rx_ring_setup()
2290 dev->wlan.ind_cmd.addr_elem_phys[i] >> 4); in mtk_wed_ind_rx_ring_setup()
2291 wed_w32(dev, MTK_WED_ADDR_ELEM_TBL_CFG, in mtk_wed_ind_rx_ring_setup()
2294 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG); in mtk_wed_ind_rx_ring_setup()
2296 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG); in mtk_wed_ind_rx_ring_setup()
2298 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2303 for (i = 0; i < dev->wlan.ind_cmd.particular_sid; i++) { in mtk_wed_ind_rx_ring_setup()
2304 wed_w32(dev, MTK_WED_PN_CHECK_WDATA_M, in mtk_wed_ind_rx_ring_setup()
2307 wed_w32(dev, MTK_WED_PN_CHECK_CFG, MTK_WED_PN_CHECK_WR | in mtk_wed_ind_rx_ring_setup()
2311 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG); in mtk_wed_ind_rx_ring_setup()
2313 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG); in mtk_wed_ind_rx_ring_setup()
2315 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2319 wed_w32(dev, MTK_WED_RX_IND_CMD_CNT0, MTK_WED_RX_IND_CMD_DBG_CNT_EN); in mtk_wed_ind_rx_ring_setup()
2320 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN); in mtk_wed_ind_rx_ring_setup()
2326 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_start() argument
2330 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev)) in mtk_wed_start()
2333 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
2334 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
2335 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false); in mtk_wed_start()
2337 if (dev->wlan.hw_rro) { in mtk_wed_start()
2342 if (!wed_r32(dev, addr)) in mtk_wed_start()
2343 wed_w32(dev, addr, 1); in mtk_wed_start()
2347 mtk_wed_hw_init(dev); in mtk_wed_start()
2348 mtk_wed_configure_irq(dev, irq_mask); in mtk_wed_start()
2350 mtk_wed_set_ext_int(dev, true); in mtk_wed_start()
2352 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_start()
2353 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
2355 dev->hw->index); in mtk_wed_start()
2357 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
2358 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
2359 } else if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_start()
2361 wed_w32(dev, MTK_WED_EXT_INT_MASK1, in mtk_wed_start()
2363 wed_w32(dev, MTK_WED_EXT_INT_MASK2, in mtk_wed_start()
2366 wed_r32(dev, MTK_WED_EXT_INT_MASK1); in mtk_wed_start()
2367 wed_r32(dev, MTK_WED_EXT_INT_MASK2); in mtk_wed_start()
2369 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_start()
2370 wed_w32(dev, MTK_WED_EXT_INT_MASK3, in mtk_wed_start()
2372 wed_r32(dev, MTK_WED_EXT_INT_MASK3); in mtk_wed_start()
2375 if (mtk_wed_rro_cfg(dev)) in mtk_wed_start()
2379 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
2380 mtk_wed_amsdu_init(dev); in mtk_wed_start()
2382 mtk_wed_dma_enable(dev); in mtk_wed_start()
2383 dev->running = true; in mtk_wed_start()
2387 mtk_wed_attach(struct mtk_wed_device *dev) in mtk_wed_attach() argument
2397 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
2398 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
2409 hw = mtk_wed_assign(dev); in mtk_wed_attach()
2416 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
2417 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
2418 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
2422 dev->hw = hw; in mtk_wed_attach()
2423 dev->dev = hw->dev; in mtk_wed_attach()
2424 dev->irq = hw->irq; in mtk_wed_attach()
2425 dev->wdma_idx = hw->index; in mtk_wed_attach()
2426 dev->version = hw->version; in mtk_wed_attach()
2427 dev->hw->pcie_base = mtk_wed_get_pcie_base(dev); in mtk_wed_attach()
2429 ret = dma_set_mask_and_coherent(hw->dev, DMA_BIT_MASK(32)); in mtk_wed_attach()
2433 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
2434 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
2435 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
2437 ret = mtk_wed_tx_buffer_alloc(dev); in mtk_wed_attach()
2441 ret = mtk_wed_amsdu_buffer_alloc(dev); in mtk_wed_attach()
2445 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_attach()
2446 ret = mtk_wed_rro_alloc(dev); in mtk_wed_attach()
2451 mtk_wed_hw_init_early(dev); in mtk_wed_attach()
2456 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
2458 if (mtk_wed_get_rx_capa(dev)) in mtk_wed_attach()
2462 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
2463 __mtk_wed_detach(dev); in mtk_wed_attach()
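Early in mtk_wed_attach() the driver constrains DMA addressing with dma_set_mask_and_coherent(hw->dev, DMA_BIT_MASK(32)) and aborts the attach on failure, since the descriptors carry 32-bit bus addresses. A sketch of that guard:

#include <linux/dma-mapping.h>

static int my_attach_dma(struct device *dev)
{
	int ret;

	/* descriptors hold 32-bit bus addresses; fail attach otherwise */
	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
	if (ret)
		dev_err(dev, "no usable 32-bit DMA configuration\n");

	return ret;
}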
2472 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_tx_ring_setup() argument
2475 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
2489 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
2492 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE, in mtk_wed_tx_ring_setup()
2496 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_tx_ring_setup()
2503 if (mtk_wed_is_v3_or_greater(dev->hw) && idx == 1) { in mtk_wed_tx_ring_setup()
2505 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_tx_ring_setup()
2509 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_tx_ring_setup()
2514 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, in mtk_wed_tx_ring_setup()
2517 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, 0); in mtk_wed_tx_ring_setup()
2521 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
2522 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE); in mtk_wed_tx_ring_setup()
2523 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
2525 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_tx_ring_setup()
2527 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_tx_ring_setup()
2529 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
2535 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_txfree_ring_setup() argument
2537 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
2538 int i, index = mtk_wed_is_v1(dev->hw); in mtk_wed_txfree_ring_setup()
2551 wed_w32(dev, MTK_WED_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
2552 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
2559 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_rx_ring_setup() argument
2562 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
2564 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
2567 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE, in mtk_wed_rx_ring_setup()
2571 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_rx_ring_setup()
2580 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
2581 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE); in mtk_wed_rx_ring_setup()
2583 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rx_ring_setup()
2585 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rx_ring_setup()
2592 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_get() argument
2596 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_irq_get()
2602 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS); in mtk_wed_irq_get()
2603 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val); in mtk_wed_irq_get()
2605 if (!dev->hw->num_flows) in mtk_wed_irq_get()
2608 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
2610 val = wed_r32(dev, MTK_WED_INT_STATUS); in mtk_wed_irq_get()
2612 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */ in mtk_wed_irq_get()
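mtk_wed_irq_get() reads a latched status register and writes the same value back, the write-1-to-clear acknowledge idiom. A sketch reusing my_dev/my_r32/my_w32 from the earlier sketch:

static u32 my_irq_get(struct my_dev *wdev, u32 mask)
{
	u32 val = my_r32(wdev, MY_INT_STATUS) & mask;

	my_w32(wdev, MY_INT_STATUS, val);	/* write-1-to-clear ACK */
	return val;
}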
2618 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_set_mask() argument
2620 mtk_wed_set_ext_int(dev, !!mask); in mtk_wed_irq_set_mask()
2621 wed_w32(dev, MTK_WED_INT_MASK, mask); in mtk_wed_irq_set_mask()
2684 if (!priv || !tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
2695 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev, in mtk_wed_setup_tc_block() argument
2715 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2726 priv->dev = dev; in mtk_wed_setup_tc_block()
2727 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL); in mtk_wed_setup_tc_block()
2738 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2755 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev, in mtk_wed_setup_tc() argument
2766 return mtk_wed_setup_tc_block(hw, dev, type_data); in mtk_wed_setup_tc()
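mtk_wed_setup_tc_block() follows the standard tc flow-block offload pattern: on FLOW_BLOCK_BIND, look up an existing callback for this (cb, dev) identity and take a reference, otherwise allocate one, add it to the offload request, and track it on a driver list. A hedged sketch of the bind half; my_setup_tc_bind, priv, and my_block_cb_list are illustrative:

#include <linux/netdevice.h>
#include <net/flow_offload.h>

static LIST_HEAD(my_block_cb_list);

static int my_setup_tc_bind(struct net_device *dev,
			    struct flow_block_offload *f,
			    flow_setup_cb_t *cb, void *priv)
{
	struct flow_block_cb *block_cb;

	block_cb = flow_block_cb_lookup(f->block, cb, dev);
	if (block_cb) {
		flow_block_cb_incref(block_cb);	/* already bound: refcount */
		return 0;
	}

	block_cb = flow_block_cb_alloc(cb, dev, priv, NULL);
	if (IS_ERR(block_cb))
		return PTR_ERR(block_cb);

	flow_block_cb_incref(block_cb);
	flow_block_cb_add(block_cb, f);
	list_add_tail(&block_cb->driver_list, &my_block_cb_list);
	return 0;
}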
2797 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
2832 hw->dev = &pdev->dev; in mtk_wed_add_hw()
2876 put_device(&pdev->dev); in mtk_wed_add_hw()
2898 put_device(hw->dev); in mtk_wed_exit()