Lines Matching full:dev (code-search results, apparently from the MediaTek WED driver, mtk_wed.c; each entry gives the source line number, the matching line, and the enclosing symbol)
56 struct net_device *dev; member
93 wed_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wed_m32() argument
95 regmap_update_bits(dev->hw->regs, reg, mask | val, val); in wed_m32()
99 wed_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_set() argument
101 return wed_m32(dev, reg, 0, mask); in wed_set()
105 wed_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wed_clr() argument
107 return wed_m32(dev, reg, mask, 0); in wed_clr()
111 wdma_m32(struct mtk_wed_device *dev, u32 reg, u32 mask, u32 val) in wdma_m32() argument
113 wdma_w32(dev, reg, (wdma_r32(dev, reg) & ~mask) | val); in wdma_m32()
117 wdma_set(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_set() argument
119 wdma_m32(dev, reg, 0, mask); in wdma_set()
123 wdma_clr(struct mtk_wed_device *dev, u32 reg, u32 mask) in wdma_clr() argument
125 wdma_m32(dev, reg, mask, 0); in wdma_clr()
129 wifi_r32(struct mtk_wed_device *dev, u32 reg) in wifi_r32() argument
131 return readl(dev->wlan.base + reg); in wifi_r32()
135 wifi_w32(struct mtk_wed_device *dev, u32 reg, u32 val) in wifi_w32() argument
137 writel(val, dev->wlan.base + reg); in wifi_w32()
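The three accessor families above share one pattern: the wed_*() helpers go through a regmap (dev->hw->regs), the wdma_*() helpers open-code the read-modify-write on the WDMA block, and the wifi_*() helpers use plain readl()/writel() on the mapped WLAN window. Passing mask | val to regmap_update_bits() is what lets wed_set() and wed_clr() share wed_m32(): set passes (0, bits), clear passes (bits, 0), and the OR guarantees the update mask always covers the bits being written. A minimal userspace model of that arithmetic (all names here are hypothetical stand-ins):

#include <assert.h>
#include <stdint.h>

static uint32_t fake_reg;	/* stands in for the regmap-backed register */

/* regmap_update_bits(map, reg, m, v) computes (old & ~m) | (v & m);
 * with m = mask | val that reduces to (old & ~(mask | val)) | val. */
static void demo_m32(uint32_t mask, uint32_t val)
{
	fake_reg = (fake_reg & ~(mask | val)) | val;
}

static void demo_set(uint32_t bits) { demo_m32(0, bits); }
static void demo_clr(uint32_t bits) { demo_m32(bits, 0); }

int main(void)
{
	fake_reg = 0xf0;
	demo_set(0x0f);		/* 0xf0 -> 0xff */
	assert(fake_reg == 0xff);
	demo_clr(0xf0);		/* 0xff -> 0x0f */
	assert(fake_reg == 0x0f);
	return 0;
}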
141 mtk_wed_read_reset(struct mtk_wed_device *dev) in mtk_wed_read_reset() argument
143 return wed_r32(dev, MTK_WED_RESET); in mtk_wed_read_reset()
147 mtk_wdma_read_reset(struct mtk_wed_device *dev) in mtk_wdma_read_reset() argument
149 return wdma_r32(dev, MTK_WDMA_GLO_CFG); in mtk_wdma_read_reset()
153 mtk_wdma_v3_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_v3_rx_reset() argument
157 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_rx_reset()
160 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wdma_v3_rx_reset()
161 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wdma_v3_rx_reset()
165 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG)) in mtk_wdma_v3_rx_reset()
166 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
170 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG)) in mtk_wdma_v3_rx_reset()
171 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
173 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wdma_v3_rx_reset()
174 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wdma_v3_rx_reset()
178 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG)) in mtk_wdma_v3_rx_reset()
179 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
183 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG)) in mtk_wdma_v3_rx_reset()
184 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_v3_rx_reset()
187 wdma_w32(dev, MTK_WDMA_PREF_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
190 wdma_clr(dev, MTK_WDMA_PREF_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
195 wdma_w32(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
203 wdma_clr(dev, MTK_WDMA_XDMA_RX_FIFO_CFG, in mtk_wdma_v3_rx_reset()
213 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), in mtk_wdma_v3_rx_reset()
215 wdma_w32(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), in mtk_wdma_v3_rx_reset()
218 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(0), in mtk_wdma_v3_rx_reset()
220 wdma_clr(dev, MTK_WDMA_WRBK_RX_FIFO_CFG(1), in mtk_wdma_v3_rx_reset()
224 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_rx_reset()
226 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_rx_reset()
230 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_rx_reset()
232 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_rx_reset()
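In the v3 RX-reset sequence above, each prefetch/writeback FIFO and start-index (SIDX) block is cleared with a write-then-clear pulse: the reset bits are asserted with a full write, then deasserted once the block has latched them. A hedged sketch of that idiom, with MY_FIFO_CFG/MY_FIFO_RST as hypothetical stand-ins for the MTK_WDMA_*_CFG registers and their reset fields:

static void demo_fifo_reset_pulse(struct mtk_wed_device *dev)
{
	wdma_w32(dev, MY_FIFO_CFG, MY_FIFO_RST);	/* assert reset */
	wdma_clr(dev, MY_FIFO_CFG, MY_FIFO_RST);	/* release it */
}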
237 mtk_wdma_rx_reset(struct mtk_wed_device *dev) in mtk_wdma_rx_reset() argument
242 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_RX_DMA_EN); in mtk_wdma_rx_reset()
243 ret = readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_rx_reset()
246 dev_err(dev->hw->dev, "rx reset failed\n"); in mtk_wdma_rx_reset()
248 mtk_wdma_v3_rx_reset(dev); in mtk_wdma_rx_reset()
249 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_RX); in mtk_wdma_rx_reset()
250 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_rx_reset()
252 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) { in mtk_wdma_rx_reset()
253 if (dev->rx_wdma[i].desc) in mtk_wdma_rx_reset()
256 wdma_w32(dev, in mtk_wdma_rx_reset()
264 mtk_wed_check_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_check_busy() argument
266 return !!(wed_r32(dev, reg) & mask); in mtk_wed_check_busy()
270 mtk_wed_poll_busy(struct mtk_wed_device *dev, u32 reg, u32 mask) in mtk_wed_poll_busy() argument
277 timeout, false, dev, reg, mask); in mtk_wed_poll_busy()
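mtk_wed_poll_busy() wraps the one-line predicate above in the <linux/iopoll.h> polling macros, which repeatedly evaluate a read expression until a condition holds or the timeout expires, returning 0 on success or -ETIMEDOUT. A sketch of such a wait, assuming read_poll_timeout() semantics; the sleep/timeout values are illustrative, not the driver's:

static int demo_poll_idle(struct mtk_wed_device *dev, u32 reg, u32 mask)
{
	u32 busy;

	/* calls mtk_wed_check_busy(dev, reg, mask) every 100 us,
	 * for at most 10 ms, until it returns false */
	return read_poll_timeout(mtk_wed_check_busy, busy, !busy,
				 100, 10000, false, dev, reg, mask);
}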
281 mtk_wdma_v3_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_v3_tx_reset() argument
285 if (!mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wdma_v3_tx_reset()
288 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wdma_v3_tx_reset()
289 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wdma_v3_tx_reset()
293 0, 10000, false, dev, MTK_WDMA_PREF_TX_CFG)) in mtk_wdma_v3_tx_reset()
294 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
298 0, 10000, false, dev, MTK_WDMA_PREF_RX_CFG)) in mtk_wdma_v3_tx_reset()
299 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
301 wdma_clr(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wdma_v3_tx_reset()
302 wdma_clr(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wdma_v3_tx_reset()
306 0, 10000, false, dev, MTK_WDMA_WRBK_TX_CFG)) in mtk_wdma_v3_tx_reset()
307 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
311 0, 10000, false, dev, MTK_WDMA_WRBK_RX_CFG)) in mtk_wdma_v3_tx_reset()
312 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_v3_tx_reset()
315 wdma_w32(dev, MTK_WDMA_PREF_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
318 wdma_clr(dev, MTK_WDMA_PREF_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
323 wdma_w32(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
328 wdma_clr(dev, MTK_WDMA_XDMA_TX_FIFO_CFG, in mtk_wdma_v3_tx_reset()
335 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), in mtk_wdma_v3_tx_reset()
337 wdma_w32(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), in mtk_wdma_v3_tx_reset()
340 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(0), in mtk_wdma_v3_tx_reset()
342 wdma_clr(dev, MTK_WDMA_WRBK_TX_FIFO_CFG(1), in mtk_wdma_v3_tx_reset()
346 wdma_w32(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_tx_reset()
348 wdma_clr(dev, MTK_WDMA_PREF_SIDX_CFG, in mtk_wdma_v3_tx_reset()
352 wdma_w32(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_tx_reset()
354 wdma_clr(dev, MTK_WDMA_WRBK_SIDX_CFG, in mtk_wdma_v3_tx_reset()
359 mtk_wdma_tx_reset(struct mtk_wed_device *dev) in mtk_wdma_tx_reset() argument
364 wdma_clr(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wdma_tx_reset()
365 if (readx_poll_timeout(mtk_wdma_read_reset, dev, status, in mtk_wdma_tx_reset()
367 dev_err(dev->hw->dev, "tx reset failed\n"); in mtk_wdma_tx_reset()
369 mtk_wdma_v3_tx_reset(dev); in mtk_wdma_tx_reset()
370 wdma_w32(dev, MTK_WDMA_RESET_IDX, MTK_WDMA_RESET_IDX_TX); in mtk_wdma_tx_reset()
371 wdma_w32(dev, MTK_WDMA_RESET_IDX, 0); in mtk_wdma_tx_reset()
373 for (i = 0; i < ARRAY_SIZE(dev->tx_wdma); i++) in mtk_wdma_tx_reset()
374 wdma_w32(dev, in mtk_wdma_tx_reset()
379 mtk_wed_reset(struct mtk_wed_device *dev, u32 mask) in mtk_wed_reset() argument
383 wed_w32(dev, MTK_WED_RESET, mask); in mtk_wed_reset()
384 if (readx_poll_timeout(mtk_wed_read_reset, dev, status, in mtk_wed_reset()
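mtk_wed_reset() relies on the MTK_WED_RESET bits being self-clearing: it writes the mask, then readx_poll_timeout() keeps calling mtk_wed_read_reset(dev) until the bits drop. A hedged sketch of that wait; the 1 ms budget and the warning are illustrative:

static void demo_wait_self_clearing_reset(struct mtk_wed_device *dev,
					  u32 mask)
{
	u32 status;

	if (readx_poll_timeout(mtk_wed_read_reset, dev, status,
			       !(status & mask), 0, 1000))
		WARN_ON_ONCE(1);	/* reset never completed */
}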
390 mtk_wed_wo_read_status(struct mtk_wed_device *dev) in mtk_wed_wo_read_status() argument
392 return wed_r32(dev, MTK_WED_SCR0 + 4 * MTK_WED_DUMMY_CR_WO_STATUS); in mtk_wed_wo_read_status()
396 mtk_wed_wo_reset(struct mtk_wed_device *dev) in mtk_wed_wo_reset() argument
398 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_wo_reset()
403 mtk_wdma_tx_reset(dev); in mtk_wed_wo_reset()
404 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_wo_reset()
411 if (readx_poll_timeout(mtk_wed_wo_read_status, dev, val, in mtk_wed_wo_reset()
414 dev_err(dev->hw->dev, "failed to disable wed-wo\n"); in mtk_wed_wo_reset()
419 switch (dev->hw->index) { in mtk_wed_wo_reset()
446 struct mtk_wed_device *dev; in mtk_wed_fe_reset() local
452 dev = hw->wed_dev; in mtk_wed_fe_reset()
453 if (!dev || !dev->wlan.reset) in mtk_wed_fe_reset()
457 err = dev->wlan.reset(dev); in mtk_wed_fe_reset()
459 dev_err(dev->dev, "wlan reset failed: %d\n", err); in mtk_wed_fe_reset()
473 struct mtk_wed_device *dev; in mtk_wed_fe_reset_complete() local
478 dev = hw->wed_dev; in mtk_wed_fe_reset_complete()
479 if (!dev || !dev->wlan.reset_complete) in mtk_wed_fe_reset_complete()
482 dev->wlan.reset_complete(dev); in mtk_wed_fe_reset_complete()
489 mtk_wed_assign(struct mtk_wed_device *dev) in mtk_wed_assign() argument
494 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in mtk_wed_assign()
495 hw = hw_list[pci_domain_nr(dev->wlan.pci_dev->bus)]; in mtk_wed_assign()
517 hw->wed_dev = dev; in mtk_wed_assign()
522 mtk_wed_amsdu_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_amsdu_buffer_alloc() argument
524 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_amsdu_buffer_alloc()
531 wed_amsdu = devm_kcalloc(hw->dev, MTK_WED_AMSDU_NPAGES, in mtk_wed_amsdu_buffer_alloc()
548 wed_amsdu[i].txd_phy = dma_map_single(hw->dev, ptr, in mtk_wed_amsdu_buffer_alloc()
551 if (dma_mapping_error(hw->dev, wed_amsdu[i].txd_phy)) in mtk_wed_amsdu_buffer_alloc()
554 dev->hw->wed_amsdu = wed_amsdu; in mtk_wed_amsdu_buffer_alloc()
560 dma_unmap_single(hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_buffer_alloc()
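The AMSDU path maps each TXD buffer for device access with dma_map_single(); the returned handle must be checked with dma_mapping_error() before use, and the error path above unwinds already-mapped entries with matching dma_unmap_single() calls. A minimal hedged sketch of that contract (kernel context assumed, demo_* names hypothetical):

static int demo_map_for_device(struct device *d, void *buf, size_t len,
			       dma_addr_t *phys)
{
	*phys = dma_map_single(d, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(d, *phys))
		return -ENOMEM;
	/* ... program *phys into the hardware; on teardown call
	 * dma_unmap_single(d, *phys, len, DMA_TO_DEVICE) ... */
	return 0;
}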
566 mtk_wed_amsdu_free_buffer(struct mtk_wed_device *dev) in mtk_wed_amsdu_free_buffer() argument
568 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_free_buffer()
575 dma_unmap_single(dev->hw->dev, wed_amsdu[i].txd_phy, in mtk_wed_amsdu_free_buffer()
583 mtk_wed_amsdu_init(struct mtk_wed_device *dev) in mtk_wed_amsdu_init() argument
585 struct mtk_wed_amsdu *wed_amsdu = dev->hw->wed_amsdu; in mtk_wed_amsdu_init()
592 wed_w32(dev, MTK_WED_AMSDU_HIFTXD_BASE_L(i), in mtk_wed_amsdu_init()
596 wed_w32(dev, MTK_WED_AMSDU_STA_INFO_INIT, MTK_WED_AMSDU_STA_RMVL | in mtk_wed_amsdu_init()
599 dev->wlan.amsdu_max_len >> 8) | in mtk_wed_amsdu_init()
601 dev->wlan.amsdu_max_subframes)); in mtk_wed_amsdu_init()
603 wed_w32(dev, MTK_WED_AMSDU_STA_INFO, MTK_WED_AMSDU_STA_INFO_DO_INIT); in mtk_wed_amsdu_init()
605 ret = mtk_wed_poll_busy(dev, MTK_WED_AMSDU_STA_INFO, in mtk_wed_amsdu_init()
608 dev_err(dev->hw->dev, "amsdu initialization failed\n"); in mtk_wed_amsdu_init()
613 wed_set(dev, MTK_WED_AMSDU_HIFTXD_CFG, in mtk_wed_amsdu_init()
614 FIELD_PREP(MTK_WED_AMSDU_HIFTXD_SRC, dev->hw->index)); in mtk_wed_amsdu_init()
617 wed_set(dev, MTK_WED_AMSDU_PSE, MTK_WED_AMSDU_PSE_RESET); in mtk_wed_amsdu_init()
618 ret = mtk_wed_poll_busy(dev, MTK_WED_MON_AMSDU_QMEM_STS1, BIT(29)); in mtk_wed_amsdu_init()
625 if (dev->wlan.id == 0x7991) in mtk_wed_amsdu_init()
626 wed_clr(dev, MTK_WED_AMSDU_FIFO, MTK_WED_AMSDU_IS_PRIOR0_RING); in mtk_wed_amsdu_init()
628 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_amsdu_init()
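The MTK_WED_AMSDU_STA_INFO_INIT write above composes several fields with FIELD_PREP() from <linux/bitfield.h>, which shifts a value into the position described by a constant mask; note that amsdu_max_len is pre-scaled by >> 8 before insertion. For illustration, with DEMO_LEN/DEMO_NUM as hypothetical field masks:

#define DEMO_LEN	GENMASK(15, 8)	/* hypothetical length field */
#define DEMO_NUM	GENMASK(7, 0)	/* hypothetical count field */

static u32 demo_sta_info_word(u32 max_len, u32 max_subframes)
{
	return FIELD_PREP(DEMO_LEN, max_len >> 8) |
	       FIELD_PREP(DEMO_NUM, max_subframes);
}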
634 mtk_wed_tx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_tx_buffer_alloc() argument
636 u32 desc_size = dev->hw->soc->tx_ring_desc_size; in mtk_wed_tx_buffer_alloc()
638 int token = dev->wlan.token_start; in mtk_wed_tx_buffer_alloc()
643 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
644 ring_size = dev->wlan.nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_tx_buffer_alloc()
645 dev->tx_buf_ring.size = ring_size; in mtk_wed_tx_buffer_alloc()
647 dev->tx_buf_ring.size = MTK_WED_TX_BM_DMA_SIZE; in mtk_wed_tx_buffer_alloc()
650 n_pages = dev->tx_buf_ring.size / MTK_WED_BUF_PER_PAGE; in mtk_wed_tx_buffer_alloc()
656 dev->tx_buf_ring.pages = page_list; in mtk_wed_tx_buffer_alloc()
658 desc_ptr = dma_alloc_coherent(dev->hw->dev, in mtk_wed_tx_buffer_alloc()
659 dev->tx_buf_ring.size * desc_size, in mtk_wed_tx_buffer_alloc()
664 dev->tx_buf_ring.desc = desc_ptr; in mtk_wed_tx_buffer_alloc()
665 dev->tx_buf_ring.desc_phys = desc_phys; in mtk_wed_tx_buffer_alloc()
677 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
679 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_tx_buffer_alloc()
686 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
697 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_tx_buffer_alloc()
700 txd_size = dev->wlan.init_buf(buf, buf_phys, in mtk_wed_tx_buffer_alloc()
704 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_tx_buffer_alloc()
723 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_tx_buffer_alloc()
731 mtk_wed_free_tx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_tx_buffer() argument
733 struct mtk_wed_buf *page_list = dev->tx_buf_ring.pages; in mtk_wed_free_tx_buffer()
734 struct mtk_wed_hw *hw = dev->hw; in mtk_wed_free_tx_buffer()
740 if (!dev->tx_buf_ring.desc) in mtk_wed_free_tx_buffer()
743 for (i = 0; i < dev->tx_buf_ring.size; i += MTK_WED_BUF_PER_PAGE) { in mtk_wed_free_tx_buffer()
750 dma_unmap_page(dev->hw->dev, page_phy, PAGE_SIZE, in mtk_wed_free_tx_buffer()
755 dma_free_coherent(dev->hw->dev, in mtk_wed_free_tx_buffer()
756 dev->tx_buf_ring.size * hw->soc->tx_ring_desc_size, in mtk_wed_free_tx_buffer()
757 dev->tx_buf_ring.desc, in mtk_wed_free_tx_buffer()
758 dev->tx_buf_ring.desc_phys); in mtk_wed_free_tx_buffer()
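The TX descriptor array lives in coherent DMA memory: dma_alloc_coherent() returns a CPU pointer and fills in the bus address that the ring registers are programmed with, and the free path above must hand back the same size/pointer/address triple. A hedged sketch of the pairing (parameter names illustrative):

static void *demo_desc_alloc(struct device *d, size_t n_desc,
			     size_t desc_size, dma_addr_t *phys)
{
	return dma_alloc_coherent(d, n_desc * desc_size, phys, GFP_KERNEL);
}

static void demo_desc_free(struct device *d, size_t n_desc,
			   size_t desc_size, void *desc, dma_addr_t phys)
{
	dma_free_coherent(d, n_desc * desc_size, desc, phys);
}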
765 mtk_wed_hwrro_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_hwrro_buffer_alloc() argument
773 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_buffer_alloc()
780 dev->hw_rro.size = dev->wlan.rx_nbuf & ~(MTK_WED_BUF_PER_PAGE - 1); in mtk_wed_hwrro_buffer_alloc()
781 dev->hw_rro.pages = page_list; in mtk_wed_hwrro_buffer_alloc()
782 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_hwrro_buffer_alloc()
783 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_hwrro_buffer_alloc()
788 dev->hw_rro.desc = desc; in mtk_wed_hwrro_buffer_alloc()
789 dev->hw_rro.desc_phys = desc_phys; in mtk_wed_hwrro_buffer_alloc()
800 page_phys = dma_map_page(dev->hw->dev, page, 0, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
802 if (dma_mapping_error(dev->hw->dev, page_phys)) { in mtk_wed_hwrro_buffer_alloc()
809 dma_sync_single_for_cpu(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
820 dma_sync_single_for_device(dev->hw->dev, page_phys, PAGE_SIZE, in mtk_wed_hwrro_buffer_alloc()
828 mtk_wed_rx_buffer_alloc(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_alloc() argument
833 dev->rx_buf_ring.size = dev->wlan.rx_nbuf; in mtk_wed_rx_buffer_alloc()
834 desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rx_buffer_alloc()
835 dev->wlan.rx_nbuf * sizeof(*desc), in mtk_wed_rx_buffer_alloc()
840 dev->rx_buf_ring.desc = desc; in mtk_wed_rx_buffer_alloc()
841 dev->rx_buf_ring.desc_phys = desc_phys; in mtk_wed_rx_buffer_alloc()
842 dev->wlan.init_rx_buf(dev, dev->wlan.rx_npkt); in mtk_wed_rx_buffer_alloc()
844 return mtk_wed_hwrro_buffer_alloc(dev); in mtk_wed_rx_buffer_alloc()
848 mtk_wed_hwrro_free_buffer(struct mtk_wed_device *dev) in mtk_wed_hwrro_free_buffer() argument
850 struct mtk_wed_buf *page_list = dev->hw_rro.pages; in mtk_wed_hwrro_free_buffer()
851 struct mtk_wed_bm_desc *desc = dev->hw_rro.desc; in mtk_wed_hwrro_free_buffer()
854 if (!dev->wlan.hw_rro) in mtk_wed_hwrro_free_buffer()
870 dma_unmap_page(dev->hw->dev, buf_addr, PAGE_SIZE, in mtk_wed_hwrro_free_buffer()
875 dma_free_coherent(dev->hw->dev, dev->hw_rro.size * sizeof(*desc), in mtk_wed_hwrro_free_buffer()
876 desc, dev->hw_rro.desc_phys); in mtk_wed_hwrro_free_buffer()
883 mtk_wed_free_rx_buffer(struct mtk_wed_device *dev) in mtk_wed_free_rx_buffer() argument
885 struct mtk_wed_bm_desc *desc = dev->rx_buf_ring.desc; in mtk_wed_free_rx_buffer()
890 dev->wlan.release_rx_buf(dev); in mtk_wed_free_rx_buffer()
891 dma_free_coherent(dev->hw->dev, dev->rx_buf_ring.size * sizeof(*desc), in mtk_wed_free_rx_buffer()
892 desc, dev->rx_buf_ring.desc_phys); in mtk_wed_free_rx_buffer()
894 mtk_wed_hwrro_free_buffer(dev); in mtk_wed_free_rx_buffer()
898 mtk_wed_hwrro_init(struct mtk_wed_device *dev) in mtk_wed_hwrro_init() argument
900 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_hwrro_init()
903 wed_set(dev, MTK_WED_RRO_PG_BM_RX_DMAM, in mtk_wed_hwrro_init()
906 wed_w32(dev, MTK_WED_RRO_PG_BM_BASE, dev->hw_rro.desc_phys); in mtk_wed_hwrro_init()
908 wed_w32(dev, MTK_WED_RRO_PG_BM_INIT_PTR, in mtk_wed_hwrro_init()
914 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN); in mtk_wed_hwrro_init()
918 mtk_wed_rx_buffer_hw_init(struct mtk_wed_device *dev) in mtk_wed_rx_buffer_hw_init() argument
920 wed_w32(dev, MTK_WED_RX_BM_RX_DMAD, in mtk_wed_rx_buffer_hw_init()
921 FIELD_PREP(MTK_WED_RX_BM_RX_DMAD_SDL0, dev->wlan.rx_size)); in mtk_wed_rx_buffer_hw_init()
922 wed_w32(dev, MTK_WED_RX_BM_BASE, dev->rx_buf_ring.desc_phys); in mtk_wed_rx_buffer_hw_init()
923 wed_w32(dev, MTK_WED_RX_BM_INIT_PTR, MTK_WED_RX_BM_INIT_SW_TAIL | in mtk_wed_rx_buffer_hw_init()
924 FIELD_PREP(MTK_WED_RX_BM_SW_TAIL, dev->wlan.rx_npkt)); in mtk_wed_rx_buffer_hw_init()
925 wed_w32(dev, MTK_WED_RX_BM_DYN_ALLOC_TH, in mtk_wed_rx_buffer_hw_init()
927 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_buffer_hw_init()
929 mtk_wed_hwrro_init(dev); in mtk_wed_rx_buffer_hw_init()
933 mtk_wed_free_ring(struct mtk_wed_device *dev, struct mtk_wed_ring *ring) in mtk_wed_free_ring() argument
938 dma_free_coherent(dev->hw->dev, ring->size * ring->desc_size, in mtk_wed_free_ring()
943 mtk_wed_free_rx_rings(struct mtk_wed_device *dev) in mtk_wed_free_rx_rings() argument
945 mtk_wed_free_rx_buffer(dev); in mtk_wed_free_rx_rings()
946 mtk_wed_free_ring(dev, &dev->rro.ring); in mtk_wed_free_rx_rings()
950 mtk_wed_free_tx_rings(struct mtk_wed_device *dev) in mtk_wed_free_tx_rings() argument
954 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) in mtk_wed_free_tx_rings()
955 mtk_wed_free_ring(dev, &dev->tx_ring[i]); in mtk_wed_free_tx_rings()
956 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_free_tx_rings()
957 mtk_wed_free_ring(dev, &dev->rx_wdma[i]); in mtk_wed_free_tx_rings()
961 mtk_wed_set_ext_int(struct mtk_wed_device *dev, bool en) in mtk_wed_set_ext_int() argument
965 switch (dev->hw->version) { in mtk_wed_set_ext_int()
983 if (!dev->hw->num_flows) in mtk_wed_set_ext_int()
986 wed_w32(dev, MTK_WED_EXT_INT_MASK, en ? mask : 0); in mtk_wed_set_ext_int()
987 wed_r32(dev, MTK_WED_EXT_INT_MASK); in mtk_wed_set_ext_int()
991 mtk_wed_set_512_support(struct mtk_wed_device *dev, bool enable) in mtk_wed_set_512_support() argument
993 if (!mtk_wed_is_v2(dev->hw)) in mtk_wed_set_512_support()
997 wed_w32(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
998 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
1001 wed_w32(dev, MTK_WED_TXP_DW1, in mtk_wed_set_512_support()
1003 wed_clr(dev, MTK_WED_TXDP_CTRL, MTK_WED_TXDP_DW9_OVERWR); in mtk_wed_set_512_support()
1008 mtk_wed_check_wfdma_rx_fill(struct mtk_wed_device *dev, in mtk_wed_check_wfdma_rx_fill() argument
1023 dev_err(dev->hw->dev, "rx dma enable failed\n"); in mtk_wed_check_wfdma_rx_fill()
1031 mtk_wed_dma_disable(struct mtk_wed_device *dev) in mtk_wed_dma_disable() argument
1033 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
1037 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_RX_DRV_EN); in mtk_wed_dma_disable()
1039 wed_clr(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_disable()
1043 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1048 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_disable()
1049 regmap_write(dev->hw->mirror, dev->hw->index * 4, 0); in mtk_wed_dma_disable()
1050 wdma_clr(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1053 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_disable()
1057 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_disable()
1059 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_disable()
1062 if (mtk_wed_is_v3_or_greater(dev->hw) && in mtk_wed_dma_disable()
1063 mtk_wed_get_rx_capa(dev)) { in mtk_wed_dma_disable()
1064 wdma_clr(dev, MTK_WDMA_PREF_TX_CFG, in mtk_wed_dma_disable()
1066 wdma_clr(dev, MTK_WDMA_PREF_RX_CFG, in mtk_wed_dma_disable()
1071 mtk_wed_set_512_support(dev, false); in mtk_wed_dma_disable()
1075 mtk_wed_stop(struct mtk_wed_device *dev) in mtk_wed_stop() argument
1077 mtk_wed_dma_disable(dev); in mtk_wed_stop()
1078 mtk_wed_set_ext_int(dev, false); in mtk_wed_stop()
1080 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, 0); in mtk_wed_stop()
1081 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, 0); in mtk_wed_stop()
1082 wdma_w32(dev, MTK_WDMA_INT_MASK, 0); in mtk_wed_stop()
1083 wdma_w32(dev, MTK_WDMA_INT_GRP2, 0); in mtk_wed_stop()
1085 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_stop()
1088 wed_w32(dev, MTK_WED_EXT_INT_MASK1, 0); in mtk_wed_stop()
1089 wed_w32(dev, MTK_WED_EXT_INT_MASK2, 0); in mtk_wed_stop()
1093 mtk_wed_deinit(struct mtk_wed_device *dev) in mtk_wed_deinit() argument
1095 mtk_wed_stop(dev); in mtk_wed_deinit()
1097 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
1103 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_deinit()
1106 wed_clr(dev, MTK_WED_CTRL, in mtk_wed_deinit()
1111 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_deinit()
1112 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_deinit()
1113 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_TX_AMSDU); in mtk_wed_deinit()
1114 wed_clr(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_deinit()
1121 __mtk_wed_detach(struct mtk_wed_device *dev) in __mtk_wed_detach() argument
1123 struct mtk_wed_hw *hw = dev->hw; in __mtk_wed_detach()
1125 mtk_wed_deinit(dev); in __mtk_wed_detach()
1127 mtk_wdma_rx_reset(dev); in __mtk_wed_detach()
1128 mtk_wed_reset(dev, MTK_WED_RESET_WED); in __mtk_wed_detach()
1129 mtk_wed_amsdu_free_buffer(dev); in __mtk_wed_detach()
1130 mtk_wed_free_tx_buffer(dev); in __mtk_wed_detach()
1131 mtk_wed_free_tx_rings(dev); in __mtk_wed_detach()
1133 if (mtk_wed_get_rx_capa(dev)) { in __mtk_wed_detach()
1135 mtk_wed_wo_reset(dev); in __mtk_wed_detach()
1136 mtk_wed_free_rx_rings(dev); in __mtk_wed_detach()
1141 if (dev->wlan.bus_type == MTK_WED_BUS_PCIE) { in __mtk_wed_detach()
1144 wlan_node = dev->wlan.pci_dev->dev.of_node; in __mtk_wed_detach()
1151 hw->eth->dma_dev != hw->eth->dev) in __mtk_wed_detach()
1152 mtk_eth_set_dma_device(hw->eth, hw->eth->dev); in __mtk_wed_detach()
1154 memset(dev, 0, sizeof(*dev)); in __mtk_wed_detach()
1161 mtk_wed_detach(struct mtk_wed_device *dev) in mtk_wed_detach() argument
1164 __mtk_wed_detach(dev); in mtk_wed_detach()
1169 mtk_wed_bus_init(struct mtk_wed_device *dev) in mtk_wed_bus_init() argument
1171 switch (dev->wlan.bus_type) { in mtk_wed_bus_init()
1173 struct device_node *np = dev->hw->eth->dev->of_node; in mtk_wed_bus_init()
1175 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_bus_init()
1186 if (dev->wlan.msi) { in mtk_wed_bus_init()
1187 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, in mtk_wed_bus_init()
1188 dev->hw->pcie_base | 0xc08); in mtk_wed_bus_init()
1189 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_bus_init()
1190 dev->hw->pcie_base | 0xc04); in mtk_wed_bus_init()
1191 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(8)); in mtk_wed_bus_init()
1193 wed_w32(dev, MTK_WED_PCIE_CFG_INTM, in mtk_wed_bus_init()
1194 dev->hw->pcie_base | 0x180); in mtk_wed_bus_init()
1195 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_bus_init()
1196 dev->hw->pcie_base | 0x184); in mtk_wed_bus_init()
1197 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, BIT(24)); in mtk_wed_bus_init()
1200 wed_w32(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
1204 wed_set(dev, MTK_WED_PCIE_INT_CTRL, in mtk_wed_bus_init()
1208 dev->hw->index)); in mtk_wed_bus_init()
1212 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_bus_init()
1222 mtk_wed_set_wpdma(struct mtk_wed_device *dev) in mtk_wed_set_wpdma() argument
1226 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_set_wpdma()
1227 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_phys); in mtk_wed_set_wpdma()
1231 mtk_wed_bus_init(dev); in mtk_wed_set_wpdma()
1233 wed_w32(dev, MTK_WED_WPDMA_CFG_BASE, dev->wlan.wpdma_int); in mtk_wed_set_wpdma()
1234 wed_w32(dev, MTK_WED_WPDMA_CFG_INT_MASK, dev->wlan.wpdma_mask); in mtk_wed_set_wpdma()
1235 wed_w32(dev, MTK_WED_WPDMA_CFG_TX, dev->wlan.wpdma_tx); in mtk_wed_set_wpdma()
1236 wed_w32(dev, MTK_WED_WPDMA_CFG_TX_FREE, dev->wlan.wpdma_txfree); in mtk_wed_set_wpdma()
1238 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_set_wpdma()
1241 wed_w32(dev, MTK_WED_WPDMA_RX_GLO_CFG, dev->wlan.wpdma_rx_glo); in mtk_wed_set_wpdma()
1242 wed_w32(dev, dev->hw->soc->regmap.wpdma_rx_ring0, dev->wlan.wpdma_rx); in mtk_wed_set_wpdma()
1244 if (!dev->wlan.hw_rro) in mtk_wed_set_wpdma()
1247 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(0), dev->wlan.wpdma_rx_rro[0]); in mtk_wed_set_wpdma()
1248 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(1), dev->wlan.wpdma_rx_rro[1]); in mtk_wed_set_wpdma()
1250 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING_CFG(i), in mtk_wed_set_wpdma()
1251 dev->wlan.wpdma_rx_pg + i * 0x10); in mtk_wed_set_wpdma()
1255 mtk_wed_hw_init_early(struct mtk_wed_device *dev) in mtk_wed_hw_init_early() argument
1260 mtk_wed_deinit(dev); in mtk_wed_hw_init_early()
1261 mtk_wed_reset(dev, MTK_WED_RESET_WED); in mtk_wed_hw_init_early()
1262 mtk_wed_set_wpdma(dev); in mtk_wed_hw_init_early()
1264 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init_early()
1270 wed_m32(dev, MTK_WED_WDMA_GLO_CFG, mask, set); in mtk_wed_hw_init_early()
1272 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init_early()
1273 u32 offset = dev->hw->index ? 0x04000400 : 0; in mtk_wed_hw_init_early()
1275 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_hw_init_early()
1280 wed_w32(dev, MTK_WED_WDMA_OFFSET0, 0x2a042a20 + offset); in mtk_wed_hw_init_early()
1281 wed_w32(dev, MTK_WED_WDMA_OFFSET1, 0x29002800 + offset); in mtk_wed_hw_init_early()
1282 wed_w32(dev, MTK_WED_PCIE_CFG_BASE, in mtk_wed_hw_init_early()
1283 MTK_PCIE_BASE(dev->hw->index)); in mtk_wed_hw_init_early()
1285 wed_w32(dev, MTK_WED_WDMA_CFG_BASE, dev->hw->wdma_phy); in mtk_wed_hw_init_early()
1286 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_ETH_DMAD_FMT); in mtk_wed_hw_init_early()
1287 wed_w32(dev, MTK_WED_WDMA_OFFSET0, in mtk_wed_hw_init_early()
1293 wed_w32(dev, MTK_WED_WDMA_OFFSET1, in mtk_wed_hw_init_early()
1302 mtk_wed_rro_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_rro_ring_alloc() argument
1305 ring->desc = dma_alloc_coherent(dev->hw->dev, in mtk_wed_rro_ring_alloc()
1319 mtk_wed_rro_alloc(struct mtk_wed_device *dev) in mtk_wed_rro_alloc() argument
1325 index = of_property_match_string(dev->hw->node, "memory-region-names", in mtk_wed_rro_alloc()
1330 np = of_parse_phandle(dev->hw->node, "memory-region", index); in mtk_wed_rro_alloc()
1340 dev->rro.miod_phys = rmem->base; in mtk_wed_rro_alloc()
1341 dev->rro.fdbk_phys = MTK_WED_MIOD_COUNT + dev->rro.miod_phys; in mtk_wed_rro_alloc()
1343 return mtk_wed_rro_ring_alloc(dev, &dev->rro.ring, in mtk_wed_rro_alloc()
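mtk_wed_rro_alloc() resolves a named reserved-memory region from the device tree: of_property_match_string() maps a region name to an index, of_parse_phandle() follows the matching memory-region phandle, and the region's base then seeds rro.miod_phys/fdbk_phys. A hedged sketch of such a lookup, assuming the of_reserved_mem_lookup() helper; the listing does not show which name the driver matches, so "my-region" is hypothetical:

static int demo_rmem_base(struct device_node *node, phys_addr_t *base)
{
	struct reserved_mem *rmem;
	struct device_node *np;
	int index;

	index = of_property_match_string(node, "memory-region-names",
					 "my-region");
	if (index < 0)
		return index;

	np = of_parse_phandle(node, "memory-region", index);
	if (!np)
		return -ENODEV;

	rmem = of_reserved_mem_lookup(np);
	of_node_put(np);
	if (!rmem)
		return -ENODEV;

	*base = rmem->base;
	return 0;
}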
1348 mtk_wed_rro_cfg(struct mtk_wed_device *dev) in mtk_wed_rro_cfg() argument
1350 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rro_cfg()
1379 mtk_wed_rro_hw_init(struct mtk_wed_device *dev) in mtk_wed_rro_hw_init() argument
1381 wed_w32(dev, MTK_WED_RROQM_MIOD_CFG, in mtk_wed_rro_hw_init()
1387 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL0, dev->rro.miod_phys); in mtk_wed_rro_hw_init()
1388 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL1, in mtk_wed_rro_hw_init()
1390 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL0, dev->rro.fdbk_phys); in mtk_wed_rro_hw_init()
1391 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL1, in mtk_wed_rro_hw_init()
1393 wed_w32(dev, MTK_WED_RROQM_FDBK_CTRL2, 0); in mtk_wed_rro_hw_init()
1394 wed_w32(dev, MTK_WED_RROQ_BASE_L, dev->rro.ring.desc_phys); in mtk_wed_rro_hw_init()
1396 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rro_hw_init()
1400 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rro_hw_init()
1401 wed_w32(dev, MTK_WED_RROQM_MIOD_CTRL2, MTK_WED_MIOD_CNT - 1); in mtk_wed_rro_hw_init()
1402 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rro_hw_init()
1406 mtk_wed_route_qm_hw_init(struct mtk_wed_device *dev) in mtk_wed_route_qm_hw_init() argument
1408 wed_w32(dev, MTK_WED_RESET, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_route_qm_hw_init()
1412 if (!(wed_r32(dev, MTK_WED_RESET) & MTK_WED_RESET_RX_ROUTE_QM)) in mtk_wed_route_qm_hw_init()
1417 if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_route_qm_hw_init()
1418 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
1419 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_TXDMAD_FPORT); in mtk_wed_route_qm_hw_init()
1420 wed_set(dev, MTK_WED_RTQM_GLO_CFG, in mtk_wed_route_qm_hw_init()
1422 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1423 wed_clr(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_route_qm_hw_init()
1425 wed_set(dev, MTK_WED_RTQM_ENQ_CFG0, in mtk_wed_route_qm_hw_init()
1427 0x3 + dev->hw->index)); in mtk_wed_route_qm_hw_init()
1430 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_route_qm_hw_init()
1434 mtk_wed_hw_init(struct mtk_wed_device *dev) in mtk_wed_hw_init() argument
1436 if (dev->init_done) in mtk_wed_hw_init()
1439 dev->init_done = true; in mtk_wed_hw_init()
1440 mtk_wed_set_ext_int(dev, false); in mtk_wed_hw_init()
1442 wed_w32(dev, MTK_WED_TX_BM_BASE, dev->tx_buf_ring.desc_phys); in mtk_wed_hw_init()
1443 wed_w32(dev, MTK_WED_TX_BM_BUF_LEN, MTK_WED_PKT_SIZE); in mtk_wed_hw_init()
1445 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1446 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1449 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1452 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
1455 } else if (mtk_wed_is_v2(dev->hw)) { in mtk_wed_hw_init()
1456 wed_w32(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1459 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1462 wed_w32(dev, MTK_WED_TX_TKID_DYN_THR, in mtk_wed_hw_init()
1465 wed_w32(dev, MTK_WED_TX_BM_DYN_THR, in mtk_wed_hw_init()
1468 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1471 dev->tx_buf_ring.size / 128) | in mtk_wed_hw_init()
1473 dev->tx_buf_ring.size / 128)); in mtk_wed_hw_init()
1476 wed_w32(dev, dev->hw->soc->regmap.tx_bm_tkid, in mtk_wed_hw_init()
1477 FIELD_PREP(MTK_WED_TX_BM_TKID_START, dev->wlan.token_start) | in mtk_wed_hw_init()
1479 dev->wlan.token_start + dev->wlan.nbuf - 1)); in mtk_wed_hw_init()
1481 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_hw_init()
1483 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_hw_init()
1485 wed_clr(dev, MTK_WED_TX_BM_CTRL, in mtk_wed_hw_init()
1488 wed_w32(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1491 dev->wlan.nbuf / 128) | in mtk_wed_hw_init()
1493 dev->wlan.nbuf / 128)); in mtk_wed_hw_init()
1495 wed_set(dev, MTK_WED_TX_TKID_CTRL, in mtk_wed_hw_init()
1498 wed_w32(dev, MTK_WED_TX_BM_INIT_PTR, in mtk_wed_hw_init()
1503 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_hw_init()
1504 wed_set(dev, MTK_WED_CTRL, in mtk_wed_hw_init()
1507 } else if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_hw_init()
1509 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_hw_init()
1512 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_hw_init()
1515 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX, in mtk_wed_hw_init()
1517 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX0_SIDX, in mtk_wed_hw_init()
1520 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX, in mtk_wed_hw_init()
1522 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_RX1_SIDX, in mtk_wed_hw_init()
1526 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, in mtk_wed_hw_init()
1529 wed_w32(dev, MTK_WED_WPDMA_RX_D_PREF_FIFO_CFG, 0); in mtk_wed_hw_init()
1531 mtk_wed_rx_buffer_hw_init(dev); in mtk_wed_hw_init()
1532 mtk_wed_rro_hw_init(dev); in mtk_wed_hw_init()
1533 mtk_wed_route_qm_hw_init(dev); in mtk_wed_hw_init()
1536 wed_clr(dev, MTK_WED_TX_BM_CTRL, MTK_WED_TX_BM_CTRL_PAUSE); in mtk_wed_hw_init()
1537 if (!mtk_wed_is_v1(dev->hw)) in mtk_wed_hw_init()
1538 wed_clr(dev, MTK_WED_TX_TKID_CTRL, MTK_WED_TX_TKID_CTRL_PAUSE); in mtk_wed_hw_init()
1562 mtk_wed_rx_reset(struct mtk_wed_device *dev) in mtk_wed_rx_reset() argument
1564 struct mtk_wed_wo *wo = dev->hw->wed_wo; in mtk_wed_rx_reset()
1574 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1575 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN); in mtk_wed_rx_reset()
1576 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_HW_STS, in mtk_wed_rx_reset()
1578 mtk_wed_reset(dev, MTK_WED_RESET_RRO_RX_TO_PG); in mtk_wed_rx_reset()
1581 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RX_DRV_EN); in mtk_wed_rx_reset()
1582 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1584 if (!ret && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1585 ret = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1588 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_rx_reset()
1589 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_D_DRV); in mtk_wed_rx_reset()
1591 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1593 wed_clr(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1595 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_rx_reset()
1597 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1601 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, in mtk_wed_rx_reset()
1605 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1608 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_rx_reset()
1612 wed_w32(dev, MTK_WED_WPDMA_RX_D_RST_IDX, 0); in mtk_wed_rx_reset()
1616 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_RRO_QM_EN); in mtk_wed_rx_reset()
1617 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1620 mtk_wed_reset(dev, MTK_WED_RESET_RX_RRO_QM); in mtk_wed_rx_reset()
1622 wed_set(dev, MTK_WED_RROQM_RST_IDX, in mtk_wed_rx_reset()
1625 wed_w32(dev, MTK_WED_RROQM_RST_IDX, 0); in mtk_wed_rx_reset()
1628 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1630 wed_clr(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1634 wed_clr(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN); in mtk_wed_rx_reset()
1637 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1639 mtk_wed_poll_busy(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_rx_reset()
1643 wed_w32(dev, MTK_WED_RRO_RX_D_CFG(2), in mtk_wed_rx_reset()
1645 mtk_wed_poll_busy(dev, MTK_WED_RRO_RX_D_CFG(2), in mtk_wed_rx_reset()
1650 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_RX_ROUTE_QM_EN); in mtk_wed_rx_reset()
1651 ret = mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1654 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1655 } else if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_rx_reset()
1656 wed_set(dev, MTK_WED_RTQM_RST, BIT(0)); in mtk_wed_rx_reset()
1657 wed_clr(dev, MTK_WED_RTQM_RST, BIT(0)); in mtk_wed_rx_reset()
1658 mtk_wed_reset(dev, MTK_WED_RESET_RX_ROUTE_QM); in mtk_wed_rx_reset()
1660 wed_set(dev, MTK_WED_RTQM_GLO_CFG, MTK_WED_RTQM_Q_RST); in mtk_wed_rx_reset()
1664 mtk_wdma_tx_reset(dev); in mtk_wed_rx_reset()
1667 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, MTK_WED_WDMA_GLO_CFG_TX_DRV_EN); in mtk_wed_rx_reset()
1668 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_rx_reset()
1669 mtk_wed_poll_busy(dev, MTK_WED_WPDMA_STATUS, in mtk_wed_rx_reset()
1672 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1674 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_TX_DRV); in mtk_wed_rx_reset()
1677 ret = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_rx_reset()
1679 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_RX_DMA_EN); in mtk_wed_rx_reset()
1681 mtk_wed_reset(dev, MTK_WED_RESET_WED_RX_DMA); in mtk_wed_rx_reset()
1683 wed_set(dev, MTK_WED_RESET_IDX, in mtk_wed_rx_reset()
1684 dev->hw->soc->regmap.reset_idx_rx_mask); in mtk_wed_rx_reset()
1685 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_rx_reset()
1689 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_BM_EN); in mtk_wed_rx_reset()
1690 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1692 mtk_wed_reset(dev, MTK_WED_RESET_RX_BM); in mtk_wed_rx_reset()
1694 if (dev->wlan.hw_rro) { in mtk_wed_rx_reset()
1695 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_PG_BM_EN); in mtk_wed_rx_reset()
1696 mtk_wed_poll_busy(dev, MTK_WED_CTRL, in mtk_wed_rx_reset()
1698 wed_set(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM); in mtk_wed_rx_reset()
1699 wed_clr(dev, MTK_WED_RESET, MTK_WED_RESET_RX_PG_BM); in mtk_wed_rx_reset()
1711 for (i = 0; i < ARRAY_SIZE(dev->rx_ring); i++) { in mtk_wed_rx_reset()
1712 if (!dev->rx_ring[i].desc) in mtk_wed_rx_reset()
1715 mtk_wed_ring_reset(&dev->rx_ring[i], MTK_WED_RX_RING_SIZE, in mtk_wed_rx_reset()
1718 mtk_wed_free_rx_buffer(dev); in mtk_wed_rx_reset()
1719 mtk_wed_hwrro_free_buffer(dev); in mtk_wed_rx_reset()
1725 mtk_wed_reset_dma(struct mtk_wed_device *dev) in mtk_wed_reset_dma() argument
1731 for (i = 0; i < ARRAY_SIZE(dev->tx_ring); i++) { in mtk_wed_reset_dma()
1732 if (!dev->tx_ring[i].desc) in mtk_wed_reset_dma()
1735 mtk_wed_ring_reset(&dev->tx_ring[i], MTK_WED_TX_RING_SIZE, in mtk_wed_reset_dma()
1740 wed_clr(dev, MTK_WED_GLO_CFG, MTK_WED_GLO_CFG_TX_DMA_EN); in mtk_wed_reset_dma()
1741 busy = mtk_wed_poll_busy(dev, MTK_WED_GLO_CFG, in mtk_wed_reset_dma()
1744 mtk_wed_reset(dev, MTK_WED_RESET_WED_TX_DMA); in mtk_wed_reset_dma()
1746 wed_w32(dev, MTK_WED_RESET_IDX, in mtk_wed_reset_dma()
1747 dev->hw->soc->regmap.reset_idx_tx_mask); in mtk_wed_reset_dma()
1748 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1752 busy = !!mtk_wdma_rx_reset(dev); in mtk_wed_reset_dma()
1753 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1755 wed_r32(dev, MTK_WED_WDMA_GLO_CFG); in mtk_wed_reset_dma()
1757 wed_w32(dev, MTK_WED_WDMA_GLO_CFG, val); in mtk_wed_reset_dma()
1759 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1764 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1766 if (!busy && mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1767 busy = mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1771 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_INT_AGENT); in mtk_wed_reset_dma()
1772 mtk_wed_reset(dev, MTK_WED_RESET_WDMA_RX_DRV); in mtk_wed_reset_dma()
1774 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1776 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1778 mtk_wed_poll_busy(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1780 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_reset_dma()
1784 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1788 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, in mtk_wed_reset_dma()
1790 wed_w32(dev, MTK_WED_WDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1792 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1795 wed_clr(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_reset_dma()
1800 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_FREE_AGENT_EN); in mtk_wed_reset_dma()
1803 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1805 wed_r32(dev, MTK_WED_TX_BM_INTF)); in mtk_wed_reset_dma()
1808 wed_r32(dev, MTK_WED_TX_TKID_INTF)); in mtk_wed_reset_dma()
1813 mtk_wed_reset(dev, MTK_WED_RESET_TX_FREE_AGENT); in mtk_wed_reset_dma()
1814 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_TX_BM_EN); in mtk_wed_reset_dma()
1815 mtk_wed_reset(dev, MTK_WED_RESET_TX_BM); in mtk_wed_reset_dma()
1818 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1820 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1824 busy = mtk_wed_poll_busy(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_reset_dma()
1828 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_INT_AGENT); in mtk_wed_reset_dma()
1829 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_TX_DRV); in mtk_wed_reset_dma()
1830 mtk_wed_reset(dev, MTK_WED_RESET_WPDMA_RX_DRV); in mtk_wed_reset_dma()
1831 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_reset_dma()
1832 wed_w32(dev, MTK_WED_RX1_CTRL2, 0); in mtk_wed_reset_dma()
1834 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, in mtk_wed_reset_dma()
1837 wed_w32(dev, MTK_WED_WPDMA_RESET_IDX, 0); in mtk_wed_reset_dma()
1840 dev->init_done = false; in mtk_wed_reset_dma()
1841 if (mtk_wed_is_v1(dev->hw)) in mtk_wed_reset_dma()
1845 wed_w32(dev, MTK_WED_RESET_IDX, MTK_WED_RESET_WPDMA_IDX_RX); in mtk_wed_reset_dma()
1846 wed_w32(dev, MTK_WED_RESET_IDX, 0); in mtk_wed_reset_dma()
1849 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_reset_dma()
1851 wed_clr(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_AMSDU_EN); in mtk_wed_reset_dma()
1852 mtk_wed_reset(dev, MTK_WED_RESET_TX_AMSDU); in mtk_wed_reset_dma()
1855 if (mtk_wed_get_rx_capa(dev)) in mtk_wed_reset_dma()
1856 mtk_wed_rx_reset(dev); in mtk_wed_reset_dma()
1860 mtk_wed_ring_alloc(struct mtk_wed_device *dev, struct mtk_wed_ring *ring, in mtk_wed_ring_alloc() argument
1863 ring->desc = dma_alloc_coherent(dev->hw->dev, size * desc_size, in mtk_wed_ring_alloc()
1876 mtk_wed_wdma_rx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_rx_ring_setup() argument
1881 if (idx >= ARRAY_SIZE(dev->rx_wdma)) in mtk_wed_wdma_rx_ring_setup()
1884 wdma = &dev->rx_wdma[idx]; in mtk_wed_wdma_rx_ring_setup()
1885 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_rx_ring_setup()
1886 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_rx_ring_setup()
1889 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1891 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
1893 wdma_w32(dev, MTK_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_rx_ring_setup()
1895 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_rx_ring_setup()
1897 wed_w32(dev, MTK_WED_WDMA_RING_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_rx_ring_setup()
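Both the WDMA and WED views of an RX ring are programmed with the same register triplet relative to the ring's base offset: descriptor base address, descriptor count, and the CPU index reset to 0 (the MTK_WED_RING_OFS_* offsets above). A hedged sketch of that sequence for one ring:

static void demo_program_ring(struct mtk_wed_device *dev, u32 ring_base,
			      struct mtk_wed_ring *ring, u32 size)
{
	wdma_w32(dev, ring_base + MTK_WED_RING_OFS_BASE, ring->desc_phys);
	wdma_w32(dev, ring_base + MTK_WED_RING_OFS_COUNT, size);
	wdma_w32(dev, ring_base + MTK_WED_RING_OFS_CPU_IDX, 0);
}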
1904 mtk_wed_wdma_tx_ring_setup(struct mtk_wed_device *dev, int idx, int size, in mtk_wed_wdma_tx_ring_setup() argument
1909 if (idx >= ARRAY_SIZE(dev->tx_wdma)) in mtk_wed_wdma_tx_ring_setup()
1912 wdma = &dev->tx_wdma[idx]; in mtk_wed_wdma_tx_ring_setup()
1913 if (!reset && mtk_wed_ring_alloc(dev, wdma, MTK_WED_WDMA_RING_SIZE, in mtk_wed_wdma_tx_ring_setup()
1914 dev->hw->soc->wdma_desc_size, true)) in mtk_wed_wdma_tx_ring_setup()
1917 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_wdma_tx_ring_setup()
1935 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1937 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1939 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1940 wdma_w32(dev, MTK_WDMA_RING_TX(idx) + MTK_WED_RING_OFS_DMA_IDX, 0); in mtk_wed_wdma_tx_ring_setup()
1946 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_BASE, in mtk_wed_wdma_tx_ring_setup()
1948 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_COUNT, in mtk_wed_wdma_tx_ring_setup()
1950 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_CPU_IDX, in mtk_wed_wdma_tx_ring_setup()
1952 wed_w32(dev, MTK_WED_WDMA_RING_TX + MTK_WED_RING_OFS_DMA_IDX, in mtk_wed_wdma_tx_ring_setup()
1960 mtk_wed_ppe_check(struct mtk_wed_device *dev, struct sk_buff *skb, in mtk_wed_ppe_check() argument
1963 struct mtk_eth *eth = dev->hw->eth; in mtk_wed_ppe_check()
1975 mtk_ppe_check_skb(eth->ppe[dev->hw->index], skb, hash); in mtk_wed_ppe_check()
1979 mtk_wed_configure_irq(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_configure_irq() argument
1984 wed_set(dev, MTK_WED_CTRL, in mtk_wed_configure_irq()
1990 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_configure_irq()
1991 wed_w32(dev, MTK_WED_PCIE_INT_TRIGGER, in mtk_wed_configure_irq()
1994 wed_w32(dev, MTK_WED_WPDMA_INT_TRIGGER, in mtk_wed_configure_irq()
1998 wed_clr(dev, MTK_WED_WDMA_INT_CTRL, wdma_mask); in mtk_wed_configure_irq()
2000 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_configure_irq()
2001 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_TX_TKID_ALI_EN); in mtk_wed_configure_irq()
2004 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX, in mtk_wed_configure_irq()
2010 dev->wlan.tx_tbit[0]) | in mtk_wed_configure_irq()
2012 dev->wlan.tx_tbit[1])); in mtk_wed_configure_irq()
2015 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_TX_FREE, in mtk_wed_configure_irq()
2019 dev->wlan.txfree_tbit)); in mtk_wed_configure_irq()
2021 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_configure_irq()
2022 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RX, in mtk_wed_configure_irq()
2028 dev->wlan.rx_tbit[0]) | in mtk_wed_configure_irq()
2030 dev->wlan.rx_tbit[1])); in mtk_wed_configure_irq()
2036 wed_w32(dev, MTK_WED_WDMA_INT_CLR, wdma_mask); in mtk_wed_configure_irq()
2037 wed_set(dev, MTK_WED_WDMA_INT_CTRL, in mtk_wed_configure_irq()
2039 dev->wdma_idx)); in mtk_wed_configure_irq()
2042 wed_w32(dev, MTK_WED_WDMA_INT_TRIGGER, wdma_mask); in mtk_wed_configure_irq()
2044 wdma_w32(dev, MTK_WDMA_INT_MASK, wdma_mask); in mtk_wed_configure_irq()
2045 wdma_w32(dev, MTK_WDMA_INT_GRP2, wdma_mask); in mtk_wed_configure_irq()
2046 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_configure_irq()
2047 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_configure_irq()
2052 mtk_wed_dma_enable(struct mtk_wed_device *dev) in mtk_wed_dma_enable() argument
2056 if (!mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2057 wed_set(dev, MTK_WED_WPDMA_INT_CTRL, in mtk_wed_dma_enable()
2059 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2062 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2066 wed_set(dev, MTK_WED_WPDMA_CTRL, MTK_WED_WPDMA_CTRL_SDL1_FIXED); in mtk_wed_dma_enable()
2068 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2072 wdma_set(dev, MTK_WDMA_GLO_CFG, MTK_WDMA_GLO_CFG_TX_DMA_EN); in mtk_wed_dma_enable()
2075 wed_set(dev, MTK_WED_GLO_CFG, in mtk_wed_dma_enable()
2079 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2082 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_dma_enable()
2083 wdma_set(dev, MTK_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2088 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2092 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2093 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_dma_enable()
2096 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_dma_enable()
2098 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, MTK_WED_WDMA_RX_PREF_EN); in mtk_wed_dma_enable()
2100 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2102 wed_set(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2107 wdma_set(dev, MTK_WDMA_PREF_RX_CFG, MTK_WDMA_PREF_RX_CFG_PREF_EN); in mtk_wed_dma_enable()
2108 wdma_set(dev, MTK_WDMA_WRBK_RX_CFG, MTK_WDMA_WRBK_RX_CFG_WRBK_EN); in mtk_wed_dma_enable()
2111 wed_clr(dev, MTK_WED_WPDMA_GLO_CFG, in mtk_wed_dma_enable()
2115 if (!mtk_wed_get_rx_capa(dev)) in mtk_wed_dma_enable()
2118 wed_set(dev, MTK_WED_WDMA_GLO_CFG, in mtk_wed_dma_enable()
2122 wed_clr(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, MTK_WED_WPDMA_RX_D_RXD_READ_LEN); in mtk_wed_dma_enable()
2123 wed_set(dev, MTK_WED_WPDMA_RX_D_GLO_CFG, in mtk_wed_dma_enable()
2128 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_dma_enable()
2129 wed_set(dev, MTK_WED_WPDMA_RX_D_PREF_CFG, in mtk_wed_dma_enable()
2134 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_RX_D_DRV_EN); in mtk_wed_dma_enable()
2135 wdma_set(dev, MTK_WDMA_PREF_TX_CFG, MTK_WDMA_PREF_TX_CFG_PREF_EN); in mtk_wed_dma_enable()
2136 wdma_set(dev, MTK_WDMA_WRBK_TX_CFG, MTK_WDMA_WRBK_TX_CFG_WRBK_EN); in mtk_wed_dma_enable()
2140 struct mtk_wed_ring *ring = &dev->rx_ring[i]; in mtk_wed_dma_enable()
2146 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) { in mtk_wed_dma_enable()
2147 dev_err(dev->hw->dev, in mtk_wed_dma_enable()
2152 val = wifi_r32(dev, in mtk_wed_dma_enable()
2153 dev->wlan.wpdma_rx_glo - in mtk_wed_dma_enable()
2154 dev->wlan.phy_base) | MTK_WFMDA_RX_DMA_EN; in mtk_wed_dma_enable()
2155 wifi_w32(dev, in mtk_wed_dma_enable()
2156 dev->wlan.wpdma_rx_glo - dev->wlan.phy_base, in mtk_wed_dma_enable()
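The wifi_r32()/wifi_w32() calls above address the WLAN window by offset: dev->wlan.base maps the window that starts at dev->wlan.phy_base, so subtracting phy_base from an absolute register address (here wlan.wpdma_rx_glo) yields the offset to pass to the accessors. A hedged sketch of the conversion:

/* Set a bit in a WLAN register given its absolute address; assumes
 * dev->wlan.base maps the window starting at dev->wlan.phy_base. */
static void demo_wifi_set_bit(struct mtk_wed_device *dev, u32 addr, u32 bit)
{
	u32 ofs = addr - dev->wlan.phy_base;

	wifi_w32(dev, ofs, wifi_r32(dev, ofs) | bit);
}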
2162 mtk_wed_start_hw_rro(struct mtk_wed_device *dev, u32 irq_mask, bool reset) in mtk_wed_start_hw_rro() argument
2166 wed_w32(dev, MTK_WED_WPDMA_INT_MASK, irq_mask); in mtk_wed_start_hw_rro()
2167 wed_w32(dev, MTK_WED_INT_MASK, irq_mask); in mtk_wed_start_hw_rro()
2169 if (!mtk_wed_get_rx_capa(dev) || !dev->wlan.hw_rro) in mtk_wed_start_hw_rro()
2173 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2178 wed_set(dev, MTK_WED_RRO_RX_D_CFG(2), MTK_WED_RRO_MSDU_PG_DRV_CLR); in mtk_wed_start_hw_rro()
2179 wed_w32(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2182 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_RX, in mtk_wed_start_hw_rro()
2188 dev->wlan.rro_rx_tbit[0]) | in mtk_wed_start_hw_rro()
2190 dev->wlan.rro_rx_tbit[1])); in mtk_wed_start_hw_rro()
2192 wed_w32(dev, MTK_WED_WPDMA_INT_CTRL_RRO_MSDU_PG, in mtk_wed_start_hw_rro()
2200 dev->wlan.rx_pg_tbit[0]) | in mtk_wed_start_hw_rro()
2202 dev->wlan.rx_pg_tbit[1]) | in mtk_wed_start_hw_rro()
2204 dev->wlan.rx_pg_tbit[2])); in mtk_wed_start_hw_rro()
2209 wed_set(dev, MTK_WED_RRO_MSDU_PG_RING2_CFG, in mtk_wed_start_hw_rro()
2213 struct mtk_wed_ring *ring = &dev->rx_rro_ring[i]; in mtk_wed_start_hw_rro()
2218 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) in mtk_wed_start_hw_rro()
2219 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2224 struct mtk_wed_ring *ring = &dev->rx_page_ring[i]; in mtk_wed_start_hw_rro()
2229 if (mtk_wed_check_wfdma_rx_fill(dev, ring)) in mtk_wed_start_hw_rro()
2230 dev_err(dev->hw->dev, in mtk_wed_start_hw_rro()
2236 mtk_wed_rro_rx_ring_setup(struct mtk_wed_device *dev, int idx, in mtk_wed_rro_rx_ring_setup() argument
2239 struct mtk_wed_ring *ring = &dev->rx_rro_ring[idx]; in mtk_wed_rro_rx_ring_setup()
2242 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rro_rx_ring_setup()
2244 wed_w32(dev, MTK_WED_RRO_RX_D_RX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rro_rx_ring_setup()
2250 mtk_wed_msdu_pg_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs) in mtk_wed_msdu_pg_rx_ring_setup() argument
2252 struct mtk_wed_ring *ring = &dev->rx_page_ring[idx]; in mtk_wed_msdu_pg_rx_ring_setup()
2255 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_msdu_pg_rx_ring_setup()
2257 wed_w32(dev, MTK_WED_RRO_MSDU_PG_CTRL0(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_msdu_pg_rx_ring_setup()
2263 mtk_wed_ind_rx_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_ind_rx_ring_setup() argument
2265 struct mtk_wed_ring *ring = &dev->ind_cmd_ring; in mtk_wed_ind_rx_ring_setup()
2270 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_BASE, in mtk_wed_ind_rx_ring_setup()
2273 wed_w32(dev, MTK_WED_IND_CMD_RX_CTRL1 + MTK_WED_RING_OFS_COUNT, in mtk_wed_ind_rx_ring_setup()
2277 wed_w32(dev, MTK_WED_RRO_CFG0, dev->wlan.phy_base + in mtk_wed_ind_rx_ring_setup()
2278 dev->wlan.ind_cmd.ack_sn_addr); in mtk_wed_ind_rx_ring_setup()
2279 wed_w32(dev, MTK_WED_RRO_CFG1, in mtk_wed_ind_rx_ring_setup()
2281 dev->wlan.ind_cmd.win_size) | in mtk_wed_ind_rx_ring_setup()
2283 dev->wlan.ind_cmd.particular_sid)); in mtk_wed_ind_rx_ring_setup()
2286 wed_w32(dev, MTK_WED_ADDR_ELEM_CFG0, in mtk_wed_ind_rx_ring_setup()
2287 dev->wlan.ind_cmd.particular_se_phys); in mtk_wed_ind_rx_ring_setup()
2289 for (i = 0; i < dev->wlan.ind_cmd.se_group_nums; i++) { in mtk_wed_ind_rx_ring_setup()
2290 wed_w32(dev, MTK_WED_RADDR_ELEM_TBL_WDATA, in mtk_wed_ind_rx_ring_setup()
2291 dev->wlan.ind_cmd.addr_elem_phys[i] >> 4); in mtk_wed_ind_rx_ring_setup()
2292 wed_w32(dev, MTK_WED_ADDR_ELEM_TBL_CFG, in mtk_wed_ind_rx_ring_setup()
2295 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG); in mtk_wed_ind_rx_ring_setup()
2297 val = wed_r32(dev, MTK_WED_ADDR_ELEM_TBL_CFG); in mtk_wed_ind_rx_ring_setup()
2299 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2304 for (i = 0; i < dev->wlan.ind_cmd.particular_sid; i++) { in mtk_wed_ind_rx_ring_setup()
2305 wed_w32(dev, MTK_WED_PN_CHECK_WDATA_M, in mtk_wed_ind_rx_ring_setup()
2308 wed_w32(dev, MTK_WED_PN_CHECK_CFG, MTK_WED_PN_CHECK_WR | in mtk_wed_ind_rx_ring_setup()
2312 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG); in mtk_wed_ind_rx_ring_setup()
2314 val = wed_r32(dev, MTK_WED_PN_CHECK_CFG); in mtk_wed_ind_rx_ring_setup()
2316 dev_err(dev->hw->dev, in mtk_wed_ind_rx_ring_setup()
2320 wed_w32(dev, MTK_WED_RX_IND_CMD_CNT0, MTK_WED_RX_IND_CMD_DBG_CNT_EN); in mtk_wed_ind_rx_ring_setup()
2321 wed_set(dev, MTK_WED_CTRL, MTK_WED_CTRL_WED_RX_IND_CMD_EN); in mtk_wed_ind_rx_ring_setup()
2327 mtk_wed_start(struct mtk_wed_device *dev, u32 irq_mask) in mtk_wed_start() argument
2331 if (mtk_wed_get_rx_capa(dev) && mtk_wed_rx_buffer_alloc(dev)) in mtk_wed_start()
2334 for (i = 0; i < ARRAY_SIZE(dev->rx_wdma); i++) in mtk_wed_start()
2335 if (!dev->rx_wdma[i].desc) in mtk_wed_start()
2336 mtk_wed_wdma_rx_ring_setup(dev, i, 16, false); in mtk_wed_start()
2338 mtk_wed_hw_init(dev); in mtk_wed_start()
2339 mtk_wed_configure_irq(dev, irq_mask); in mtk_wed_start()
2341 mtk_wed_set_ext_int(dev, true); in mtk_wed_start()
2343 if (mtk_wed_is_v1(dev->hw)) { in mtk_wed_start()
2344 u32 val = dev->wlan.wpdma_phys | MTK_PCIE_MIRROR_MAP_EN | in mtk_wed_start()
2346 dev->hw->index); in mtk_wed_start()
2348 val |= BIT(0) | (BIT(1) * !!dev->hw->index); in mtk_wed_start()
2349 regmap_write(dev->hw->mirror, dev->hw->index * 4, val); in mtk_wed_start()
2350 } else if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_start()
2352 wed_w32(dev, MTK_WED_EXT_INT_MASK1, in mtk_wed_start()
2354 wed_w32(dev, MTK_WED_EXT_INT_MASK2, in mtk_wed_start()
2357 wed_r32(dev, MTK_WED_EXT_INT_MASK1); in mtk_wed_start()
2358 wed_r32(dev, MTK_WED_EXT_INT_MASK2); in mtk_wed_start()
2360 if (mtk_wed_is_v3_or_greater(dev->hw)) { in mtk_wed_start()
2361 wed_w32(dev, MTK_WED_EXT_INT_MASK3, in mtk_wed_start()
2363 wed_r32(dev, MTK_WED_EXT_INT_MASK3); in mtk_wed_start()
2366 if (mtk_wed_rro_cfg(dev)) in mtk_wed_start()
2370 mtk_wed_set_512_support(dev, dev->wlan.wcid_512); in mtk_wed_start()
2371 mtk_wed_amsdu_init(dev); in mtk_wed_start()
2373 mtk_wed_dma_enable(dev); in mtk_wed_start()
2374 dev->running = true; in mtk_wed_start()
2378 mtk_wed_attach(struct mtk_wed_device *dev) in mtk_wed_attach() argument
2388 if ((dev->wlan.bus_type == MTK_WED_BUS_PCIE && in mtk_wed_attach()
2389 pci_domain_nr(dev->wlan.pci_dev->bus) > 1) || in mtk_wed_attach()
2400 hw = mtk_wed_assign(dev); in mtk_wed_attach()
2407 device = dev->wlan.bus_type == MTK_WED_BUS_PCIE in mtk_wed_attach()
2408 ? &dev->wlan.pci_dev->dev in mtk_wed_attach()
2409 : &dev->wlan.platform_dev->dev; in mtk_wed_attach()
2413 dev->hw = hw; in mtk_wed_attach()
2414 dev->dev = hw->dev; in mtk_wed_attach()
2415 dev->irq = hw->irq; in mtk_wed_attach()
2416 dev->wdma_idx = hw->index; in mtk_wed_attach()
2417 dev->version = hw->version; in mtk_wed_attach()
2418 dev->hw->pcie_base = mtk_wed_get_pcie_base(dev); in mtk_wed_attach()
2420 if (hw->eth->dma_dev == hw->eth->dev && in mtk_wed_attach()
2421 of_dma_is_coherent(hw->eth->dev->of_node)) in mtk_wed_attach()
2422 mtk_eth_set_dma_device(hw->eth, hw->dev); in mtk_wed_attach()
2424 ret = mtk_wed_tx_buffer_alloc(dev); in mtk_wed_attach()
2428 ret = mtk_wed_amsdu_buffer_alloc(dev); in mtk_wed_attach()
2432 if (mtk_wed_get_rx_capa(dev)) { in mtk_wed_attach()
2433 ret = mtk_wed_rro_alloc(dev); in mtk_wed_attach()
2438 mtk_wed_hw_init_early(dev); in mtk_wed_attach()
2443 dev->rev_id = wed_r32(dev, MTK_WED_REV_ID); in mtk_wed_attach()
2445 if (mtk_wed_get_rx_capa(dev)) in mtk_wed_attach()
2449 dev_err(dev->hw->dev, "failed to attach wed device\n"); in mtk_wed_attach()
2450 __mtk_wed_detach(dev); in mtk_wed_attach()
2459 mtk_wed_tx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_tx_ring_setup() argument
2462 struct mtk_wed_ring *ring = &dev->tx_ring[idx]; in mtk_wed_tx_ring_setup()
2476 if (WARN_ON(idx >= ARRAY_SIZE(dev->tx_ring))) in mtk_wed_tx_ring_setup()
2479 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_TX_RING_SIZE, in mtk_wed_tx_ring_setup()
2483 if (mtk_wed_wdma_rx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_tx_ring_setup()
2490 if (mtk_wed_is_v3_or_greater(dev->hw) && idx == 1) { in mtk_wed_tx_ring_setup()
2492 wed_set(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_tx_ring_setup()
2496 wed_clr(dev, MTK_WED_WDMA_RX_PREF_CFG, in mtk_wed_tx_ring_setup()
2501 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, in mtk_wed_tx_ring_setup()
2504 wed_w32(dev, MTK_WED_WDMA_RX_PREF_FIFO_CFG, 0); in mtk_wed_tx_ring_setup()
2508 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_tx_ring_setup()
2509 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_TX_RING_SIZE); in mtk_wed_tx_ring_setup()
2510 wpdma_tx_w32(dev, idx, MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
2512 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_tx_ring_setup()
2514 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_tx_ring_setup()
2516 wed_w32(dev, MTK_WED_WPDMA_RING_TX(idx) + MTK_WED_RING_OFS_CPU_IDX, 0); in mtk_wed_tx_ring_setup()
2522 mtk_wed_txfree_ring_setup(struct mtk_wed_device *dev, void __iomem *regs) in mtk_wed_txfree_ring_setup() argument
2524 struct mtk_wed_ring *ring = &dev->txfree_ring; in mtk_wed_txfree_ring_setup()
2525 int i, index = mtk_wed_is_v1(dev->hw); in mtk_wed_txfree_ring_setup()
2538 wed_w32(dev, MTK_WED_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
2539 wed_w32(dev, MTK_WED_WPDMA_RING_RX(index) + i, val); in mtk_wed_txfree_ring_setup()
2546 mtk_wed_rx_ring_setup(struct mtk_wed_device *dev, int idx, void __iomem *regs, in mtk_wed_rx_ring_setup() argument
2549 struct mtk_wed_ring *ring = &dev->rx_ring[idx]; in mtk_wed_rx_ring_setup()
2551 if (WARN_ON(idx >= ARRAY_SIZE(dev->rx_ring))) in mtk_wed_rx_ring_setup()
2554 if (!reset && mtk_wed_ring_alloc(dev, ring, MTK_WED_RX_RING_SIZE, in mtk_wed_rx_ring_setup()
2558 if (mtk_wed_wdma_tx_ring_setup(dev, idx, MTK_WED_WDMA_RING_SIZE, in mtk_wed_rx_ring_setup()
2567 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_BASE, ring->desc_phys); in mtk_wed_rx_ring_setup()
2568 wpdma_rx_w32(dev, idx, MTK_WED_RING_OFS_COUNT, MTK_WED_RX_RING_SIZE); in mtk_wed_rx_ring_setup()
2570 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_BASE, in mtk_wed_rx_ring_setup()
2572 wed_w32(dev, MTK_WED_WPDMA_RING_RX_DATA(idx) + MTK_WED_RING_OFS_COUNT, in mtk_wed_rx_ring_setup()
2579 mtk_wed_irq_get(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_get() argument
2583 if (mtk_wed_is_v3_or_greater(dev->hw)) in mtk_wed_irq_get()
2589 val = wed_r32(dev, MTK_WED_EXT_INT_STATUS); in mtk_wed_irq_get()
2590 wed_w32(dev, MTK_WED_EXT_INT_STATUS, val); in mtk_wed_irq_get()
2592 if (!dev->hw->num_flows) in mtk_wed_irq_get()
2595 pr_err("mtk_wed%d: error status=%08x\n", dev->hw->index, val); in mtk_wed_irq_get()
2597 val = wed_r32(dev, MTK_WED_INT_STATUS); in mtk_wed_irq_get()
2599 wed_w32(dev, MTK_WED_INT_STATUS, val); /* ACK */ in mtk_wed_irq_get()
2605 mtk_wed_irq_set_mask(struct mtk_wed_device *dev, u32 mask) in mtk_wed_irq_set_mask() argument
2607 mtk_wed_set_ext_int(dev, !!mask); in mtk_wed_irq_set_mask()
2608 wed_w32(dev, MTK_WED_INT_MASK, mask); in mtk_wed_irq_set_mask()
2671 if (!priv || !tc_can_offload(priv->dev)) in mtk_wed_setup_tc_block_cb()
2682 mtk_wed_setup_tc_block(struct mtk_wed_hw *hw, struct net_device *dev, in mtk_wed_setup_tc_block() argument
2702 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2713 priv->dev = dev; in mtk_wed_setup_tc_block()
2714 block_cb = flow_block_cb_alloc(cb, dev, priv, NULL); in mtk_wed_setup_tc_block()
2725 block_cb = flow_block_cb_lookup(f->block, cb, dev); in mtk_wed_setup_tc_block()
2742 mtk_wed_setup_tc(struct mtk_wed_device *wed, struct net_device *dev, in mtk_wed_setup_tc() argument
2753 return mtk_wed_setup_tc_block(hw, dev, type_data); in mtk_wed_setup_tc()
2784 struct device_node *eth_np = eth->dev->of_node; in mtk_wed_add_hw()
2797 get_device(&pdev->dev); in mtk_wed_add_hw()
2820 hw->dev = &pdev->dev; in mtk_wed_add_hw()
2864 put_device(&pdev->dev); in mtk_wed_add_hw()
2886 put_device(hw->dev); in mtk_wed_exit()