Lines matching +full:queue +full:rx — mt7996 DMA code (grouped by function; numbers are source line numbers)

File header:
     1  // SPDX-License-Identifier: ISC

In mt7996_init_tx_queues():
    16  struct mt7996_dev *dev = phy->dev;
    21  idx -= MT_TXQ_ID(0);
    23  if (phy->mt76->band_idx == MT_BAND2)
    29  return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc,
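
The matches in mt7996_init_tx_queues() show a WED-aware remap: the absolute hardware queue id is normalized by subtracting MT_TXQ_ID(0), and band 2 is special-cased before the ring is handed to mt76_connac_init_tx_queues(). A minimal standalone model of that remapping, with all names and values invented for illustration:

    /* Model of the index remap suggested above: normalize the absolute
     * hw queue id to a 0-based slot; pin the third band to offload TX
     * queue 0. Every name and constant here is hypothetical.
     */
    #include <stdio.h>

    enum band { BAND0, BAND1, BAND2 };

    #define TXQ_ID(b)   (18 + (b))      /* fake absolute hw queue ids */
    #define WED_Q_TX(n) (0x100 | (n))   /* fake offload-queue flag */

    static void init_tx_queue(enum band b, int wed_active)
    {
        int idx = TXQ_ID(b);
        unsigned int flags = 0;

        if (wed_active) {
            idx -= TXQ_ID(0);           /* normalize to 0-based slot */
            flags = WED_Q_TX(b == BAND2 ? 0 : idx);
        }
        printf("band %d: slot %d, flags %#x\n", (int)b, idx, flags);
    }

    int main(void)
    {
        for (int b = BAND0; b <= BAND2; b++)
            init_tx_queue((enum band)b, 1);
        return 0;
    }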

In mt7996_poll_tx():
    39  mt76_connac_tx_cleanup(&dev->mt76);
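
The single match in mt7996_poll_tx() is the TX-cleanup NAPI poller delegating to the shared connac helper. A hedged standalone sketch of the usual shape of such a poller, reaping completed descriptors against a budget (fake ring, invented names):

    /* Toy TX-cleanup poll: reap completed descriptors up to a budget;
     * finishing under budget means the poll is done and the interrupt
     * can be re-armed. Purely illustrative.
     */
    #include <stdbool.h>
    #include <stdio.h>

    struct tx_ring { int head, tail; };

    static int reap_tx(struct tx_ring *r, int budget)
    {
        int done = 0;

        while (r->tail != r->head && done < budget) {
            r->tail++;              /* free one completed descriptor */
            done++;
        }
        return done;
    }

    int main(void)
    {
        struct tx_ring ring = { .head = 5, .tail = 0 };
        int done = reap_tx(&ring, 64);
        bool complete = done < 64;

        printf("reaped %d -> %s\n", done,
               complete ? "complete, re-arm irq" : "stay polling");
        return 0;
    }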

In mt7996_dma_config():
    50  dev->q_wfdma_mask |= (1 << (q)); \
    51  dev->q_int_mask[(q)] = int; \
    52  dev->q_id[(q)] = id; \
    59  /* rx queue */
    61  /* for mt7990, RX ring 1 is for SDO instead */
    68  switch (mt76_chip(&dev->mt76)) {
    77  if (dev->hif2)
    89  if (dev->has_rro) {
   112  /* data tx queue */
   114  if (is_mt7996(&dev->mt76)) {
   121  /* mcu tx queue */
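
The mt7996_dma_config() matches fill three parallel per-queue tables: a WFDMA-ownership bitmask, an interrupt mask, and a hardware ring id. A standalone sketch of that configuration idiom, modeled only on lines 50-52 above (structure and values invented):

    /* Per-queue config tables in the style of lines 50-52: record which
     * WFDMA the queue lives on, which interrupt bit it raises, and its
     * hw ring id. All types/values are illustrative.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_Q 8

    struct dma_dev {
        uint32_t q_wfdma_mask;
        uint32_t q_int_mask[NUM_Q];
        uint8_t  q_id[NUM_Q];
    };

    #define Q_CONFIG(dev, q, wfdma, irq, id) do {    \
            if (wfdma)                               \
                (dev)->q_wfdma_mask |= 1U << (q);    \
            (dev)->q_int_mask[(q)] = (irq);          \
            (dev)->q_id[(q)] = (id);                 \
        } while (0)

    int main(void)
    {
        struct dma_dev dev = { 0 };

        Q_CONFIG(&dev, 0, 1, 1U << 0, 4);   /* rx queue 0 -> hw ring 4 */
        Q_CONFIG(&dev, 1, 1, 1U << 1, 5);   /* rx queue 1 -> hw ring 5 */
        printf("wfdma mask %#x, q0 irq %#x, q0 ring %u\n",
               (unsigned)dev.q_wfdma_mask,
               (unsigned)dev.q_int_mask[0],
               (unsigned)dev.q_id[0]);
        return 0;
    }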

In __mt7996_dma_prefetch():
   141  u8 queue, val;
   144  /* prefetch SRAM wrapping boundary for tx/rx ring. */
   146  val = is_mt7996(&dev->mt76) ? 2 : 4;
   154  if (!is_mt7996(&dev->mt76) || dev->hif2)
   156  if (is_mt7996(&dev->mt76))
   159  /* Rx Event Rings */
   163  /* Rx TxFreeDone From WA Rings */
   166  queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2_WA : MT_RXQ_BAND1_WA;
   167  mt76_wr(dev, MT_RXQ_EXT_CTRL(queue) + ofs, PREFETCH(val));
   170  /* Rx TxFreeDone From MAC Rings */
   171  val = is_mt7996(&dev->mt76) ? 4 : 8;
   172  if (is_mt7990(&dev->mt76) || (is_mt7996(&dev->mt76) && dev->has_rro))
   174  if (is_mt7990(&dev->mt76) && dev->hif2)
   176  else if (is_mt7996(&dev->mt76) && dev->has_rro)
   179  /* Rx Data Rings */
   181  queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2 : MT_RXQ_BAND1;
   182  mt76_wr(dev, MT_RXQ_EXT_CTRL(queue) + ofs, PREFETCH(0x10));
   184  /* Rx RRO Rings */
   185  if (dev->has_rro) {
   187  queue = is_mt7996(&dev->mt76) ? MT_RXQ_RRO_BAND2 : MT_RXQ_RRO_BAND1;
   188  mt76_wr(dev, MT_RXQ_EXT_CTRL(queue) + ofs, PREFETCH(0x10));
   191  if (is_mt7996(&dev->mt76)) {
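
__mt7996_dma_prefetch() carves a shared prefetch SRAM into per-ring slices: each EXT_CTRL write takes a PREFETCH(depth) value, with deeper slices (0x10) for data rings than for event rings (2/4/8 depending on chip). A plausible standalone model of a cumulative base/depth scheme, assuming the packed word is base << 16 | depth and the base advances by each ring's depth:

    /* Assumed PREFETCH encoding: high half-word carries a running base
     * offset into prefetch SRAM, low bits the ring's depth; the base
     * advances after each ring so slices never overlap.
     */
    #include <stdint.h>
    #include <stdio.h>

    static uint32_t prefetch(uint16_t *base, uint8_t depth)
    {
        uint32_t val = (uint32_t)*base << 16 | depth;

        *base += (uint16_t)depth << 4;  /* assume depth counts 16-unit blocks */
        return val;
    }

    int main(void)
    {
        uint16_t base = 0;

        printf("tx ring  ctrl %#x\n", (unsigned)prefetch(&base, 2));
        printf("evt ring ctrl %#x\n", (unsigned)prefetch(&base, 4));
        printf("rx data  ctrl %#x\n", (unsigned)prefetch(&base, 0x10));
        return 0;
    }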

In mt7996_dma_prefetch():
   206  if (dev->hif2)
   207  __mt7996_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0));
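
mt7996_dma_prefetch() runs the same programming twice: once at offset 0, and once shifted by MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0) when a second host interface exists, so pcie1's register block gets an identical layout. A sketch of that offset-mirroring pattern (fake addresses):

    /* Mirror every WFDMA write onto the second PCIe's register copy by
     * adding a fixed block offset. Base addresses are invented.
     */
    #include <stdint.h>
    #include <stdio.h>

    #define WFDMA0_BASE       0xd4000u   /* illustrative */
    #define WFDMA0_PCIE1_BASE 0xd8000u   /* illustrative */

    static void reg_wr(uint32_t addr, uint32_t val)
    {
        printf("wr %#x <- %#x\n", (unsigned)addr, (unsigned)val);
    }

    static void program_rings(uint32_t ofs)
    {
        reg_wr(WFDMA0_BASE + 0x600 + ofs, 0x00020002); /* fake ring ctrl */
    }

    int main(void)
    {
        int have_hif2 = 1;

        program_rings(0);                    /* primary interface */
        if (have_hif2)                       /* second PCIe mirror */
            program_rings(WFDMA0_PCIE1_BASE - WFDMA0_BASE);
        return 0;
    }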

In mt7996_dma_disable():
   214  if (dev->hif2)
   215  hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
   226  if (dev->hif2) {
   245  if (dev->hif2) {

In mt7996_dma_start():
   257  struct mtk_wed_device *wed = &dev->mt76.mmio.wed;
   261  if (dev->hif2)
   262  hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
   264  /* enable WFDMA Tx/Rx */
   279  if (dev->hif2)
   288  /* enable interrupts for TX/RX rings */
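
The mt7996_dma_start() matches suggest the bring-up order: enable the WFDMA TX/RX engine first, then unmask the ring interrupts, so no IRQ fires for a ring the engine is not yet serving. A toy model of that ordering (registers and bits are stand-ins):

    /* Ordering sketch: engine on first, interrupts unmasked second. */
    #include <stdint.h>
    #include <stdio.h>

    #define GLO_TX_EN (1U << 0)
    #define GLO_RX_EN (1U << 2)

    static uint32_t glo_cfg, irq_mask;

    static void dma_start(uint32_t ring_irqs)
    {
        glo_cfg |= GLO_TX_EN | GLO_RX_EN;   /* 1. enable WFDMA tx/rx */
        irq_mask |= ring_irqs;              /* 2. enable tx/rx ring irqs */
    }

    int main(void)
    {
        dma_start(0xffu);
        printf("glo %#x, irq %#x\n", (unsigned)glo_cfg, (unsigned)irq_mask);
        return 0;
    }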

In mt7996_dma_enable():
   321  if (dev->hif2)
   322  hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
   326  if (dev->hif2)
   334  if (dev->hif2) {
   349  if (dev->hif2)
   367  /* WFDMA rx threshold */
   373  if (dev->hif2) {
   391  /* WFDMA rx threshold */
   398  if (dev->hif2) {
   399  /* fix hardware limitation: pcie1's rx ring3 is not available,
   400   * so redirect pcie0's rx ring3 interrupt to pcie1
   402  if (mtk_wed_device_active(&dev->mt76.mmio.wed) &&
   403      dev->has_rro)
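
The comment at lines 399-400 records a hardware quirk: pcie1 has no usable rx ring 3, so when WED and RRO are active, pcie0's rx ring 3 interrupt is steered to pcie1. A sketch of that conditional steering (invented masks):

    /* Conditional interrupt steering per the quirk above: mask ring 3
     * on the primary interface and deliver it via the second one.
     */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define RX_RING3_INT (1U << 3)

    struct hif { uint32_t int_mask; };

    static void route_ring3(struct hif *pcie0, struct hif *pcie1,
                            bool wed_active, bool has_rro)
    {
        if (wed_active && has_rro) {
            pcie0->int_mask &= ~RX_RING3_INT;  /* silence on pcie0 */
            pcie1->int_mask |= RX_RING3_INT;   /* deliver via pcie1 */
        }
    }

    int main(void)
    {
        struct hif p0 = { .int_mask = 0xffu }, p1 = { .int_mask = 0 };

        route_ring3(&p0, &p1, true, true);
        printf("pcie0 %#x, pcie1 %#x\n",
               (unsigned)p0.int_mask, (unsigned)p1.int_mask);
        return 0;
    }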

In mt7996_dma_rro_init():
   417  struct mt76_dev *mdev = &dev->mt76;
   422  mdev->q_rx[MT_RXQ_RRO_IND].flags = MT_WED_RRO_Q_IND;
   423  mdev->q_rx[MT_RXQ_RRO_IND].wed = &mdev->mmio.wed;
   424  ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_RRO_IND],
   431  /* rx msdu page queue for band0 */
   432  mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0].flags =
   434  mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0].wed = &mdev->mmio.wed;
   435  ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND0],
   444  /* rx msdu page queue for band1 */
   445  mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1].flags =
   447  mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1].wed = &mdev->mmio.wed;
   448  ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND1],
   458  /* rx msdu page queue for band2 */
   459  mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2].flags =
   461  mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2].wed = &mdev->mmio.wed;
   462  ret = mt76_queue_alloc(dev, &mdev->q_rx[MT_RXQ_MSDU_PAGE_BAND2],
   471  irq_mask = mdev->mmio.irqmask | MT_INT_RRO_RX_DONE |
   474  mtk_wed_device_start_hw_rro(&mdev->mmio.wed, irq_mask, false);
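
mt7996_dma_rro_init() repeats one recipe per RRO ring: set the queue's WED flags, bind it to the WED device, allocate the descriptor ring, and finally start hardware RRO with an irq mask extended by the RRO completion bits. A simplified stand-in for that recipe (not the real mt76 queue API):

    /* Per-queue RRO setup recipe, simplified: tag the queue with its
     * WED role, bind the WED device, allocate descriptors. The alloc
     * call is a stand-in, not mt76_queue_alloc()'s real signature.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct wed_device { int id; };

    struct queue {
        uint32_t flags;
        struct wed_device *wed;
        void *desc;
    };

    static int queue_alloc(struct queue *q, int n_desc, size_t desc_sz)
    {
        q->desc = calloc((size_t)n_desc, desc_sz);
        return q->desc ? 0 : -1;
    }

    int main(void)
    {
        struct wed_device wed = { .id = 0 };
        struct queue rro_band0 = { 0 };

        rro_band0.flags = 1U << 8;          /* fake "WED RRO data" flag */
        rro_band0.wed = &wed;
        if (queue_alloc(&rro_band0, 1024, 16))
            return 1;
        printf("rro ring ready, flags %#x\n", (unsigned)rro_band0.flags);
        free(rro_band0.desc);
        return 0;
    }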

In mt7996_dma_init():
   483  struct mtk_wed_device *wed = &dev->mt76.mmio.wed;
   484  struct mtk_wed_device *wed_hif2 = &dev->mt76.mmio.wed_hif2;
   491  mt76_dma_attach(&dev->mt76);
   493  if (dev->hif2)
   494  hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
   498  /* init tx queue */
   499  ret = mt7996_init_tx_queues(&dev->phy,
   500      MT_TXQ_ID(dev->mphy.band_idx),
   508  ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM,
   517  ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA,
   526  ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL,
   534  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
   543  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
   551  /* rx data queue for band0 and mt7996 band1 */
   553  dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0);
   554  dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed;
   557  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
   566  if (mtk_wed_device_active(wed) && !dev->has_rro) {
   567  dev->mt76.q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE;
   568  dev->mt76.q_rx[MT_RXQ_MAIN_WA].wed = wed;
   572  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA],
   581  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE;
   582  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed;
   584  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0],
   593  if (!mt7996_has_wa(dev) && dev->hif2) {
   595  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND1].flags = MT_WED_Q_TXFREE;
   596  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND1].wed = wed;
   598  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND1],
   608  /* rx data queue for mt7996 band2 */
   610  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2],
   619   * use pcie0's rx ring3, but redirect pcie0's rx ring3 interrupt to pcie1
   621  if (mtk_wed_device_active(wed_hif2) && !dev->has_rro) {
   622  dev->mt76.q_rx[MT_RXQ_BAND2_WA].flags = MT_WED_Q_TXFREE;
   623  dev->mt76.q_rx[MT_RXQ_BAND2_WA].wed = wed_hif2;
   626  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2_WA],
   634  /* rx data queue for mt7992 band1 */
   636  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1],
   647  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA],
   658      dev->has_rro) {
   659  /* rx rro data queue for band0 */
   660  dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags =
   662  dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed;
   663  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND0],
   672  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE;
   673  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed;
   675  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0],
   684  /* rx rro data queue for band2 */
   685  dev->mt76.q_rx[MT_RXQ_RRO_BAND2].flags =
   687  dev->mt76.q_rx[MT_RXQ_RRO_BAND2].wed = wed;
   688  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND2],
   698  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].flags = MT_WED_Q_TXFREE;
   699  dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].wed = wed_hif2;
   701  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2],
   715  netif_napi_add_tx(dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
   717  napi_enable(&dev->mt76.tx_napi);
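
mt7996_dma_init() strings these allocations together in a fixed order (attach DMA ops, TX data rings, the three MCU queues, then the RX rings, and only at the end the TX NAPI poller), bailing out on the first failing step. A skeleton of that fail-fast chain with stubbed steps:

    /* Fail-fast init chain: each step returns 0 or an error, and the
     * caller stops at the first failure. Step bodies are stubs.
     */
    #include <stdio.h>

    static int init_tx_queues(void)  { return 0; }
    static int init_mcu_queues(void) { return 0; }
    static int init_rx_queues(void)  { return 0; }

    static int dma_init(void)
    {
        int ret;

        ret = init_tx_queues();
        if (ret)
            return ret;
        ret = init_mcu_queues();
        if (ret)
            return ret;
        ret = init_rx_queues();
        if (ret)
            return ret;
        /* only now add and enable the TX NAPI poller */
        return 0;
    }

    int main(void)
    {
        printf("dma_init -> %d\n", dma_init());
        return 0;
    }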

In mt7996_dma_reset():
   726  struct mt76_phy *phy2 = dev->mt76.phys[MT_BAND1];
   727  struct mt76_phy *phy3 = dev->mt76.phys[MT_BAND2];
   728  u32 hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);
   735  if (dev->hif2)
   743  mt76_queue_tx_cleanup(dev, dev->mphy.q_tx[i], true);
   745  mt76_queue_tx_cleanup(dev, phy2->q_tx[i], true);
   747  mt76_queue_tx_cleanup(dev, phy3->q_tx[i], true);
   751  mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);
   753  mt76_for_each_q_rx(&dev->mt76, i)
   754  mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);
   756  mt76_tx_status_check(&dev->mt76, true);
   762  if (dev->hif2 && mtk_wed_device_active(&dev->mt76.mmio.wed_hif2))
   763  mtk_wed_device_dma_reset(&dev->mt76.mmio.wed_hif2);
   765  if (mtk_wed_device_active(&dev->mt76.mmio.wed))
   766  mtk_wed_device_dma_reset(&dev->mt76.mmio.wed);
   769  mt76_wed_dma_reset(&dev->mt76);
   773  mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]);
   775  mt76_dma_reset_tx_queue(&dev->mt76, phy2->q_tx[i]);
   777  mt76_dma_reset_tx_queue(&dev->mt76, phy3->q_tx[i]);
   781  mt76_queue_reset(dev, dev->mt76.q_mcu[i]);
   783  mt76_for_each_q_rx(&dev->mt76, i) {
   784  if (mtk_wed_device_active(&dev->mt76.mmio.wed))
   785  if (mt76_queue_is_wed_rro(&dev->mt76.q_rx[i]) ||
   786      mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i]))
   789  mt76_queue_reset(dev, &dev->mt76.q_rx[i]);
   792  mt76_tx_status_check(&dev->mt76, true);
   794  mt76_for_each_q_rx(&dev->mt76, i)
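
The mt7996_dma_reset() matches outline the recovery sequence: drain TX, MCU, and RX queues, let WED reset its DMA state, then reset every descriptor ring except those the WED engine owns (RRO and TXFREE rings), which are skipped. A sketch of that selective reset loop (flags invented):

    /* Selective ring reset: skip rx queues owned by the WED offload
     * engine (RRO/TXFREE), reset the rest. Flags are illustrative.
     */
    #include <stdbool.h>
    #include <stdio.h>

    #define Q_WED_RRO    (1U << 0)
    #define Q_WED_TXFREE (1U << 1)

    struct queue { unsigned int flags; int head, tail; };

    static void queue_reset(struct queue *q) { q->head = q->tail = 0; }

    int main(void)
    {
        struct queue rxq[3] = {
            { 0, 3, 1 }, { Q_WED_RRO, 7, 2 }, { Q_WED_TXFREE, 4, 4 },
        };
        bool wed_active = true;
        int i;

        for (i = 0; i < 3; i++) {
            if (wed_active &&
                (rxq[i].flags & (Q_WED_RRO | Q_WED_TXFREE)))
                continue;            /* WED owns this ring; leave it */
            queue_reset(&rxq[i]);
        }
        printf("q0 head %d (reset), q1 head %d (kept)\n",
               rxq[0].head, rxq[1].head);
        return 0;
    }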

In mt7996_dma_cleanup():
   804  mt76_dma_cleanup(&dev->mt76);