Lines matching refs:mt76 (drivers/net/wireless/mediatek/mt76/mt7996/dma.c)
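
Every match below reaches shared mt76 core state through the mt76_dev instance embedded at the head of mt7996_dev. A minimal sketch of that layout, following the upstream mt7996.h convention (driver-private fields elided):

	struct mt7996_dev {
		union { /* must be first */
			struct mt76_dev mt76;
			struct mt76_phy mphy;
		};
		/* ... driver-private state elided ... */
	};

The union makes &dev->mt76 and &dev->mphy alias the same object, which is why both spellings show up among the matches (e.g. dev->mphy.q_tx[] at line 705 next to dev->mt76.q_mcu[] at line 713).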

23 if (phy->mt76->band_idx == MT_BAND2) in mt7996_init_tx_queues()
29 return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc, in mt7996_init_tx_queues()
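
For context, a reconstruction of the function around the two matches above; exact lines vary by kernel version, so treat this as a sketch rather than the canonical source:

	static int mt7996_init_tx_queues(struct mt7996_phy *phy, int idx,
					 int n_desc, int ring_base, void *wed)
	{
		u32 flags = 0;

		if (mtk_wed_device_active(wed)) {
			ring_base += MT_TXQ_ID(0) * MT_RING_SIZE;
			idx -= MT_TXQ_ID(0);

			/* line 23: the third band maps to WED tx queue 0 */
			if (phy->mt76->band_idx == MT_BAND2)
				flags = MT_WED_Q_TX(0);
			else
				flags = MT_WED_Q_TX(idx);
		}

		/* line 29: hand the ring to the shared connac helper */
		return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc,
						  ring_base, wed, flags);
	}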
37 dev = container_of(napi, struct mt7996_dev, mt76.tx_napi); in mt7996_poll_tx()
39 mt76_connac_tx_cleanup(&dev->mt76); in mt7996_poll_tx()
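
The two matches above are essentially the whole TX NAPI poll handler; a sketch of its likely shape:

	static int mt7996_poll_tx(struct napi_struct *napi, int budget)
	{
		struct mt7996_dev *dev;

		/* line 37: recover the driver context from the embedded napi */
		dev = container_of(napi, struct mt7996_dev, mt76.tx_napi);

		/* line 39: reap completed tx descriptors via the connac helper */
		mt76_connac_tx_cleanup(&dev->mt76);
		if (napi_complete_done(napi, 0))
			mt7996_irq_enable(dev, MT_INT_TX_DONE_MCU);

		return 0;
	}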
67 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
102 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
141 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2_WA : MT_RXQ_BAND1_WA; in __mt7996_dma_prefetch()
146 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2 : MT_RXQ_BAND1; in __mt7996_dma_prefetch()
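
mt7996_dma_config() and __mt7996_dma_prefetch() both branch on is_mt7996(): the tri-band MT7996 puts its second data path on the BAND2 rings, while the dual-band MT7992 reuses the BAND1 rings. The selection pattern at lines 141 and 146, pulled out into a hypothetical helper for illustration (mt7996_second_band_rxq is not a real symbol):

	/* hypothetical helper; mirrors the ?: selections at lines 141/146 */
	static int mt7996_second_band_rxq(struct mt7996_dev *dev, bool wa)
	{
		if (is_mt7996(&dev->mt76))
			return wa ? MT_RXQ_BAND2_WA : MT_RXQ_BAND2;

		return wa ? MT_RXQ_BAND1_WA : MT_RXQ_BAND1;
	}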
224 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_start()
366 if (mtk_wed_device_active(&dev->mt76.mmio.wed) && in mt7996_dma_enable()
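
Both functions touch Wireless Ethernet Dispatch state, which lives in the shared mmio block, and every WED call is gated on mtk_wed_device_active(). A minimal sketch of the pattern, assuming the usual mtk_wed API (irq_mask stands in for whatever mask the caller computed):

	struct mtk_wed_device *wed = &dev->mt76.mmio.wed;	/* line 224 */

	if (mtk_wed_device_active(wed))				/* line 366 */
		mtk_wed_device_start(wed, irq_mask);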
381 struct mt76_dev *mdev = &dev->mt76; in mt7996_dma_rro_init()
447 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_init()
448 struct mtk_wed_device *wed_hif2 = &dev->mt76.mmio.wed_hif2; in mt7996_dma_init()
455 mt76_dma_attach(&dev->mt76); in mt7996_dma_init()
472 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, in mt7996_dma_init()
480 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA, in mt7996_dma_init()
488 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, in mt7996_dma_init()
496 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU], in mt7996_dma_init()
505 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA], in mt7996_dma_init()
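
Lines 455-505 cover the control path: attach the DMA ops, then bring up the three MCU TX queues (WM firmware, WA offload firmware, and the firmware-download queue) plus their event RX rings. A condensed sketch; idx, n_desc, buf_size and base stand in for the chip-specific ring IDs, sizes and register bases:

	mt76_dma_attach(&dev->mt76);

	/* command queues toward the WM / WA firmware and the FWDL queue */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, idx, n_desc, base);
	/* ... MT_MCUQ_WA and MT_MCUQ_FWDL are set up the same way ... */

	/* event rings coming back from the firmware */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
			       idx, n_desc, buf_size, base);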
515 dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0); in mt7996_dma_init()
516 dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed; in mt7996_dma_init()
519 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN], in mt7996_dma_init()
529 dev->mt76.q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
530 dev->mt76.q_rx[MT_RXQ_MAIN_WA].wed = wed; in mt7996_dma_init()
533 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA], in mt7996_dma_init()
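
When WED is active, the MAIN-band rings are tagged before allocation: the data RX ring is marked as a WED RX queue and pointed at the WED instance, and the WA ring doubles as the tx-free notification queue. A sketch of the pattern at lines 515-533 (the guard conditions are approximate):

	if (mtk_wed_device_active(wed)) {
		dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0);
		dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed;

		dev->mt76.q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE;
		dev->mt76.q_rx[MT_RXQ_MAIN_WA].wed = wed;
	}
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
			       idx, n_desc, buf_size, base);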
544 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2], in mt7996_dma_init()
556 dev->mt76.q_rx[MT_RXQ_BAND2_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
557 dev->mt76.q_rx[MT_RXQ_BAND2_WA].wed = wed_hif2; in mt7996_dma_init()
560 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2_WA], in mt7996_dma_init()
570 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1], in mt7996_dma_init()
580 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA], in mt7996_dma_init()
592 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags = in mt7996_dma_init()
594 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed; in mt7996_dma_init()
595 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND0], in mt7996_dma_init()
604 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
605 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed; in mt7996_dma_init()
607 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0], in mt7996_dma_init()
617 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].flags = in mt7996_dma_init()
619 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].wed = wed; in mt7996_dma_init()
620 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND2], in mt7996_dma_init()
630 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
631 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].wed = wed_hif2; in mt7996_dma_init()
633 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2], in mt7996_dma_init()
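
With hardware RRO (receive reorder offload) enabled, two extra ring pairs appear per band: an RRO data ring and a tx-free ring, with band2's tx-free ring serviced by the second hif's WED instance. A sketch of the tagging at lines 592-633; the RRO flag value is abridged (the matches at lines 592 and 617 are truncated mid-assignment), so check the source for the exact expression:

	dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags = MT_WED_RRO_Q_DATA(0);
	dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed;

	dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE;
	dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed;

	/* band2 mirrors band0, but its tx-free ring belongs to wed_hif2 */
	dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].flags = MT_WED_Q_TXFREE;
	dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].wed = wed_hif2;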
647 netif_napi_add_tx(dev->mt76.tx_napi_dev, &dev->mt76.tx_napi, in mt7996_dma_init()
649 napi_enable(&dev->mt76.tx_napi); in mt7996_dma_init()
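
The tail of mt7996_dma_init() hooks TX completion into NAPI, with mt7996_poll_tx (sketched earlier) as the handler. From the matched lines:

	/* lines 647-649: tx completions are reaped in softirq context */
	netif_napi_add_tx(dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
			  mt7996_poll_tx);
	napi_enable(&dev->mt76.tx_napi);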
658 struct mt76_phy *phy2 = dev->mt76.phys[MT_BAND1]; in mt7996_dma_reset()
659 struct mt76_phy *phy3 = dev->mt76.phys[MT_BAND2]; in mt7996_dma_reset()
683 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true); in mt7996_dma_reset()
685 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
686 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
688 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
694 if (dev->hif2 && mtk_wed_device_active(&dev->mt76.mmio.wed_hif2)) in mt7996_dma_reset()
695 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed_hif2); in mt7996_dma_reset()
697 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
698 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed); in mt7996_dma_reset()
701 mt76_wed_dma_reset(&dev->mt76); in mt7996_dma_reset()
705 mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]); in mt7996_dma_reset()
707 mt76_dma_reset_tx_queue(&dev->mt76, phy2->q_tx[i]); in mt7996_dma_reset()
709 mt76_dma_reset_tx_queue(&dev->mt76, phy3->q_tx[i]); in mt7996_dma_reset()
713 mt76_queue_reset(dev, dev->mt76.q_mcu[i]); in mt7996_dma_reset()
715 mt76_for_each_q_rx(&dev->mt76, i) { in mt7996_dma_reset()
716 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
717 if (mt76_queue_is_wed_rro(&dev->mt76.q_rx[i]) || in mt7996_dma_reset()
718 mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i])) in mt7996_dma_reset()
721 mt76_queue_reset(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
724 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
726 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
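
The reset path drains everything, lets WED quiesce its DMA, then reinitializes the rings. A condensed sketch of the flow around lines 658-726; register setup and the forced wfsys-reset path are elided, and ordering follows the matches:

	struct mt76_phy *phy2 = dev->mt76.phys[MT_BAND1];	/* line 658 */
	struct mt76_phy *phy3 = dev->mt76.phys[MT_BAND2];	/* line 659 */
	int i;

	for (i = 0; i < __MT_MCUQ_MAX; i++)
		mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);

	mt76_for_each_q_rx(&dev->mt76, i)
		mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);

	mt76_tx_status_check(&dev->mt76, true);

	/* let WED quiesce its DMA before the host-side queue resets */
	if (dev->hif2 && mtk_wed_device_active(&dev->mt76.mmio.wed_hif2))
		mtk_wed_device_dma_reset(&dev->mt76.mmio.wed_hif2);
	if (mtk_wed_device_active(&dev->mt76.mmio.wed)) {
		mtk_wed_device_dma_reset(&dev->mt76.mmio.wed);
		mt76_wed_dma_reset(&dev->mt76);
	}

	/* reset the tx rings of every active band ... */
	for (i = 0; i < __MT_TXQ_MAX; i++) {
		mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]);
		if (phy2)
			mt76_dma_reset_tx_queue(&dev->mt76, phy2->q_tx[i]);
		if (phy3)
			mt76_dma_reset_tx_queue(&dev->mt76, phy3->q_tx[i]);
	}

	for (i = 0; i < __MT_MCUQ_MAX; i++)
		mt76_queue_reset(dev, dev->mt76.q_mcu[i]);

	/* ... but leave rx rings that WED RRO / tx-free still own alone */
	mt76_for_each_q_rx(&dev->mt76, i) {
		if (mtk_wed_device_active(&dev->mt76.mmio.wed) &&
		    (mt76_queue_is_wed_rro(&dev->mt76.q_rx[i]) ||
		     mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i])))
			continue;

		mt76_queue_reset(dev, &dev->mt76.q_rx[i]);
	}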
736 mt76_dma_cleanup(&dev->mt76); in mt7996_dma_cleanup()
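
The cleanup path is the smallest consumer: it only has to undo the attach from line 455. Its likely shape, assuming mt7996_dma_disable is the companion disable helper in the same file:

	void mt7996_dma_cleanup(struct mt7996_dev *dev)
	{
		mt7996_dma_disable(dev, true);

		mt76_dma_cleanup(&dev->mt76);	/* line 736 */
	}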