// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt7915.h"
#include "../dma.h"
#include "mac.h"

/* Allocate the hardware TX data ring @idx (@n_desc descriptors, registers at
 * @ring_base) and point every software TX queue of @phy, up to and including
 * MT_TXQ_PSD, at that single ring.
 *
 * Returns 0 on success or the negative error code from mt76_init_tx_queue().
 */
int mt7915_init_tx_queues(struct mt7915_phy *phy, int idx, int n_desc, int ring_base)
{
	int i, err;

	err = mt76_init_tx_queue(phy->mt76, 0, idx, n_desc, ring_base);
	if (err < 0)
		return err;

	/* all per-AC queues and the PS-delivery queue share ring 0 */
	for (i = 0; i <= MT_TXQ_PSD; i++)
		phy->mt76->q_tx[i] = phy->mt76->q_tx[0];

	return 0;
}

/* Reap completed descriptors on both MCU command rings (WM and WA). */
static void
mt7915_tx_cleanup(struct mt7915_dev *dev)
{
	mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_WM], false);
	mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[MT_MCUQ_WA], false);
}

/* NAPI poll handler for MCU TX completions. @budget is ignored: the cleanup
 * always runs to completion, so zero work is reported and the MCU TX-done
 * interrupt is re-armed once NAPI completes.
 */
static int mt7915_poll_tx(struct napi_struct *napi, int budget)
{
	struct mt7915_dev *dev;

	dev = container_of(napi, struct mt7915_dev, mt76.tx_napi);

	mt7915_tx_cleanup(dev);

	if (napi_complete_done(napi, 0))
		mt7915_irq_enable(dev, MT_INT_TX_DONE_MCU);

	return 0;
}

/* Populate the per-queue WFDMA-selection bit, interrupt-mask and hardware
 * queue-id tables. mt7915 spreads its rings across WFDMA0 and WFDMA1, while
 * later generations (the !is_mt7915() branch, e.g. mt7916) place everything
 * on WFDMA0 with their own queue ids and interrupt masks.
 */
static void mt7915_dma_config(struct mt7915_dev *dev)
{
	/* note: "int" is (ab)used as a macro parameter name here */
#define Q_CONFIG(q, wfdma, int, id) do {		\
		if (wfdma)				\
			dev->wfdma_mask |= (1 << (q));	\
		dev->q_int_mask[(q)] = int;		\
		dev->q_id[(q)] = id;			\
	} while (0)

#define MCUQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(q, (wfdma), (int), (id))
#define RXQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(__RXQ(q), (wfdma), (int), (id))
#define TXQ_CONFIG(q, wfdma, int, id)	Q_CONFIG(__TXQ(q), (wfdma), (int), (id))

	if (is_mt7915(&dev->mt76)) {
		RXQ_CONFIG(MT_RXQ_MAIN, WFDMA0, MT_INT_RX_DONE_BAND0, MT7915_RXQ_BAND0);
		RXQ_CONFIG(MT_RXQ_MCU, WFDMA1, MT_INT_RX_DONE_WM, MT7915_RXQ_MCU_WM);
		RXQ_CONFIG(MT_RXQ_MCU_WA, WFDMA1, MT_INT_RX_DONE_WA, MT7915_RXQ_MCU_WA);
		RXQ_CONFIG(MT_RXQ_EXT, WFDMA0, MT_INT_RX_DONE_BAND1, MT7915_RXQ_BAND1);
		RXQ_CONFIG(MT_RXQ_EXT_WA, WFDMA1, MT_INT_RX_DONE_WA_EXT, MT7915_RXQ_MCU_WA_EXT);
		RXQ_CONFIG(MT_RXQ_MAIN_WA, WFDMA1, MT_INT_RX_DONE_WA_MAIN, MT7915_RXQ_MCU_WA);
		TXQ_CONFIG(0, WFDMA1, MT_INT_TX_DONE_BAND0, MT7915_TXQ_BAND0);
		TXQ_CONFIG(1, WFDMA1, MT_INT_TX_DONE_BAND1, MT7915_TXQ_BAND1);
		MCUQ_CONFIG(MT_MCUQ_WM, WFDMA1, MT_INT_TX_DONE_MCU_WM, MT7915_TXQ_MCU_WM);
		MCUQ_CONFIG(MT_MCUQ_WA, WFDMA1, MT_INT_TX_DONE_MCU_WA, MT7915_TXQ_MCU_WA);
		MCUQ_CONFIG(MT_MCUQ_FWDL, WFDMA1, MT_INT_TX_DONE_FWDL, MT7915_TXQ_FWDL);
	} else {
		RXQ_CONFIG(MT_RXQ_MAIN, WFDMA0, MT_INT_RX_DONE_BAND0_MT7916, MT7916_RXQ_BAND0);
		RXQ_CONFIG(MT_RXQ_MCU, WFDMA0, MT_INT_RX_DONE_WM, MT7916_RXQ_MCU_WM);
		RXQ_CONFIG(MT_RXQ_MCU_WA, WFDMA0, MT_INT_RX_DONE_WA, MT7916_RXQ_MCU_WA);
		RXQ_CONFIG(MT_RXQ_EXT, WFDMA0, MT_INT_RX_DONE_BAND1_MT7916, MT7916_RXQ_BAND1);
		RXQ_CONFIG(MT_RXQ_EXT_WA, WFDMA0, MT_INT_RX_DONE_WA_EXT_MT7916, MT7916_RXQ_MCU_WA_EXT);
		RXQ_CONFIG(MT_RXQ_MAIN_WA, WFDMA0, MT_INT_RX_DONE_WA_MAIN_MT7916, MT7916_RXQ_MCU_WA_MAIN);
		TXQ_CONFIG(0, WFDMA0, MT_INT_TX_DONE_BAND0, MT7915_TXQ_BAND0);
		TXQ_CONFIG(1, WFDMA0, MT_INT_TX_DONE_BAND1, MT7915_TXQ_BAND1);
		MCUQ_CONFIG(MT_MCUQ_WM, WFDMA0, MT_INT_TX_DONE_MCU_WM, MT7915_TXQ_MCU_WM);
		MCUQ_CONFIG(MT_MCUQ_WA, WFDMA0, MT_INT_TX_DONE_MCU_WA_MT7916, MT7915_TXQ_MCU_WA);
		MCUQ_CONFIG(MT_MCUQ_FWDL, WFDMA0, MT_INT_TX_DONE_FWDL, MT7915_TXQ_FWDL);
	}
}

/* Program the per-ring prefetch base/depth registers for one host interface.
 * @ofs is 0 for the primary HIF, or the PCIe1 register offset for the
 * secondary one. PREFETCH() packs base (upper 16 bits) and depth.
 */
static void __mt7915_dma_prefetch(struct mt7915_dev *dev, u32 ofs)
{
#define PREFETCH(_base, _depth)	((_base) << 16 | (_depth))
	u32 base = 0;

	/* prefetch SRAM wrapping boundary for tx/rx ring. */
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_FWDL) + ofs, PREFETCH(0x0, 0x4));
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WM) + ofs, PREFETCH(0x40, 0x4));
	mt76_wr(dev, MT_TXQ_EXT_CTRL(0) + ofs, PREFETCH(0x80, 0x4));
	mt76_wr(dev, MT_TXQ_EXT_CTRL(1) + ofs, PREFETCH(0xc0, 0x4));
	mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x100, 0x4));

	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MCU) + ofs, PREFETCH(0x140, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MCU_WA) + ofs, PREFETCH(0x180, 0x4));
	if (!is_mt7915(&dev->mt76)) {
		/* non-mt7915 has an extra band0 WA RX ring, shifting the
		 * remaining prefetch bases up by 0x40
		 */
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MAIN_WA) + ofs, PREFETCH(0x1c0, 0x4));
		base = 0x40;
	}
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT_WA) + ofs, PREFETCH(0x1c0 + base, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_MAIN) + ofs, PREFETCH(0x200 + base, 0x4));
	mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT) + ofs, PREFETCH(0x240 + base, 0x4));

	/* for mt7915, the ring next to the last used ring must also be
	 * initialized (depth 0).
	 */
	if (is_mt7915(&dev->mt76)) {
		ofs += 0x4;
		mt76_wr(dev, MT_MCUQ_EXT_CTRL(MT_MCUQ_WA) + ofs, PREFETCH(0x140, 0x0));
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT_WA) + ofs, PREFETCH(0x200 + base, 0x0));
		mt76_wr(dev, MT_RXQ_EXT_CTRL(MT_RXQ_EXT) + ofs, PREFETCH(0x280 + base, 0x0));
	}
}

/* Apply the prefetch configuration to the primary HIF and, when a second
 * PCIe interface exists, to its mirrored register block as well.
 */
void mt7915_dma_prefetch(struct mt7915_dev *dev)
{
	__mt7915_dma_prefetch(dev, 0);
	if (dev->hif2)
		__mt7915_dma_prefetch(dev, MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0));
}

/* Quiesce host DMA: optionally (@rst) pulse the WFDMA logic/DMASHDL reset
 * bits, then clear the TX/RX DMA enable and OMIT_*_INFO bits in every
 * GLO_CFG register. WFDMA1 registers exist only on mt7915; the whole
 * sequence is repeated at hif1_ofs when a second PCIe interface is present.
 */
static void mt7915_dma_disable(struct mt7915_dev *dev, bool rst)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* reset */
	if (rst) {
		mt76_clear(dev, MT_WFDMA0_RST,
			   MT_WFDMA0_RST_DMASHDL_ALL_RST |
			   MT_WFDMA0_RST_LOGIC_RST);

		mt76_set(dev, MT_WFDMA0_RST,
			 MT_WFDMA0_RST_DMASHDL_ALL_RST |
			 MT_WFDMA0_RST_LOGIC_RST);

		if (is_mt7915(mdev)) {
			mt76_clear(dev, MT_WFDMA1_RST,
				   MT_WFDMA1_RST_DMASHDL_ALL_RST |
				   MT_WFDMA1_RST_LOGIC_RST);

			mt76_set(dev, MT_WFDMA1_RST,
				 MT_WFDMA1_RST_DMASHDL_ALL_RST |
				 MT_WFDMA1_RST_LOGIC_RST);
		}

		if (dev->hif2) {
			mt76_clear(dev, MT_WFDMA0_RST + hif1_ofs,
				   MT_WFDMA0_RST_DMASHDL_ALL_RST |
				   MT_WFDMA0_RST_LOGIC_RST);

			mt76_set(dev, MT_WFDMA0_RST + hif1_ofs,
				 MT_WFDMA0_RST_DMASHDL_ALL_RST |
				 MT_WFDMA0_RST_LOGIC_RST);

			if (is_mt7915(mdev)) {
				mt76_clear(dev, MT_WFDMA1_RST + hif1_ofs,
					   MT_WFDMA1_RST_DMASHDL_ALL_RST |
					   MT_WFDMA1_RST_LOGIC_RST);

				mt76_set(dev, MT_WFDMA1_RST + hif1_ofs,
					 MT_WFDMA1_RST_DMASHDL_ALL_RST |
					 MT_WFDMA1_RST_LOGIC_RST);
			}
		}
	}

	/* disable */
	mt76_clear(dev, MT_WFDMA0_GLO_CFG,
		   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
		   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (is_mt7915(mdev))
		mt76_clear(dev, MT_WFDMA1_GLO_CFG,
			   MT_WFDMA1_GLO_CFG_TX_DMA_EN |
			   MT_WFDMA1_GLO_CFG_RX_DMA_EN |
			   MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
			   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO |
			   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (dev->hif2) {
		mt76_clear(dev, MT_WFDMA0_GLO_CFG + hif1_ofs,
			   MT_WFDMA0_GLO_CFG_TX_DMA_EN |
			   MT_WFDMA0_GLO_CFG_RX_DMA_EN |
			   MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
			   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO |
			   MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

		if (is_mt7915(mdev))
			mt76_clear(dev, MT_WFDMA1_GLO_CFG + hif1_ofs,
				   MT_WFDMA1_GLO_CFG_TX_DMA_EN |
				   MT_WFDMA1_GLO_CFG_RX_DMA_EN |
				   MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
				   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO |
				   MT_WFDMA1_GLO_CFG_OMIT_RX_INFO_PFET2);
	}
}

/* Bring host DMA up: reset the DMA ring indices, turn interrupt moderation
 * off, program prefetch, wait for the HIF to go idle, enable TX/RX DMA in
 * the GLO_CFG registers, and finally unmask the ring interrupts.
 *
 * Always returns 0.
 */
static int mt7915_dma_enable(struct mt7915_dev *dev)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;
	u32 irq_mask;

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	/* reset dma idx */
	mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR, ~0);
	if (is_mt7915(mdev))
		mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR, ~0);
	if (dev->hif2) {
		mt76_wr(dev, MT_WFDMA0_RST_DTX_PTR + hif1_ofs, ~0);
		if (is_mt7915(mdev))
			mt76_wr(dev, MT_WFDMA1_RST_DTX_PTR + hif1_ofs, ~0);
	}

	/* configure delay interrupt off */
	mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0, 0);
	if (is_mt7915(mdev)) {
		mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0, 0);
	} else {
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1, 0);
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2, 0);
	}

	if (dev->hif2) {
		mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG0 + hif1_ofs, 0);
		if (is_mt7915(mdev)) {
			mt76_wr(dev, MT_WFDMA1_PRI_DLY_INT_CFG0 +
				hif1_ofs, 0);
		} else {
			mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG1 +
				hif1_ofs, 0);
			mt76_wr(dev, MT_WFDMA0_PRI_DLY_INT_CFG2 +
				hif1_ofs, 0);
		}
	}

	/* configure prefetch settings */
	mt7915_dma_prefetch(dev);

	/* hif wait WFDMA idle */
	mt76_set(dev, MT_WFDMA0_BUSY_ENA,
		 MT_WFDMA0_BUSY_ENA_TX_FIFO0 |
		 MT_WFDMA0_BUSY_ENA_TX_FIFO1 |
		 MT_WFDMA0_BUSY_ENA_RX_FIFO);

	if (is_mt7915(mdev))
		mt76_set(dev, MT_WFDMA1_BUSY_ENA,
			 MT_WFDMA1_BUSY_ENA_TX_FIFO0 |
			 MT_WFDMA1_BUSY_ENA_TX_FIFO1 |
			 MT_WFDMA1_BUSY_ENA_RX_FIFO);

	if (dev->hif2) {
		mt76_set(dev, MT_WFDMA0_BUSY_ENA + hif1_ofs,
			 MT_WFDMA0_PCIE1_BUSY_ENA_TX_FIFO0 |
			 MT_WFDMA0_PCIE1_BUSY_ENA_TX_FIFO1 |
			 MT_WFDMA0_PCIE1_BUSY_ENA_RX_FIFO);

		if (is_mt7915(mdev))
			mt76_set(dev, MT_WFDMA1_BUSY_ENA + hif1_ofs,
				 MT_WFDMA1_PCIE1_BUSY_ENA_TX_FIFO0 |
				 MT_WFDMA1_PCIE1_BUSY_ENA_TX_FIFO1 |
				 MT_WFDMA1_PCIE1_BUSY_ENA_RX_FIFO);
	}

	/* NOTE(review): the poll result is ignored, so a busy-timeout here
	 * is treated as non-fatal — confirm this is intended.
	 */
	mt76_poll(dev, MT_WFDMA_EXT_CSR_HIF_MISC,
		  MT_WFDMA_EXT_CSR_HIF_MISC_BUSY, 0, 1000);

	/* set WFDMA Tx/Rx */
	mt76_set(dev, MT_WFDMA0_GLO_CFG,
		 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
		 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
		 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
		 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

	if (is_mt7915(mdev))
		mt76_set(dev, MT_WFDMA1_GLO_CFG,
			 MT_WFDMA1_GLO_CFG_TX_DMA_EN |
			 MT_WFDMA1_GLO_CFG_RX_DMA_EN |
			 MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
			 MT_WFDMA1_GLO_CFG_OMIT_RX_INFO);

	if (dev->hif2) {
		mt76_set(dev, MT_WFDMA0_GLO_CFG + hif1_ofs,
			 MT_WFDMA0_GLO_CFG_TX_DMA_EN |
			 MT_WFDMA0_GLO_CFG_RX_DMA_EN |
			 MT_WFDMA0_GLO_CFG_OMIT_TX_INFO |
			 MT_WFDMA0_GLO_CFG_OMIT_RX_INFO_PFET2);

		if (is_mt7915(mdev))
			mt76_set(dev, MT_WFDMA1_GLO_CFG + hif1_ofs,
				 MT_WFDMA1_GLO_CFG_TX_DMA_EN |
				 MT_WFDMA1_GLO_CFG_RX_DMA_EN |
				 MT_WFDMA1_GLO_CFG_OMIT_TX_INFO |
				 MT_WFDMA1_GLO_CFG_OMIT_RX_INFO);

		mt76_set(dev, MT_WFDMA_HOST_CONFIG,
			 MT_WFDMA_HOST_CONFIG_PDMA_BAND);
	}

	/* enable interrupts for TX/RX rings */
	irq_mask = MT_INT_RX_DONE_MCU |
		   MT_INT_TX_DONE_MCU |
		   MT_INT_MCU_CMD |
		   MT_INT_BAND0_RX_DONE;

	if (dev->dbdc_support)
		irq_mask |= MT_INT_BAND1_RX_DONE;

	mt7915_irq_enable(dev, irq_mask);

	return 0;
}

/* One-time DMA bring-up: build the queue-config tables, attach the mt76 DMA
 * ops, reset/disable the engines, allocate every TX/MCU/RX ring (band1
 * rings only with DBDC support; the band0 WA RX ring only on non-mt7915),
 * start the RX NAPI instances, register the TX NAPI poller and finally
 * enable DMA and interrupts.
 *
 * Returns 0 on success or a negative error code from queue allocation.
 */
int mt7915_dma_init(struct mt7915_dev *dev)
{
	struct mt76_dev *mdev = &dev->mt76;
	u32 hif1_ofs = 0;
	int ret;

	mt7915_dma_config(dev);

	mt76_dma_attach(&dev->mt76);

	if (dev->hif2)
		hif1_ofs = MT_WFDMA0_PCIE1(0) - MT_WFDMA0(0);

	mt7915_dma_disable(dev, true);

	/* init tx queue */
	ret = mt7915_init_tx_queues(&dev->phy,
				    MT_TXQ_ID(0),
				    MT7915_TX_RING_SIZE,
				    MT_TXQ_RING_BASE(0));
	if (ret)
		return ret;

	/* command to WM */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM,
				  MT_MCUQ_ID(MT_MCUQ_WM),
				  MT7915_TX_MCU_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_WM));
	if (ret)
		return ret;

	/* command to WA */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA,
				  MT_MCUQ_ID(MT_MCUQ_WA),
				  MT7915_TX_MCU_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_WA));
	if (ret)
		return ret;

	/* firmware download */
	ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL,
				  MT_MCUQ_ID(MT_MCUQ_FWDL),
				  MT7915_TX_FWDL_RING_SIZE,
				  MT_MCUQ_RING_BASE(MT_MCUQ_FWDL));
	if (ret)
		return ret;

	/* event from WM */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
			       MT_RXQ_ID(MT_RXQ_MCU),
			       MT7915_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE,
			       MT_RXQ_RING_BASE(MT_RXQ_MCU));
	if (ret)
		return ret;

	/* event from WA */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
			       MT_RXQ_ID(MT_RXQ_MCU_WA),
			       MT7915_RX_MCU_RING_SIZE,
			       MT_RX_BUF_SIZE,
			       MT_RXQ_RING_BASE(MT_RXQ_MCU_WA));
	if (ret)
		return ret;

	/* rx data queue for band0 */
	ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
			       MT_RXQ_ID(MT_RXQ_MAIN),
			       MT7915_RX_RING_SIZE,
			       MT_RX_BUF_SIZE,
			       MT_RXQ_RING_BASE(MT_RXQ_MAIN));
	if (ret)
		return ret;

	/* tx free notify event from WA for band0 */
	if (!is_mt7915(mdev)) {
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA],
				       MT_RXQ_ID(MT_RXQ_MAIN_WA),
				       MT7915_RX_MCU_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_MAIN_WA));
		if (ret)
			return ret;
	}

	if (dev->dbdc_support) {
		/* rx data queue for band1; band1 rings live behind the
		 * second HIF when present (hif1_ofs)
		 */
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_EXT],
				       MT_RXQ_ID(MT_RXQ_EXT),
				       MT7915_RX_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_EXT) + hif1_ofs);
		if (ret)
			return ret;

		/* tx free notify event from WA for band1 */
		ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_EXT_WA],
				       MT_RXQ_ID(MT_RXQ_EXT_WA),
				       MT7915_RX_MCU_RING_SIZE,
				       MT_RX_BUF_SIZE,
				       MT_RXQ_RING_BASE(MT_RXQ_EXT_WA) + hif1_ofs);
		if (ret)
			return ret;
	}

	ret = mt76_init_queues(dev, mt76_dma_rx_poll);
	if (ret < 0)
		return ret;

	netif_tx_napi_add(&dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
			  mt7915_poll_tx, NAPI_POLL_WEIGHT);
	napi_enable(&dev->mt76.tx_napi);

	mt7915_dma_enable(dev);

	return 0;
}

/* Tear down DMA: reset and disable the engines, then free all rings. */
void mt7915_dma_cleanup(struct mt7915_dev *dev)
{
	mt7915_dma_disable(dev, true);

	mt76_dma_cleanup(&dev->mt76);
}