1 // SPDX-License-Identifier: BSD-3-Clause-Clear 2 /* Copyright (C) 2020 MediaTek Inc. */ 3 4 #include "mt76_connac.h" 5 #include "mt76_connac2_mac.h" 6 #include "dma.h" 7 8 #define HE_BITS(f) cpu_to_le16(IEEE80211_RADIOTAP_HE_##f) 9 #define HE_PREP(f, m, v) le16_encode_bits(le32_get_bits(v, MT_CRXV_HE_##m),\ 10 IEEE80211_RADIOTAP_HE_##f) 11 12 void mt76_connac_gen_ppe_thresh(u8 *he_ppet, int nss, enum nl80211_band band) 13 { 14 static const u8 ppet16_ppet8_ru3_ru0[] = { 0x1c, 0xc7, 0x71 }; 15 u8 i, ppet_bits, ppet_size, ru_bit_mask = 0xf; 16 17 if (band == NL80211_BAND_2GHZ) 18 ru_bit_mask = 0x3; 19 20 he_ppet[0] = FIELD_PREP(IEEE80211_PPE_THRES_NSS_MASK, nss - 1) | 21 FIELD_PREP(IEEE80211_PPE_THRES_RU_INDEX_BITMASK_MASK, 22 ru_bit_mask); 23 24 ppet_bits = IEEE80211_PPE_THRES_INFO_PPET_SIZE * 25 nss * hweight8(ru_bit_mask) * 2; 26 ppet_size = DIV_ROUND_UP(ppet_bits, 8); 27 28 for (i = 0; i < ppet_size - 1; i++) 29 he_ppet[i + 1] = ppet16_ppet8_ru3_ru0[i % 3]; 30 31 he_ppet[i + 1] = ppet16_ppet8_ru3_ru0[i % 3] & 32 (0xff >> (8 - (ppet_bits - 1) % 8)); 33 } 34 EXPORT_SYMBOL_GPL(mt76_connac_gen_ppe_thresh); 35 36 int mt76_connac_pm_wake(struct mt76_phy *phy, struct mt76_connac_pm *pm) 37 { 38 struct mt76_dev *dev = phy->dev; 39 40 if (mt76_is_usb(dev)) 41 return 0; 42 43 cancel_delayed_work_sync(&pm->ps_work); 44 if (!test_bit(MT76_STATE_PM, &phy->state)) 45 return 0; 46 47 if (pm->suspended) 48 return 0; 49 50 queue_work(dev->wq, &pm->wake_work); 51 if (!wait_event_timeout(pm->wait, 52 !test_bit(MT76_STATE_PM, &phy->state), 53 3 * HZ)) { 54 ieee80211_wake_queues(phy->hw); 55 return -ETIMEDOUT; 56 } 57 58 return 0; 59 } 60 EXPORT_SYMBOL_GPL(mt76_connac_pm_wake); 61 62 void mt76_connac_power_save_sched(struct mt76_phy *phy, 63 struct mt76_connac_pm *pm) 64 { 65 struct mt76_dev *dev = phy->dev; 66 67 if (mt76_is_usb(dev)) 68 return; 69 70 if (!pm->enable) 71 return; 72 73 if (pm->suspended) 74 return; 75 76 pm->last_activity = jiffies; 77 78 if 
(!test_bit(MT76_STATE_PM, &phy->state)) { 79 cancel_delayed_work(&phy->mac_work); 80 queue_delayed_work(dev->wq, &pm->ps_work, pm->idle_timeout); 81 } 82 } 83 EXPORT_SYMBOL_GPL(mt76_connac_power_save_sched); 84 85 void mt76_connac_free_pending_tx_skbs(struct mt76_connac_pm *pm, 86 struct mt76_wcid *wcid) 87 { 88 int i; 89 90 spin_lock_bh(&pm->txq_lock); 91 for (i = 0; i < IEEE80211_NUM_ACS; i++) { 92 if (wcid && pm->tx_q[i].wcid != wcid) 93 continue; 94 95 dev_kfree_skb(pm->tx_q[i].skb); 96 pm->tx_q[i].skb = NULL; 97 } 98 spin_unlock_bh(&pm->txq_lock); 99 } 100 EXPORT_SYMBOL_GPL(mt76_connac_free_pending_tx_skbs); 101 102 void mt76_connac_pm_queue_skb(struct ieee80211_hw *hw, 103 struct mt76_connac_pm *pm, 104 struct mt76_wcid *wcid, 105 struct sk_buff *skb) 106 { 107 int qid = skb_get_queue_mapping(skb); 108 struct mt76_phy *phy = hw->priv; 109 110 spin_lock_bh(&pm->txq_lock); 111 if (!pm->tx_q[qid].skb) { 112 ieee80211_stop_queues(hw); 113 pm->tx_q[qid].wcid = wcid; 114 pm->tx_q[qid].skb = skb; 115 queue_work(phy->dev->wq, &pm->wake_work); 116 } else { 117 dev_kfree_skb(skb); 118 } 119 spin_unlock_bh(&pm->txq_lock); 120 } 121 EXPORT_SYMBOL_GPL(mt76_connac_pm_queue_skb); 122 123 void mt76_connac_pm_dequeue_skbs(struct mt76_phy *phy, 124 struct mt76_connac_pm *pm) 125 { 126 int i; 127 128 spin_lock_bh(&pm->txq_lock); 129 for (i = 0; i < IEEE80211_NUM_ACS; i++) { 130 struct mt76_wcid *wcid = pm->tx_q[i].wcid; 131 struct ieee80211_sta *sta = NULL; 132 133 if (!pm->tx_q[i].skb) 134 continue; 135 136 if (wcid && wcid->sta) 137 sta = container_of((void *)wcid, struct ieee80211_sta, 138 drv_priv); 139 140 mt76_tx(phy, sta, wcid, pm->tx_q[i].skb); 141 pm->tx_q[i].skb = NULL; 142 } 143 spin_unlock_bh(&pm->txq_lock); 144 145 mt76_worker_schedule(&phy->dev->tx_worker); 146 } 147 EXPORT_SYMBOL_GPL(mt76_connac_pm_dequeue_skbs); 148 149 void mt76_connac_tx_complete_skb(struct mt76_dev *mdev, 150 struct mt76_queue_entry *e) 151 { 152 if (!e->txwi) { 153 dev_kfree_skb_any(e->skb); 
154 return; 155 } 156 157 if (e->skb) 158 mt76_tx_complete_skb(mdev, e->wcid, e->skb); 159 } 160 EXPORT_SYMBOL_GPL(mt76_connac_tx_complete_skb); 161 162 void mt76_connac_write_hw_txp(struct mt76_dev *dev, 163 struct mt76_tx_info *tx_info, 164 void *txp_ptr, u32 id) 165 { 166 struct mt76_connac_hw_txp *txp = txp_ptr; 167 struct mt76_connac_txp_ptr *ptr = &txp->ptr[0]; 168 int i, nbuf = tx_info->nbuf - 1; 169 u32 last_mask; 170 171 tx_info->buf[0].len = MT_TXD_SIZE + sizeof(*txp); 172 tx_info->nbuf = 1; 173 174 txp->msdu_id[0] = cpu_to_le16(id | MT_MSDU_ID_VALID); 175 176 if (is_mt7663(dev) || is_connac2(dev) || is_mt7925(dev)) 177 last_mask = MT_TXD_LEN_LAST; 178 else 179 last_mask = MT_TXD_LEN_AMSDU_LAST | 180 MT_TXD_LEN_MSDU_LAST; 181 182 for (i = 0; i < nbuf; i++) { 183 u16 len = tx_info->buf[i + 1].len & MT_TXD_LEN_MASK; 184 u32 addr = tx_info->buf[i + 1].addr; 185 186 if (i == nbuf - 1) 187 len |= last_mask; 188 189 if (i & 1) { 190 ptr->buf1 = cpu_to_le32(addr); 191 ptr->len1 = cpu_to_le16(len); 192 ptr++; 193 } else { 194 ptr->buf0 = cpu_to_le32(addr); 195 ptr->len0 = cpu_to_le16(len); 196 } 197 } 198 } 199 EXPORT_SYMBOL_GPL(mt76_connac_write_hw_txp); 200 201 static void 202 mt76_connac_txp_skb_unmap_fw(struct mt76_dev *mdev, 203 struct mt76_connac_fw_txp *txp) 204 { 205 struct device *dev = is_connac_v1(mdev) ? 
mdev->dev : mdev->dma_dev; 206 int i; 207 208 for (i = 0; i < txp->nbuf; i++) 209 dma_unmap_single(dev, le32_to_cpu(txp->buf[i]), 210 le16_to_cpu(txp->len[i]), DMA_TO_DEVICE); 211 } 212 213 static void 214 mt76_connac_txp_skb_unmap_hw(struct mt76_dev *dev, 215 struct mt76_connac_hw_txp *txp) 216 { 217 u32 last_mask; 218 int i; 219 220 if (is_mt7663(dev) || is_connac2(dev) || is_mt7925(dev)) 221 last_mask = MT_TXD_LEN_LAST; 222 else 223 last_mask = MT_TXD_LEN_MSDU_LAST; 224 225 for (i = 0; i < ARRAY_SIZE(txp->ptr); i++) { 226 struct mt76_connac_txp_ptr *ptr = &txp->ptr[i]; 227 bool last; 228 u16 len; 229 230 len = le16_to_cpu(ptr->len0); 231 last = len & last_mask; 232 len &= MT_TXD_LEN_MASK; 233 dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf0), len, 234 DMA_TO_DEVICE); 235 if (last) 236 break; 237 238 len = le16_to_cpu(ptr->len1); 239 last = len & last_mask; 240 len &= MT_TXD_LEN_MASK; 241 dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf1), len, 242 DMA_TO_DEVICE); 243 if (last) 244 break; 245 } 246 } 247 248 void mt76_connac_txp_skb_unmap(struct mt76_dev *dev, 249 struct mt76_txwi_cache *t) 250 { 251 struct mt76_connac_txp_common *txp; 252 253 txp = mt76_connac_txwi_to_txp(dev, t); 254 if (is_mt76_fw_txp(dev)) 255 mt76_connac_txp_skb_unmap_fw(dev, &txp->fw); 256 else 257 mt76_connac_txp_skb_unmap_hw(dev, &txp->hw); 258 } 259 EXPORT_SYMBOL_GPL(mt76_connac_txp_skb_unmap); 260 261 int mt76_connac_init_tx_queues(struct mt76_phy *phy, int idx, int n_desc, 262 int ring_base, void *wed, u32 flags) 263 { 264 int i, err; 265 266 err = mt76_init_tx_queue(phy, 0, idx, n_desc, ring_base, 267 wed, flags); 268 if (err < 0) 269 return err; 270 271 for (i = 1; i <= MT_TXQ_PSD; i++) 272 phy->q_tx[i] = phy->q_tx[0]; 273 274 return 0; 275 } 276 EXPORT_SYMBOL_GPL(mt76_connac_init_tx_queues); 277 278 #define __bitrate_mask_check(_mcs, _mode) \ 279 ({ \ 280 u8 i = 0; \ 281 for (nss = 0; i < ARRAY_SIZE(mask->control[band]._mcs); i++) { \ 282 if (!mask->control[band]._mcs[i]) \ 283 
continue; \ 284 if (hweight16(mask->control[band]._mcs[i]) == 1) { \ 285 mode = MT_PHY_TYPE_##_mode; \ 286 rateidx = ffs(mask->control[band]._mcs[i]) - 1; \ 287 if (mode == MT_PHY_TYPE_HT) \ 288 rateidx += 8 * i; \ 289 else \ 290 nss = i + 1; \ 291 goto out; \ 292 } \ 293 } \ 294 }) 295 296 u16 mt76_connac2_mac_tx_rate_val(struct mt76_phy *mphy, 297 struct ieee80211_bss_conf *conf, 298 bool beacon, bool mcast) 299 { 300 u8 nss = 0, mode = 0, band = NL80211_BAND_2GHZ; 301 int rateidx = 0, offset = 0, mcast_rate; 302 struct cfg80211_chan_def *chandef; 303 struct mt76_vif_link *mvif; 304 305 if (!conf) 306 goto legacy; 307 308 mvif = mt76_vif_conf_link(mphy->dev, conf->vif, conf); 309 chandef = mvif->ctx ? &mvif->ctx->def : &mphy->chandef; 310 band = chandef->chan->band; 311 312 if (is_connac2(mphy->dev)) { 313 rateidx = ffs(conf->basic_rates) - 1; 314 goto legacy; 315 } 316 317 if (beacon) { 318 struct cfg80211_bitrate_mask *mask; 319 320 mask = &conf->beacon_tx_rate; 321 322 __bitrate_mask_check(he_mcs, HE_SU); 323 __bitrate_mask_check(vht_mcs, VHT); 324 __bitrate_mask_check(ht_mcs, HT); 325 326 if (hweight32(mask->control[band].legacy) == 1) { 327 rateidx = ffs(mask->control[band].legacy) - 1; 328 goto legacy; 329 } 330 } 331 332 mcast_rate = conf->mcast_rate[band]; 333 if (mcast && mcast_rate > 0) 334 rateidx = mcast_rate - 1; 335 else 336 rateidx = ffs(conf->basic_rates) - 1; 337 338 legacy: 339 if (band != NL80211_BAND_2GHZ) 340 offset = 4; 341 342 /* pick the lowest rate for hidden nodes */ 343 if (rateidx < 0) 344 rateidx = 0; 345 346 rateidx += offset; 347 if (rateidx >= ARRAY_SIZE(mt76_rates)) 348 rateidx = offset; 349 350 rateidx = mt76_rates[rateidx].hw_value; 351 mode = rateidx >> 8; 352 rateidx &= GENMASK(7, 0); 353 out: 354 return FIELD_PREP(MT_TX_RATE_NSS, nss) | 355 FIELD_PREP(MT_TX_RATE_IDX, rateidx) | 356 FIELD_PREP(MT_TX_RATE_MODE, mode); 357 } 358 EXPORT_SYMBOL_GPL(mt76_connac2_mac_tx_rate_val); 359 360 static void 361 
mt76_connac2_mac_write_txwi_8023(__le32 *txwi, struct sk_buff *skb, 362 struct mt76_wcid *wcid) 363 { 364 u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK; 365 u8 fc_type, fc_stype; 366 u16 ethertype; 367 bool wmm = false; 368 u32 val; 369 370 if (wcid->sta) { 371 struct ieee80211_sta *sta; 372 373 sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv); 374 wmm = sta->wme; 375 } 376 377 val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_3) | 378 FIELD_PREP(MT_TXD1_TID, tid); 379 380 ethertype = get_unaligned_be16(&skb->data[12]); 381 if (ethertype >= ETH_P_802_3_MIN) 382 val |= MT_TXD1_ETH_802_3; 383 384 txwi[1] |= cpu_to_le32(val); 385 386 fc_type = IEEE80211_FTYPE_DATA >> 2; 387 fc_stype = wmm ? IEEE80211_STYPE_QOS_DATA >> 4 : 0; 388 389 val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) | 390 FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype); 391 392 txwi[2] |= cpu_to_le32(val); 393 394 val = FIELD_PREP(MT_TXD7_TYPE, fc_type) | 395 FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype); 396 397 txwi[7] |= cpu_to_le32(val); 398 } 399 400 static void 401 mt76_connac2_mac_write_txwi_80211(struct mt76_dev *dev, __le32 *txwi, 402 struct sk_buff *skb, 403 struct ieee80211_key_conf *key) 404 { 405 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; 406 struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *)skb->data; 407 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb); 408 bool multicast = is_multicast_ether_addr(hdr->addr1); 409 u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK; 410 __le16 fc = hdr->frame_control; 411 __le16 sc = hdr->seq_ctrl; 412 u8 fc_type, fc_stype; 413 u32 val; 414 415 if (ieee80211_is_action(fc) && 416 skb->len >= IEEE80211_MIN_ACTION_SIZE(addba_req.capab) && 417 mgmt->u.action.category == WLAN_CATEGORY_BACK && 418 mgmt->u.action.action_code == WLAN_ACTION_ADDBA_REQ) { 419 u16 capab = le16_to_cpu(mgmt->u.action.addba_req.capab); 420 421 txwi[5] |= cpu_to_le32(MT_TXD5_ADD_BA); 422 tid = (capab >> 2) & IEEE80211_QOS_CTL_TID_MASK; 423 } else 
if (ieee80211_is_back_req(hdr->frame_control)) { 424 struct ieee80211_bar *bar = (struct ieee80211_bar *)hdr; 425 u16 control = le16_to_cpu(bar->control); 426 427 tid = FIELD_GET(IEEE80211_BAR_CTRL_TID_INFO_MASK, control); 428 } 429 430 val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) | 431 FIELD_PREP(MT_TXD1_HDR_INFO, 432 ieee80211_get_hdrlen_from_skb(skb) / 2) | 433 FIELD_PREP(MT_TXD1_TID, tid); 434 435 txwi[1] |= cpu_to_le32(val); 436 437 fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2; 438 fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4; 439 440 val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) | 441 FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) | 442 FIELD_PREP(MT_TXD2_MULTICAST, multicast); 443 444 if (key && multicast && ieee80211_is_robust_mgmt_frame(skb) && 445 key->cipher == WLAN_CIPHER_SUITE_AES_CMAC) { 446 val |= MT_TXD2_BIP; 447 txwi[3] &= ~cpu_to_le32(MT_TXD3_PROTECT_FRAME); 448 } 449 450 if (!ieee80211_is_data(fc) || multicast || 451 info->flags & IEEE80211_TX_CTL_USE_MINRATE) 452 val |= MT_TXD2_FIX_RATE; 453 454 if (ieee80211_has_morefrags(fc) && ieee80211_is_first_frag(sc)) 455 val |= FIELD_PREP(MT_TXD2_FRAG, MT_TX_FRAG_FIRST); 456 else if (ieee80211_has_morefrags(fc) && !ieee80211_is_first_frag(sc)) 457 val |= FIELD_PREP(MT_TXD2_FRAG, MT_TX_FRAG_MID); 458 else if (!ieee80211_has_morefrags(fc) && !ieee80211_is_first_frag(sc)) 459 val |= FIELD_PREP(MT_TXD2_FRAG, MT_TX_FRAG_LAST); 460 461 txwi[2] |= cpu_to_le32(val); 462 463 if (ieee80211_is_beacon(fc)) { 464 txwi[3] &= ~cpu_to_le32(MT_TXD3_SW_POWER_MGMT); 465 txwi[3] |= cpu_to_le32(MT_TXD3_REM_TX_COUNT); 466 } 467 468 if (info->flags & IEEE80211_TX_CTL_INJECTED) { 469 u16 seqno = le16_to_cpu(sc); 470 471 if (ieee80211_is_back_req(hdr->frame_control)) { 472 struct ieee80211_bar *bar; 473 474 bar = (struct ieee80211_bar *)skb->data; 475 seqno = le16_to_cpu(bar->start_seq_num); 476 } 477 478 val = MT_TXD3_SN_VALID | 479 FIELD_PREP(MT_TXD3_SEQ, IEEE80211_SEQ_TO_SN(seqno)); 480 txwi[3] |= 
cpu_to_le32(val); 481 txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU); 482 } 483 484 if (mt76_is_mmio(dev)) { 485 val = FIELD_PREP(MT_TXD7_TYPE, fc_type) | 486 FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype); 487 txwi[7] |= cpu_to_le32(val); 488 } else { 489 val = FIELD_PREP(MT_TXD8_L_TYPE, fc_type) | 490 FIELD_PREP(MT_TXD8_L_SUB_TYPE, fc_stype); 491 txwi[8] |= cpu_to_le32(val); 492 } 493 } 494 495 void mt76_connac2_mac_write_txwi(struct mt76_dev *dev, __le32 *txwi, 496 struct sk_buff *skb, struct mt76_wcid *wcid, 497 struct ieee80211_key_conf *key, int pid, 498 enum mt76_txq_id qid, u32 changed) 499 { 500 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb); 501 u8 phy_idx = (info->hw_queue & MT_TX_HW_QUEUE_PHY) >> 2; 502 struct ieee80211_vif *vif = info->control.vif; 503 struct mt76_phy *mphy = &dev->phy; 504 u8 p_fmt, q_idx, omac_idx = 0, wmm_idx = 0, band_idx = 0; 505 u32 val, sz_txd = mt76_is_mmio(dev) ? MT_TXD_SIZE : MT_SDIO_TXD_SIZE; 506 bool is_8023 = info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP; 507 bool beacon = !!(changed & (BSS_CHANGED_BEACON | 508 BSS_CHANGED_BEACON_ENABLED)); 509 bool inband_disc = !!(changed & (BSS_CHANGED_UNSOL_BCAST_PROBE_RESP | 510 BSS_CHANGED_FILS_DISCOVERY)); 511 bool amsdu_en = wcid->amsdu; 512 513 if (vif) { 514 struct mt76_vif_link *mvif = (struct mt76_vif_link *)vif->drv_priv; 515 516 omac_idx = mvif->omac_idx; 517 wmm_idx = mvif->wmm_idx; 518 band_idx = mvif->band_idx; 519 } 520 521 if (phy_idx && dev->phys[MT_BAND1]) 522 mphy = dev->phys[MT_BAND1]; 523 524 if (inband_disc) { 525 p_fmt = MT_TX_TYPE_FW; 526 q_idx = MT_LMAC_ALTX0; 527 } else if (beacon) { 528 p_fmt = MT_TX_TYPE_FW; 529 q_idx = MT_LMAC_BCN0; 530 } else if (qid >= MT_TXQ_PSD) { 531 p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF; 532 q_idx = MT_LMAC_ALTX0; 533 } else { 534 p_fmt = mt76_is_mmio(dev) ? 
MT_TX_TYPE_CT : MT_TX_TYPE_SF; 535 q_idx = wmm_idx * MT76_CONNAC_MAX_WMM_SETS + 536 mt76_connac_lmac_mapping(skb_get_queue_mapping(skb)); 537 538 /* mt7915 WA only counts WED path */ 539 if (is_mt7915(dev) && mtk_wed_device_active(&dev->mmio.wed)) 540 wcid->stats.tx_packets++; 541 } 542 543 val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + sz_txd) | 544 FIELD_PREP(MT_TXD0_PKT_FMT, p_fmt) | 545 FIELD_PREP(MT_TXD0_Q_IDX, q_idx); 546 txwi[0] = cpu_to_le32(val); 547 548 val = MT_TXD1_LONG_FORMAT | 549 FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) | 550 FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx); 551 if (!is_connac2(dev)) 552 val |= MT_TXD1_VTA; 553 if (phy_idx || band_idx) 554 val |= MT_TXD1_TGID; 555 556 txwi[1] = cpu_to_le32(val); 557 txwi[2] = 0; 558 559 val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, 15); 560 if (!is_connac2(dev)) 561 val |= MT_TXD3_SW_POWER_MGMT; 562 if (key) 563 val |= MT_TXD3_PROTECT_FRAME; 564 if (info->flags & IEEE80211_TX_CTL_NO_ACK) 565 val |= MT_TXD3_NO_ACK; 566 567 txwi[3] = cpu_to_le32(val); 568 txwi[4] = 0; 569 570 val = FIELD_PREP(MT_TXD5_PID, pid); 571 if (pid >= MT_PACKET_ID_FIRST) { 572 val |= MT_TXD5_TX_STATUS_HOST; 573 amsdu_en = 0; 574 } 575 576 txwi[5] = cpu_to_le32(val); 577 txwi[6] = 0; 578 txwi[7] = amsdu_en ? cpu_to_le32(MT_TXD7_HW_AMSDU) : 0; 579 580 if (is_8023) 581 mt76_connac2_mac_write_txwi_8023(txwi, skb, wcid); 582 else 583 mt76_connac2_mac_write_txwi_80211(dev, txwi, skb, key); 584 585 if (txwi[2] & cpu_to_le32(MT_TXD2_FIX_RATE)) { 586 /* Fixed rata is available just for 802.11 txd */ 587 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; 588 bool multicast = ieee80211_is_data(hdr->frame_control) && 589 is_multicast_ether_addr(hdr->addr1); 590 u16 rate = mt76_connac2_mac_tx_rate_val(mphy, 591 vif ? 
&vif->bss_conf : NULL, 592 beacon, multicast); 593 u32 val = MT_TXD6_FIXED_BW; 594 595 /* hardware won't add HTC for mgmt/ctrl frame */ 596 txwi[2] |= cpu_to_le32(MT_TXD2_HTC_VLD); 597 598 val |= FIELD_PREP(MT_TXD6_TX_RATE, rate); 599 txwi[6] |= cpu_to_le32(val); 600 txwi[3] |= cpu_to_le32(MT_TXD3_BA_DISABLE); 601 602 if (!is_connac2(dev)) { 603 u8 spe_idx = mt76_connac_spe_idx(mphy->antenna_mask); 604 605 if (!spe_idx) 606 spe_idx = 24 + phy_idx; 607 txwi[7] |= cpu_to_le32(FIELD_PREP(MT_TXD7_SPE_IDX, spe_idx)); 608 } 609 610 txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU); 611 } 612 } 613 EXPORT_SYMBOL_GPL(mt76_connac2_mac_write_txwi); 614 615 bool mt76_connac2_mac_fill_txs(struct mt76_dev *dev, struct mt76_wcid *wcid, 616 __le32 *txs_data) 617 { 618 struct mt76_sta_stats *stats = &wcid->stats; 619 struct ieee80211_supported_band *sband; 620 struct mt76_phy *mphy; 621 struct rate_info rate = {}; 622 bool cck = false; 623 u32 txrate, txs, mode, stbc; 624 625 txs = le32_to_cpu(txs_data[0]); 626 627 /* PPDU based reporting */ 628 if (mtk_wed_device_active(&dev->mmio.wed) && 629 FIELD_GET(MT_TXS0_TXS_FORMAT, txs) > 1) { 630 stats->tx_bytes += 631 le32_get_bits(txs_data[5], MT_TXS5_MPDU_TX_BYTE) - 632 le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_BYTE); 633 stats->tx_failed += 634 le32_get_bits(txs_data[6], MT_TXS6_MPDU_FAIL_CNT); 635 stats->tx_retries += 636 le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_CNT); 637 638 if (wcid->sta) { 639 struct ieee80211_sta *sta; 640 u8 tid; 641 642 sta = container_of((void *)wcid, struct ieee80211_sta, 643 drv_priv); 644 tid = FIELD_GET(MT_TXS0_TID, txs); 645 646 ieee80211_refresh_tx_agg_session_timer(sta, tid); 647 } 648 } 649 650 txrate = FIELD_GET(MT_TXS0_TX_RATE, txs); 651 652 rate.mcs = FIELD_GET(MT_TX_RATE_IDX, txrate); 653 rate.nss = FIELD_GET(MT_TX_RATE_NSS, txrate) + 1; 654 stbc = FIELD_GET(MT_TX_RATE_STBC, txrate); 655 656 if (stbc && rate.nss > 1) 657 rate.nss >>= 1; 658 659 if (rate.nss - 1 < ARRAY_SIZE(stats->tx_nss)) 660 
stats->tx_nss[rate.nss - 1]++; 661 if (rate.mcs < ARRAY_SIZE(stats->tx_mcs)) 662 stats->tx_mcs[rate.mcs]++; 663 664 mode = FIELD_GET(MT_TX_RATE_MODE, txrate); 665 switch (mode) { 666 case MT_PHY_TYPE_CCK: 667 cck = true; 668 fallthrough; 669 case MT_PHY_TYPE_OFDM: 670 mphy = &dev->phy; 671 if (wcid->phy_idx == MT_BAND1 && dev->phys[MT_BAND1]) 672 mphy = dev->phys[MT_BAND1]; 673 674 if (mphy->chandef.chan->band == NL80211_BAND_5GHZ) 675 sband = &mphy->sband_5g.sband; 676 else if (mphy->chandef.chan->band == NL80211_BAND_6GHZ) 677 sband = &mphy->sband_6g.sband; 678 else 679 sband = &mphy->sband_2g.sband; 680 681 rate.mcs = mt76_get_rate(mphy->dev, sband, rate.mcs, cck); 682 rate.legacy = sband->bitrates[rate.mcs].bitrate; 683 break; 684 case MT_PHY_TYPE_HT: 685 case MT_PHY_TYPE_HT_GF: 686 if (rate.mcs > 31) 687 return false; 688 689 rate.flags = RATE_INFO_FLAGS_MCS; 690 if (wcid->rate.flags & RATE_INFO_FLAGS_SHORT_GI) 691 rate.flags |= RATE_INFO_FLAGS_SHORT_GI; 692 break; 693 case MT_PHY_TYPE_VHT: 694 if (rate.mcs > 9) 695 return false; 696 697 rate.flags = RATE_INFO_FLAGS_VHT_MCS; 698 break; 699 case MT_PHY_TYPE_HE_SU: 700 case MT_PHY_TYPE_HE_EXT_SU: 701 case MT_PHY_TYPE_HE_TB: 702 case MT_PHY_TYPE_HE_MU: 703 if (rate.mcs > 11) 704 return false; 705 706 rate.he_gi = wcid->rate.he_gi; 707 rate.he_dcm = FIELD_GET(MT_TX_RATE_DCM, txrate); 708 rate.flags = RATE_INFO_FLAGS_HE_MCS; 709 break; 710 default: 711 return false; 712 } 713 714 stats->tx_mode[mode]++; 715 716 switch (FIELD_GET(MT_TXS0_BW, txs)) { 717 case IEEE80211_STA_RX_BW_160: 718 rate.bw = RATE_INFO_BW_160; 719 stats->tx_bw[3]++; 720 break; 721 case IEEE80211_STA_RX_BW_80: 722 rate.bw = RATE_INFO_BW_80; 723 stats->tx_bw[2]++; 724 break; 725 case IEEE80211_STA_RX_BW_40: 726 rate.bw = RATE_INFO_BW_40; 727 stats->tx_bw[1]++; 728 break; 729 default: 730 rate.bw = RATE_INFO_BW_20; 731 stats->tx_bw[0]++; 732 break; 733 } 734 wcid->rate = rate; 735 736 return true; 737 } 738 
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_txs);

/* Match a per-MPDU TX status event to its pending status skb (by packet
 * id), fill in ACK/rate info and complete it.  PPDU-format events are
 * rejected up front.  Returns true when a matching skb was completed.
 */
bool mt76_connac2_mac_add_txs_skb(struct mt76_dev *dev, struct mt76_wcid *wcid,
				  int pid, __le32 *txs_data)
{
	struct sk_buff_head list;
	struct sk_buff *skb;

	if (le32_get_bits(txs_data[0], MT_TXS0_TXS_FORMAT) == MT_TXS_PPDU_FMT)
		return false;

	mt76_tx_status_lock(dev, &list);
	skb = mt76_tx_status_skb_get(dev, wcid, pid, &list);
	if (skb) {
		struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);

		if (!(le32_to_cpu(txs_data[0]) & MT_TXS0_ACK_ERROR_MASK))
			info->flags |= IEEE80211_TX_STAT_ACK;

		info->status.ampdu_len = 1;
		info->status.ampdu_ack_len =
			!!(info->flags & IEEE80211_TX_STAT_ACK);
		/* rate is reported through wcid->rate, not the legacy table */
		info->status.rates[0].idx = -1;

		mt76_connac2_mac_fill_txs(dev, wcid, txs_data);
		mt76_tx_status_skb_done(dev, skb, &list);
	}
	mt76_tx_status_unlock(dev, &list);

	return !!skb;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_add_txs_skb);

/* Translate the 7-bit RU allocation index from the RXV into the
 * radiotap RU size + offset encoding (802.11ax Table 27-26 style
 * ranges: 0-36 = RU26, 37-52 = RU52, 53-60 = RU106, 61-64 = RU242,
 * 65-66 = RU484, 67 = RU996, 68 = 2xRU996).
 */
static void
mt76_connac2_mac_decode_he_radiotap_ru(struct mt76_rx_status *status,
				       struct ieee80211_radiotap_he *he,
				       __le32 *rxv)
{
	u32 ru_h, ru_l;
	u8 ru, offs = 0;

	/* RU index is split across two RXV words */
	ru_l = le32_get_bits(rxv[0], MT_PRXV_HE_RU_ALLOC_L);
	ru_h = le32_get_bits(rxv[1], MT_PRXV_HE_RU_ALLOC_H);
	ru = (u8)(ru_l | ru_h << 4);

	status->bw = RATE_INFO_BW_HE_RU;

	switch (ru) {
	case 0 ... 36:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_26;
		offs = ru;
		break;
	case 37 ... 52:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_52;
		offs = ru - 37;
		break;
	case 53 ... 60:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_106;
		offs = ru - 53;
		break;
	case 61 ... 64:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_242;
		offs = ru - 61;
		break;
	case 65 ... 66:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_484;
		offs = ru - 65;
		break;
	case 67:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_996;
		break;
	case 68:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_2x996;
		break;
	}

	he->data1 |= HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);
	he->data2 |= HE_BITS(DATA2_RU_OFFSET_KNOWN) |
		     le16_encode_bits(offs,
				      IEEE80211_RADIOTAP_HE_DATA2_RU_OFFSET);
}

/* Prepend an HE-MU radiotap header built from the RXV words.
 * NOTE(review): mu_known is a mutable static shared across all devices;
 * connac2 devices only ever OR extra "known" bits in, which is
 * idempotent, but a mixed connac2/non-connac2 system would see the
 * connac2 bits on both — presumably acceptable upstream behavior.
 */
static void
mt76_connac2_mac_decode_he_mu_radiotap(struct mt76_dev *dev, struct sk_buff *skb,
				       __le32 *rxv)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static struct ieee80211_radiotap_he_mu mu_known = {
		.flags1 = HE_BITS(MU_FLAGS1_SIG_B_MCS_KNOWN) |
			  HE_BITS(MU_FLAGS1_SIG_B_DCM_KNOWN) |
			  HE_BITS(MU_FLAGS1_CH1_RU_KNOWN) |
			  HE_BITS(MU_FLAGS1_SIG_B_SYMS_USERS_KNOWN),
		.flags2 = HE_BITS(MU_FLAGS2_BW_FROM_SIG_A_BW_KNOWN),
	};
	struct ieee80211_radiotap_he_mu *he_mu;

	if (is_connac2(dev)) {
		mu_known.flags1 |= HE_BITS(MU_FLAGS1_SIG_B_COMP_KNOWN);
		mu_known.flags2 |= HE_BITS(MU_FLAGS2_PUNC_FROM_SIG_A_BW_KNOWN);
	}

	status->flag |= RX_FLAG_RADIOTAP_HE_MU;

	he_mu = skb_push(skb, sizeof(mu_known));
	memcpy(he_mu, &mu_known, sizeof(mu_known));

#define MU_PREP(f, v)	le16_encode_bits(v, IEEE80211_RADIOTAP_HE_MU_##f)

	he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_MCS, status->rate_idx);
	if (status->he_dcm)
		he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_DCM, status->he_dcm);

	he_mu->flags2 |= MU_PREP(FLAGS2_BW_FROM_SIG_A_BW, status->bw) |
			 MU_PREP(FLAGS2_SIG_B_SYMS_USERS,
				 le32_get_bits(rxv[2], MT_CRXV_HE_NUM_USER));

	he_mu->ru_ch1[0] = le32_get_bits(rxv[3], MT_CRXV_HE_RU0);

	/* wider bandwidths expose additional per-channel RU fields */
	if (status->bw >= RATE_INFO_BW_40) {
		he_mu->flags1 |= HE_BITS(MU_FLAGS1_CH2_RU_KNOWN);
		he_mu->ru_ch2[0] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU1);
	}

	if (status->bw >= RATE_INFO_BW_80) {
		he_mu->ru_ch1[1] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU2);
		he_mu->ru_ch2[1] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU3);
	}
}

/* Prepend an HE radiotap header for an HE RX frame, filling the
 * per-PPDU-format (SU/EXT-SU/MU/trigger) fields from the RXV words.
 * For MU and TB formats the RU allocation is decoded as well; MU also
 * gets the separate HE-MU radiotap header.
 */
void mt76_connac2_mac_decode_he_radiotap(struct mt76_dev *dev,
					 struct sk_buff *skb,
					 __le32 *rxv, u32 mode)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static const struct ieee80211_radiotap_he known = {
		.data1 = HE_BITS(DATA1_DATA_MCS_KNOWN) |
			 HE_BITS(DATA1_DATA_DCM_KNOWN) |
			 HE_BITS(DATA1_STBC_KNOWN) |
			 HE_BITS(DATA1_CODING_KNOWN) |
			 HE_BITS(DATA1_LDPC_XSYMSEG_KNOWN) |
			 HE_BITS(DATA1_DOPPLER_KNOWN) |
			 HE_BITS(DATA1_SPTL_REUSE_KNOWN) |
			 HE_BITS(DATA1_BSS_COLOR_KNOWN),
		.data2 = HE_BITS(DATA2_GI_KNOWN) |
			 HE_BITS(DATA2_TXBF_KNOWN) |
			 HE_BITS(DATA2_PE_DISAMBIG_KNOWN) |
			 HE_BITS(DATA2_TXOP_KNOWN),
	};
	u32 ltf_size = le32_get_bits(rxv[2], MT_CRXV_HE_LTF_SIZE) + 1;
	struct ieee80211_radiotap_he *he;

	status->flag |= RX_FLAG_RADIOTAP_HE;

	he = skb_push(skb, sizeof(known));
	memcpy(he, &known, sizeof(known));

	he->data3 = HE_PREP(DATA3_BSS_COLOR, BSS_COLOR, rxv[14]) |
		    HE_PREP(DATA3_LDPC_XSYMSEG, LDPC_EXT_SYM, rxv[2]);
	he->data4 = HE_PREP(DATA4_SU_MU_SPTL_REUSE, SR_MASK, rxv[11]);
	he->data5 = HE_PREP(DATA5_PE_DISAMBIG, PE_DISAMBIG, rxv[2]) |
		    le16_encode_bits(ltf_size,
				     IEEE80211_RADIOTAP_HE_DATA5_LTF_SIZE);
	if (le32_to_cpu(rxv[0]) & MT_PRXV_TXBF)
		he->data5 |= HE_BITS(DATA5_TXBF);
	he->data6 = HE_PREP(DATA6_TXOP, TXOP_DUR, rxv[14]) |
		    HE_PREP(DATA6_DOPPLER, DOPPLER, rxv[14]);

	switch (mode) {
	case MT_PHY_TYPE_HE_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BEAM_CHANGE_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_BEAM_CHANGE, BEAM_CHNG, rxv[14]) |
			     HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_EXT_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_EXT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_MU:
		he->data1 |= HE_BITS(DATA1_FORMAT_MU) |
			     HE_BITS(DATA1_UL_DL_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		he->data4 |= HE_PREP(DATA4_MU_STA_ID, MU_AID, rxv[7]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		mt76_connac2_mac_decode_he_mu_radiotap(dev, skb, rxv);
		break;
	case MT_PHY_TYPE_HE_TB:
		he->data1 |= HE_BITS(DATA1_FORMAT_TRIG) |
			     HE_BITS(DATA1_SPTL_REUSE2_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE3_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE4_KNOWN);

		he->data4 |= HE_PREP(DATA4_TB_SPTL_REUSE1, SR_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE2, SR1_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE3, SR2_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE4, SR3_MASK, rxv[11]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		break;
	default:
		break;
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_decode_he_radiotap);

/* The HW does not translate the mac header to 802.3 for mesh point.
 * Rebuild the original 802.11 header (plus QoS/HT-control fields and
 * the LLC/SNAP prefix) in front of the payload from the RXD words and
 * the hardware-translated ethernet header at @hdr_offset.
 * Only unicast-to-me frames with RXD group 4 present are handled;
 * returns 0 on success, -EINVAL otherwise.
 */
int mt76_connac2_reverse_frag0_hdr_trans(struct ieee80211_vif *vif,
					 struct sk_buff *skb, u16 hdr_offset)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ethhdr *eth_hdr = (struct ethhdr *)(skb->data + hdr_offset);
	__le32 *rxd = (__le32 *)skb->data;
	struct ieee80211_sta *sta;
	struct ieee80211_hdr hdr;
	u16 frame_control;

	if (le32_get_bits(rxd[3], MT_RXD3_NORMAL_ADDR_TYPE) !=
	    MT_RXD3_NORMAL_U2M)
		return -EINVAL;

	if (!(le32_to_cpu(rxd[1]) & MT_RXD1_NORMAL_GROUP_4))
		return -EINVAL;

	sta = container_of((void *)status->wcid, struct ieee80211_sta, drv_priv);

	/* store the info from RXD and ethhdr to avoid being overridden */
	frame_control = le32_get_bits(rxd[6], MT_RXD6_FRAME_CONTROL);
	hdr.frame_control = cpu_to_le16(frame_control);
	hdr.seq_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_SEQ_CTRL));
	hdr.duration_id = 0;

	ether_addr_copy(hdr.addr1, vif->addr);
	ether_addr_copy(hdr.addr2, sta->addr);
	/* addr3/addr4 depend on the ToDS/FromDS combination */
	switch (frame_control & (IEEE80211_FCTL_TODS |
				 IEEE80211_FCTL_FROMDS)) {
	case 0:
		ether_addr_copy(hdr.addr3, vif->bss_conf.bssid);
		break;
	case IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_source);
		break;
	case IEEE80211_FCTL_TODS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		break;
	case IEEE80211_FCTL_TODS | IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		ether_addr_copy(hdr.addr4, eth_hdr->h_source);
		break;
	default:
		return -EINVAL;
	}

	/* strip ethernet header, keeping the 2-byte ethertype in place */
	skb_pull(skb, hdr_offset + sizeof(struct ethhdr) - 2);
	if (eth_hdr->h_proto == cpu_to_be16(ETH_P_AARP) ||
	    eth_hdr->h_proto == cpu_to_be16(ETH_P_IPX))
		ether_addr_copy(skb_push(skb, ETH_ALEN), bridge_tunnel_header);
	else if (be16_to_cpu(eth_hdr->h_proto) >= ETH_P_802_3_MIN)
		ether_addr_copy(skb_push(skb, ETH_ALEN), rfc1042_header);
	else
		skb_pull(skb, 2);

	if (ieee80211_has_order(hdr.frame_control))
		memcpy(skb_push(skb, IEEE80211_HT_CTL_LEN), &rxd[9],
		       IEEE80211_HT_CTL_LEN);
	if (ieee80211_is_data_qos(hdr.frame_control)) {
		__le16 qos_ctrl;

		qos_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_QOS_CTL));
		memcpy(skb_push(skb, IEEE80211_QOS_CTL_LEN), &qos_ctrl,
		       IEEE80211_QOS_CTL_LEN);
	}

	/* 3-address headers omit the trailing addr4 (last 6 bytes) */
	if (ieee80211_has_a4(hdr.frame_control))
		memcpy(skb_push(skb, sizeof(hdr)), &hdr, sizeof(hdr));
	else
		memcpy(skb_push(skb, sizeof(hdr) - 6), &hdr, sizeof(hdr) - 6);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_reverse_frag0_hdr_trans);

/* Decode the RX vector rate fields into @status (encoding, MCS, NSS,
 * GI, bandwidth) and report the PHY mode through @mode.  mt7915 keeps
 * these fields in RXV word 2, everything else in word 0.
 * Returns 0 on success or -EINVAL for out-of-range values.
 */
int mt76_connac2_mac_fill_rx_rate(struct mt76_dev *dev,
				  struct mt76_rx_status *status,
				  struct ieee80211_supported_band *sband,
				  __le32 *rxv, u8 *mode)
{
	u32 v0, v2;
	u8 stbc, gi, bw, dcm, nss;
	int i, idx;
	bool cck = false;

	v0 = le32_to_cpu(rxv[0]);
	v2 = le32_to_cpu(rxv[2]);

	idx = i = FIELD_GET(MT_PRXV_TX_RATE, v0);
	nss = FIELD_GET(MT_PRXV_NSTS, v0) + 1;

	if (!is_mt7915(dev)) {
		stbc = FIELD_GET(MT_PRXV_HT_STBC, v0);
		gi = FIELD_GET(MT_PRXV_HT_SGI, v0);
		*mode = FIELD_GET(MT_PRXV_TX_MODE, v0);
		if (is_connac2(dev))
			dcm = !!(idx & MT_PRXV_TX_DCM);
		else
			dcm = FIELD_GET(MT_PRXV_DCM, v0);
		bw = FIELD_GET(MT_PRXV_FRAME_MODE, v0);
	} else {
		stbc = FIELD_GET(MT_CRXV_HT_STBC, v2);
		gi = FIELD_GET(MT_CRXV_HT_SHORT_GI, v2);
		*mode = FIELD_GET(MT_CRXV_TX_MODE, v2);
		dcm = !!(idx & GENMASK(3, 0) & MT_PRXV_TX_DCM);
		bw = FIELD_GET(MT_CRXV_FRAME_MODE, v2);
	}

	switch (*mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		i = mt76_get_rate(dev, sband, i, cck);
		break;
	case MT_PHY_TYPE_HT_GF:
	case MT_PHY_TYPE_HT:
		status->encoding = RX_ENC_HT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 31)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_VHT:
		status->nss = nss;
		status->encoding = RX_ENC_VHT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 11)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_HE_MU:
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
		status->nss = nss;
		status->encoding = RX_ENC_HE;
		i &= GENMASK(3, 0);

		if (gi <= NL80211_RATE_INFO_HE_GI_3_2)
			status->he_gi = gi;

		status->he_dcm = dcm;
		break;
	default:
		return -EINVAL;
	}
	status->rate_idx = i;

	switch (bw) {
	case IEEE80211_STA_RX_BW_20:
		break;
	case IEEE80211_STA_RX_BW_40:
		/* NOTE(review): bitwise & with an enum value (not ==);
		 * matches other mt76 drivers — presumably intentional
		 * shorthand, confirm against the PHY mode encoding.
		 */
		if (*mode & MT_PHY_TYPE_HE_EXT_SU &&
		    (idx & MT_PRXV_TX_ER_SU_106T)) {
			status->bw = RATE_INFO_BW_HE_RU;
			status->he_ru =
				NL80211_RATE_INFO_HE_RU_ALLOC_106;
		} else {
			status->bw = RATE_INFO_BW_40;
		}
		break;
	case IEEE80211_STA_RX_BW_80:
		status->bw = RATE_INFO_BW_80;
		break;
	case IEEE80211_STA_RX_BW_160:
		status->bw = RATE_INFO_BW_160;
		break;
	default:
		return -EINVAL;
	}

	status->enc_flags |= RX_ENC_FLAG_STBC_MASK * stbc;
	if (*mode < MT_PHY_TYPE_HE_SU && gi)
		status->enc_flags |= RX_ENC_FLAG_SHORT_GI;

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_rx_rate);

/* On completion of a QoS data frame for an HT/HE station, try to start
 * a BlockAck session for its TID (VO TIDs excluded); the per-TID
 * ampdu_state bit guards against repeated attempts.
 */
void mt76_connac2_tx_check_aggr(struct ieee80211_sta *sta, __le32 *txwi)
{
	struct mt76_wcid *wcid;
	u16 fc, tid;
	u32 val;

	if (!sta ||
	    !(sta->deflink.ht_cap.ht_supported || sta->deflink.he_cap.has_he))
		return;

	tid = le32_get_bits(txwi[1], MT_TXD1_TID);
	if (tid >= 6) /* skip VO queue */
		return;

	/* reassemble the frame control type/subtype from TXD word 2 */
	val = le32_to_cpu(txwi[2]);
	fc = FIELD_GET(MT_TXD2_FRAME_TYPE, val) << 2 |
	     FIELD_GET(MT_TXD2_SUB_TYPE, val) << 4;
	if (unlikely(fc != (IEEE80211_FTYPE_DATA | IEEE80211_STYPE_QOS_DATA)))
		return;

	wcid = (struct mt76_wcid *)sta->drv_priv;
	if (!test_and_set_bit(tid, &wcid->ampdu_state)) {
		if (ieee80211_start_tx_ba_session(sta, tid, 0))
			clear_bit(tid, &wcid->ampdu_state);
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_tx_check_aggr);

/* Release a completed TXWI: unmap its payload buffers, resolve the
 * owning station (from @sta or the WLAN index in the TXD), kick BA
 * session setup when appropriate, complete the skb and return the
 * txwi cache entry to the pool.
 */
void mt76_connac2_txwi_free(struct mt76_dev *dev, struct mt76_txwi_cache *t,
			    struct ieee80211_sta *sta,
			    struct list_head *free_list)
{
	struct mt76_wcid *wcid;
	__le32 *txwi;
	u16 wcid_idx;

	mt76_connac_txp_skb_unmap(dev, t);
	if (!t->skb)
		goto out;

	txwi = (__le32 *)mt76_get_txwi_ptr(dev, t);
	if (sta) {
		wcid = (struct mt76_wcid *)sta->drv_priv;
		wcid_idx = wcid->idx;
	} else {
		wcid_idx = le32_get_bits(txwi[1], MT_TXD1_WLAN_IDX);
		wcid = __mt76_wcid_ptr(dev, wcid_idx);

		if (wcid && wcid->sta) {
			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);
			mt76_wcid_add_poll(dev, wcid);
		}
	}

	/* don't trigger BA sessions from EAPOL frames */
	if (sta && likely(t->skb->protocol != cpu_to_be16(ETH_P_PAE)))
		mt76_connac2_tx_check_aggr(sta, txwi);

	__mt76_tx_complete_skb(dev, wcid_idx, t->skb, free_list);
out:
	t->skb = NULL;
	mt76_put_txwi(dev, t);
}
EXPORT_SYMBOL_GPL(mt76_connac2_txwi_free);

/* Tear down the TX token IDR on reset/removal: free every outstanding
 * txwi (without a known station) and clear the per-phy pending
 * management-frame counters.
 */
void mt76_connac2_tx_token_put(struct mt76_dev *dev)
{
	struct mt76_txwi_cache *txwi;
	int id;

	spin_lock_bh(&dev->token_lock);
	idr_for_each_entry(&dev->token, txwi, id) {
		mt76_connac2_txwi_free(dev, txwi, NULL, NULL);
		dev->token_count--;
	}
	spin_unlock_bh(&dev->token_lock);
	idr_destroy(&dev->token);

	for (id = 0; id < __MT_MAX_BAND; id++) {
		struct mt76_phy *phy = dev->phys[id];

		if (phy)
			atomic_set(&phy->mgmt_tx_pending, 0);
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_tx_token_put);