// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt76_connac.h"
#include "mt76_connac2_mac.h"
#include "dma.h"

#define HE_BITS(f)		cpu_to_le16(IEEE80211_RADIOTAP_HE_##f)
#define HE_PREP(f, m, v)	le16_encode_bits(le32_get_bits(v, MT_CRXV_HE_##m),\
						 IEEE80211_RADIOTAP_HE_##f)

int mt76_connac_pm_wake(struct mt76_phy *phy, struct mt76_connac_pm *pm)
{
	struct mt76_dev *dev = phy->dev;

	if (mt76_is_usb(dev))
		return 0;

	cancel_delayed_work_sync(&pm->ps_work);
	if (!test_bit(MT76_STATE_PM, &phy->state))
		return 0;

	if (pm->suspended)
		return 0;

	queue_work(dev->wq, &pm->wake_work);
	if (!wait_event_timeout(pm->wait,
				!test_bit(MT76_STATE_PM, &phy->state),
				3 * HZ)) {
		ieee80211_wake_queues(phy->hw);
		return -ETIMEDOUT;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_wake);

void mt76_connac_power_save_sched(struct mt76_phy *phy,
				  struct mt76_connac_pm *pm)
{
	struct mt76_dev *dev = phy->dev;

	if (mt76_is_usb(dev))
		return;

	if (!pm->enable)
		return;

	if (pm->suspended)
		return;

	pm->last_activity = jiffies;

	if (!test_bit(MT76_STATE_PM, &phy->state)) {
		cancel_delayed_work(&phy->mac_work);
		queue_delayed_work(dev->wq, &pm->ps_work, pm->idle_timeout);
	}
}
EXPORT_SYMBOL_GPL(mt76_connac_power_save_sched);

void mt76_connac_free_pending_tx_skbs(struct mt76_connac_pm *pm,
				      struct mt76_wcid *wcid)
{
	int i;

	spin_lock_bh(&pm->txq_lock);
	for (i = 0; i < IEEE80211_NUM_ACS; i++) {
		if (wcid && pm->tx_q[i].wcid != wcid)
			continue;

		dev_kfree_skb(pm->tx_q[i].skb);
		pm->tx_q[i].skb = NULL;
	}
	spin_unlock_bh(&pm->txq_lock);
}
EXPORT_SYMBOL_GPL(mt76_connac_free_pending_tx_skbs);

void mt76_connac_pm_queue_skb(struct ieee80211_hw *hw,
			      struct mt76_connac_pm *pm,
			      struct mt76_wcid *wcid,
			      struct sk_buff *skb)
{
	int qid = skb_get_queue_mapping(skb);
	struct mt76_phy *phy = hw->priv;

	spin_lock_bh(&pm->txq_lock);
	if (!pm->tx_q[qid].skb) {
		ieee80211_stop_queues(hw);
		pm->tx_q[qid].wcid = wcid;
		pm->tx_q[qid].skb = skb;
		queue_work(phy->dev->wq, &pm->wake_work);
	} else {
		dev_kfree_skb(skb);
	}
	spin_unlock_bh(&pm->txq_lock);
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_queue_skb);

void mt76_connac_pm_dequeue_skbs(struct mt76_phy *phy,
				 struct mt76_connac_pm *pm)
{
	int i;

	spin_lock_bh(&pm->txq_lock);
	for (i = 0; i < IEEE80211_NUM_ACS; i++) {
		struct mt76_wcid *wcid = pm->tx_q[i].wcid;
		struct ieee80211_sta *sta = NULL;

		if (!pm->tx_q[i].skb)
			continue;

		if (wcid && wcid->sta)
			sta = container_of((void *)wcid, struct ieee80211_sta,
					   drv_priv);

		mt76_tx(phy, sta, wcid, pm->tx_q[i].skb);
		pm->tx_q[i].skb = NULL;
	}
	spin_unlock_bh(&pm->txq_lock);

	mt76_worker_schedule(&phy->dev->tx_worker);
}
EXPORT_SYMBOL_GPL(mt76_connac_pm_dequeue_skbs);

void mt76_connac_tx_complete_skb(struct mt76_dev *mdev,
				 struct mt76_queue_entry *e)
{
	if (!e->txwi) {
		dev_kfree_skb_any(e->skb);
		return;
	}

	/* error path */
	if (e->skb == DMA_DUMMY_DATA) {
		struct mt76_connac_txp_common *txp;
		struct mt76_txwi_cache *t;
		u16 token;

		txp = mt76_connac_txwi_to_txp(mdev, e->txwi);
		if (is_mt76_fw_txp(mdev))
			token = le16_to_cpu(txp->fw.token);
		else
			token = le16_to_cpu(txp->hw.msdu_id[0]) &
				~MT_MSDU_ID_VALID;

		t = mt76_token_put(mdev, token);
		e->skb = t ? t->skb : NULL;
	}

	if (e->skb)
		mt76_tx_complete_skb(mdev, e->wcid, e->skb);
}
EXPORT_SYMBOL_GPL(mt76_connac_tx_complete_skb);

void mt76_connac_write_hw_txp(struct mt76_dev *dev,
			      struct mt76_tx_info *tx_info,
			      void *txp_ptr, u32 id)
{
	struct mt76_connac_hw_txp *txp = txp_ptr;
	struct mt76_connac_txp_ptr *ptr = &txp->ptr[0];
	int i, nbuf = tx_info->nbuf - 1;
	u32 last_mask;

	tx_info->buf[0].len = MT_TXD_SIZE + sizeof(*txp);
	tx_info->nbuf = 1;

	txp->msdu_id[0] = cpu_to_le16(id | MT_MSDU_ID_VALID);

	if (is_mt7663(dev) || is_mt7921(dev))
		last_mask = MT_TXD_LEN_LAST;
	else
		last_mask = MT_TXD_LEN_AMSDU_LAST |
			    MT_TXD_LEN_MSDU_LAST;

	for (i = 0; i < nbuf; i++) {
		u16 len = tx_info->buf[i + 1].len & MT_TXD_LEN_MASK;
		u32 addr = tx_info->buf[i + 1].addr;

		if (i == nbuf - 1)
			len |= last_mask;

		if (i & 1) {
			ptr->buf1 = cpu_to_le32(addr);
			ptr->len1 = cpu_to_le16(len);
			ptr++;
		} else {
			ptr->buf0 = cpu_to_le32(addr);
			ptr->len0 = cpu_to_le16(len);
		}
	}
}
EXPORT_SYMBOL_GPL(mt76_connac_write_hw_txp);

static void
mt76_connac_txp_skb_unmap_fw(struct mt76_dev *mdev,
			     struct mt76_connac_fw_txp *txp)
{
	struct device *dev = is_connac_v1(mdev) ? mdev->dev : mdev->dma_dev;
	int i;

	for (i = 0; i < txp->nbuf; i++)
		dma_unmap_single(dev, le32_to_cpu(txp->buf[i]),
				 le16_to_cpu(txp->len[i]), DMA_TO_DEVICE);
}

static void
mt76_connac_txp_skb_unmap_hw(struct mt76_dev *dev,
			     struct mt76_connac_hw_txp *txp)
{
	u32 last_mask;
	int i;

	if (is_mt7663(dev) || is_mt7921(dev))
		last_mask = MT_TXD_LEN_LAST;
	else
		last_mask = MT_TXD_LEN_MSDU_LAST;

	for (i = 0; i < ARRAY_SIZE(txp->ptr); i++) {
		struct mt76_connac_txp_ptr *ptr = &txp->ptr[i];
		bool last;
		u16 len;

		len = le16_to_cpu(ptr->len0);
		last = len & last_mask;
		len &= MT_TXD_LEN_MASK;
		dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf0), len,
				 DMA_TO_DEVICE);
		if (last)
			break;

		len = le16_to_cpu(ptr->len1);
		last = len & last_mask;
		len &= MT_TXD_LEN_MASK;
		dma_unmap_single(dev->dev, le32_to_cpu(ptr->buf1), len,
				 DMA_TO_DEVICE);
		if (last)
			break;
	}
}

void mt76_connac_txp_skb_unmap(struct mt76_dev *dev,
			       struct mt76_txwi_cache *t)
{
	struct mt76_connac_txp_common *txp;

	txp = mt76_connac_txwi_to_txp(dev, t);
	if (is_mt76_fw_txp(dev))
		mt76_connac_txp_skb_unmap_fw(dev, &txp->fw);
	else
		mt76_connac_txp_skb_unmap_hw(dev, &txp->hw);
}
EXPORT_SYMBOL_GPL(mt76_connac_txp_skb_unmap);

int mt76_connac_init_tx_queues(struct mt76_phy *phy, int idx, int n_desc,
			       int ring_base, u32 flags)
{
	int i, err;

	err = mt76_init_tx_queue(phy, 0, idx, n_desc, ring_base, flags);
	if (err < 0)
		return err;

	for (i = 1; i <= MT_TXQ_PSD; i++)
		phy->q_tx[i] = phy->q_tx[0];

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac_init_tx_queues);

static u16
mt76_connac2_mac_tx_rate_val(struct mt76_phy *mphy, struct ieee80211_vif *vif,
			     bool beacon, bool mcast)
{
	u8 mode = 0, band = mphy->chandef.chan->band;
	int rateidx = 0, mcast_rate;

	if (!vif)
		goto legacy;

	if (is_mt7921(mphy->dev)) {
		rateidx = ffs(vif->bss_conf.basic_rates) - 1;
		goto legacy;
	}

	if (beacon) {
		struct cfg80211_bitrate_mask *mask;

		mask = &vif->bss_conf.beacon_tx_rate;
		if (hweight16(mask->control[band].he_mcs[0]) == 1) {
			rateidx = ffs(mask->control[band].he_mcs[0]) - 1;
			mode = MT_PHY_TYPE_HE_SU;
			goto out;
		} else if (hweight16(mask->control[band].vht_mcs[0]) == 1) {
			rateidx = ffs(mask->control[band].vht_mcs[0]) - 1;
			mode = MT_PHY_TYPE_VHT;
			goto out;
		} else if (hweight8(mask->control[band].ht_mcs[0]) == 1) {
			rateidx = ffs(mask->control[band].ht_mcs[0]) - 1;
			mode = MT_PHY_TYPE_HT;
			goto out;
		} else if (hweight32(mask->control[band].legacy) == 1) {
			rateidx = ffs(mask->control[band].legacy) - 1;
			goto legacy;
		}
	}

	mcast_rate = vif->bss_conf.mcast_rate[band];
	if (mcast && mcast_rate > 0)
		rateidx = mcast_rate - 1;
	else
		rateidx = ffs(vif->bss_conf.basic_rates) - 1;

legacy:
	rateidx = mt76_calculate_default_rate(mphy, rateidx);
	mode = rateidx >> 8;
	rateidx &= GENMASK(7, 0);

out:
	return FIELD_PREP(MT_TX_RATE_IDX, rateidx) |
	       FIELD_PREP(MT_TX_RATE_MODE, mode);
}

static void
mt76_connac2_mac_write_txwi_8023(__le32 *txwi, struct sk_buff *skb,
				 struct mt76_wcid *wcid)
{
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	u8 fc_type, fc_stype;
	u16 ethertype;
	bool wmm = false;
	u32 val;

	if (wcid->sta) {
		struct ieee80211_sta *sta;

		sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
		wmm = sta->wme;
	}

	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_3) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	ethertype = get_unaligned_be16(&skb->data[12]);
	if (ethertype >= ETH_P_802_3_MIN)
		val |= MT_TXD1_ETH_802_3;

	txwi[1] |= cpu_to_le32(val);

	fc_type = IEEE80211_FTYPE_DATA >> 2;
	fc_stype = wmm ? IEEE80211_STYPE_QOS_DATA >> 4 : 0;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype);

	txwi[2] |= cpu_to_le32(val);

	val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);

	txwi[7] |= cpu_to_le32(val);
}

static void
mt76_connac2_mac_write_txwi_80211(struct mt76_dev *dev, __le32 *txwi,
				  struct sk_buff *skb,
				  struct ieee80211_key_conf *key)
{
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *)skb->data;
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	bool multicast = is_multicast_ether_addr(hdr->addr1);
	u8 tid = skb->priority & IEEE80211_QOS_CTL_TID_MASK;
	__le16 fc = hdr->frame_control;
	u8 fc_type, fc_stype;
	u32 val;

	if (ieee80211_is_action(fc) &&
	    mgmt->u.action.category == WLAN_CATEGORY_BACK &&
	    mgmt->u.action.u.addba_req.action_code == WLAN_ACTION_ADDBA_REQ) {
		u16 capab = le16_to_cpu(mgmt->u.action.u.addba_req.capab);

		txwi[5] |= cpu_to_le32(MT_TXD5_ADD_BA);
		tid = (capab >> 2) & IEEE80211_QOS_CTL_TID_MASK;
	} else if (ieee80211_is_back_req(hdr->frame_control)) {
		struct ieee80211_bar *bar = (struct ieee80211_bar *)hdr;
		u16 control = le16_to_cpu(bar->control);

		tid = FIELD_GET(IEEE80211_BAR_CTRL_TID_INFO_MASK, control);
	}

	val = FIELD_PREP(MT_TXD1_HDR_FORMAT, MT_HDR_FORMAT_802_11) |
	      FIELD_PREP(MT_TXD1_HDR_INFO,
			 ieee80211_get_hdrlen_from_skb(skb) / 2) |
	      FIELD_PREP(MT_TXD1_TID, tid);

	txwi[1] |= cpu_to_le32(val);

	fc_type = (le16_to_cpu(fc) & IEEE80211_FCTL_FTYPE) >> 2;
	fc_stype = (le16_to_cpu(fc) & IEEE80211_FCTL_STYPE) >> 4;

	val = FIELD_PREP(MT_TXD2_FRAME_TYPE, fc_type) |
	      FIELD_PREP(MT_TXD2_SUB_TYPE, fc_stype) |
	      FIELD_PREP(MT_TXD2_MULTICAST, multicast);

	if (key && multicast && ieee80211_is_robust_mgmt_frame(skb) &&
	    key->cipher == WLAN_CIPHER_SUITE_AES_CMAC) {
		val |= MT_TXD2_BIP;
		txwi[3] &= ~cpu_to_le32(MT_TXD3_PROTECT_FRAME);
	}

	if (!ieee80211_is_data(fc) || multicast ||
	    info->flags & IEEE80211_TX_CTL_USE_MINRATE)
		val |= MT_TXD2_FIX_RATE;

	txwi[2] |= cpu_to_le32(val);

	if (ieee80211_is_beacon(fc)) {
		txwi[3] &= ~cpu_to_le32(MT_TXD3_SW_POWER_MGMT);
		txwi[3] |= cpu_to_le32(MT_TXD3_REM_TX_COUNT);
		if (!is_mt7921(dev))
			txwi[7] |= cpu_to_le32(FIELD_PREP(MT_TXD7_SPE_IDX,
							  0x18));
	}

	if (info->flags & IEEE80211_TX_CTL_INJECTED) {
		u16 seqno = le16_to_cpu(hdr->seq_ctrl);

		if (ieee80211_is_back_req(hdr->frame_control)) {
			struct ieee80211_bar *bar;

			bar = (struct ieee80211_bar *)skb->data;
			seqno = le16_to_cpu(bar->start_seq_num);
		}

		val = MT_TXD3_SN_VALID |
		      FIELD_PREP(MT_TXD3_SEQ, IEEE80211_SEQ_TO_SN(seqno));
		txwi[3] |= cpu_to_le32(val);
		txwi[7] &= ~cpu_to_le32(MT_TXD7_HW_AMSDU);
	}

	if (mt76_is_mmio(dev)) {
		val = FIELD_PREP(MT_TXD7_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD7_SUB_TYPE, fc_stype);
		txwi[7] |= cpu_to_le32(val);
	} else {
		val = FIELD_PREP(MT_TXD8_L_TYPE, fc_type) |
		      FIELD_PREP(MT_TXD8_L_SUB_TYPE, fc_stype);
		txwi[8] |= cpu_to_le32(val);
	}
}

void mt76_connac2_mac_write_txwi(struct mt76_dev *dev, __le32 *txwi,
				 struct sk_buff *skb, struct mt76_wcid *wcid,
				 struct ieee80211_key_conf *key, int pid,
				 enum mt76_txq_id qid, u32 changed)
{
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	u8 phy_idx = (info->hw_queue & MT_TX_HW_QUEUE_PHY) >> 2;
	struct ieee80211_vif *vif = info->control.vif;
	struct mt76_phy *mphy = &dev->phy;
	u8 p_fmt, q_idx, omac_idx = 0, wmm_idx = 0, band_idx = 0;
	u32 val, sz_txd = mt76_is_mmio(dev) ? MT_TXD_SIZE : MT_SDIO_TXD_SIZE;
	bool is_8023 = info->flags & IEEE80211_TX_CTL_HW_80211_ENCAP;
	bool beacon = !!(changed & (BSS_CHANGED_BEACON |
				    BSS_CHANGED_BEACON_ENABLED));
	bool inband_disc = !!(changed & (BSS_CHANGED_UNSOL_BCAST_PROBE_RESP |
					 BSS_CHANGED_FILS_DISCOVERY));

	if (vif) {
		struct mt76_vif *mvif = (struct mt76_vif *)vif->drv_priv;

		omac_idx = mvif->omac_idx;
		wmm_idx = mvif->wmm_idx;
		band_idx = mvif->band_idx;
	}

	if (phy_idx && dev->phys[MT_BAND1])
		mphy = dev->phys[MT_BAND1];

	if (inband_disc) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_ALTX0;
	} else if (beacon) {
		p_fmt = MT_TX_TYPE_FW;
		q_idx = MT_LMAC_BCN0;
	} else if (qid >= MT_TXQ_PSD) {
		p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
		q_idx = MT_LMAC_ALTX0;
	} else {
		p_fmt = mt76_is_mmio(dev) ? MT_TX_TYPE_CT : MT_TX_TYPE_SF;
		q_idx = wmm_idx * MT76_CONNAC_MAX_WMM_SETS +
			mt76_connac_lmac_mapping(skb_get_queue_mapping(skb));

		/* counting non-offloading skbs */
		wcid->stats.tx_bytes += skb->len;
		wcid->stats.tx_packets++;
	}

	val = FIELD_PREP(MT_TXD0_TX_BYTES, skb->len + sz_txd) |
	      FIELD_PREP(MT_TXD0_PKT_FMT, p_fmt) |
	      FIELD_PREP(MT_TXD0_Q_IDX, q_idx);
	txwi[0] = cpu_to_le32(val);

	val = MT_TXD1_LONG_FORMAT |
	      FIELD_PREP(MT_TXD1_WLAN_IDX, wcid->idx) |
	      FIELD_PREP(MT_TXD1_OWN_MAC, omac_idx);
	if (!is_mt7921(dev))
		val |= MT_TXD1_VTA;
	if (phy_idx || band_idx)
		val |= MT_TXD1_TGID;

	txwi[1] = cpu_to_le32(val);
	txwi[2] = 0;

	val = FIELD_PREP(MT_TXD3_REM_TX_COUNT, 15);
	if (!is_mt7921(dev))
		val |= MT_TXD3_SW_POWER_MGMT;
	if (key)
		val |= MT_TXD3_PROTECT_FRAME;
	if (info->flags & IEEE80211_TX_CTL_NO_ACK)
		val |= MT_TXD3_NO_ACK;

	txwi[3] = cpu_to_le32(val);
	txwi[4] = 0;

	val = FIELD_PREP(MT_TXD5_PID, pid);
	if (pid >= MT_PACKET_ID_FIRST)
		val |= MT_TXD5_TX_STATUS_HOST;

	txwi[5] = cpu_to_le32(val);
	txwi[6] = 0;
	txwi[7] = wcid->amsdu ? cpu_to_le32(MT_TXD7_HW_AMSDU) : 0;

	if (is_8023)
		mt76_connac2_mac_write_txwi_8023(txwi, skb, wcid);
	else
		mt76_connac2_mac_write_txwi_80211(dev, txwi, skb, key);

	if (txwi[2] & cpu_to_le32(MT_TXD2_FIX_RATE)) {
		/* Fixed rate is available just for 802.11 txd */
		struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
		bool multicast = is_multicast_ether_addr(hdr->addr1);
		u16 rate = mt76_connac2_mac_tx_rate_val(mphy, vif, beacon,
							multicast);
		u32 val = MT_TXD6_FIXED_BW;

		/* hardware won't add HTC for mgmt/ctrl frame */
		txwi[2] |= cpu_to_le32(MT_TXD2_HTC_VLD);

		val |= FIELD_PREP(MT_TXD6_TX_RATE, rate);
		txwi[6] |= cpu_to_le32(val);
		txwi[3] |= cpu_to_le32(MT_TXD3_BA_DISABLE);
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_write_txwi);

bool mt76_connac2_mac_fill_txs(struct mt76_dev *dev, struct mt76_wcid *wcid,
			       __le32 *txs_data)
{
	struct mt76_sta_stats *stats = &wcid->stats;
	struct ieee80211_supported_band *sband;
	struct mt76_phy *mphy;
	struct rate_info rate = {};
	bool cck = false;
	u32 txrate, txs, mode;

	txs = le32_to_cpu(txs_data[0]);

	/* PPDU based reporting */
	if (FIELD_GET(MT_TXS0_TXS_FORMAT, txs) > 1) {
		stats->tx_bytes +=
			le32_get_bits(txs_data[5], MT_TXS5_MPDU_TX_BYTE);
		stats->tx_packets +=
			le32_get_bits(txs_data[5], MT_TXS5_MPDU_TX_CNT);
		stats->tx_failed +=
			le32_get_bits(txs_data[6], MT_TXS6_MPDU_FAIL_CNT);
		stats->tx_retries +=
			le32_get_bits(txs_data[7], MT_TXS7_MPDU_RETRY_CNT);
	}

	txrate = FIELD_GET(MT_TXS0_TX_RATE, txs);

	rate.mcs = FIELD_GET(MT_TX_RATE_IDX, txrate);
	rate.nss = FIELD_GET(MT_TX_RATE_NSS, txrate) + 1;

	if (rate.nss - 1 < ARRAY_SIZE(stats->tx_nss))
		stats->tx_nss[rate.nss - 1]++;
	if (rate.mcs < ARRAY_SIZE(stats->tx_mcs))
		stats->tx_mcs[rate.mcs]++;

	mode = FIELD_GET(MT_TX_RATE_MODE, txrate);
	switch (mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		mphy = &dev->phy;
		if (wcid->phy_idx == MT_BAND1 && dev->phys[MT_BAND1])
			mphy = dev->phys[MT_BAND1];

		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &mphy->sband_5g.sband;
		else if (mphy->chandef.chan->band == NL80211_BAND_6GHZ)
			sband = &mphy->sband_6g.sband;
		else
			sband = &mphy->sband_2g.sband;

		rate.mcs = mt76_get_rate(mphy->dev, sband, rate.mcs, cck);
		rate.legacy = sband->bitrates[rate.mcs].bitrate;
		break;
	case MT_PHY_TYPE_HT:
	case MT_PHY_TYPE_HT_GF:
		if (rate.mcs > 31)
			return false;

		rate.flags = RATE_INFO_FLAGS_MCS;
		if (wcid->rate.flags & RATE_INFO_FLAGS_SHORT_GI)
			rate.flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT_PHY_TYPE_VHT:
		if (rate.mcs > 9)
			return false;

		rate.flags = RATE_INFO_FLAGS_VHT_MCS;
		break;
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
	case MT_PHY_TYPE_HE_MU:
		if (rate.mcs > 11)
			return false;

		rate.he_gi = wcid->rate.he_gi;
		rate.he_dcm = FIELD_GET(MT_TX_RATE_DCM, txrate);
		rate.flags = RATE_INFO_FLAGS_HE_MCS;
		break;
	default:
		return false;
	}

	stats->tx_mode[mode]++;

	switch (FIELD_GET(MT_TXS0_BW, txs)) {
	case IEEE80211_STA_RX_BW_160:
		rate.bw = RATE_INFO_BW_160;
		stats->tx_bw[3]++;
		break;
	case IEEE80211_STA_RX_BW_80:
		rate.bw = RATE_INFO_BW_80;
		stats->tx_bw[2]++;
		break;
	case IEEE80211_STA_RX_BW_40:
		rate.bw = RATE_INFO_BW_40;
		stats->tx_bw[1]++;
		break;
	default:
		rate.bw = RATE_INFO_BW_20;
		stats->tx_bw[0]++;
		break;
	}
	wcid->rate = rate;

	return true;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_txs);

bool mt76_connac2_mac_add_txs_skb(struct mt76_dev *dev, struct mt76_wcid *wcid,
				  int pid, __le32 *txs_data)
{
	struct sk_buff_head list;
	struct sk_buff *skb;

	mt76_tx_status_lock(dev, &list);
	skb = mt76_tx_status_skb_get(dev, wcid, pid, &list);
	if (skb) {
		struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
		bool noacked = !(info->flags & IEEE80211_TX_STAT_ACK);

		if (!(le32_to_cpu(txs_data[0]) & MT_TXS0_ACK_ERROR_MASK))
			info->flags |= IEEE80211_TX_STAT_ACK;

		info->status.ampdu_len = 1;
		info->status.ampdu_ack_len = !noacked;
		info->status.rates[0].idx = -1;

		wcid->stats.tx_failed += noacked;

		mt76_connac2_mac_fill_txs(dev, wcid, txs_data);
		mt76_tx_status_skb_done(dev, skb, &list);
	}
	mt76_tx_status_unlock(dev, &list);

	return !!skb;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_add_txs_skb);

static void
mt76_connac2_mac_decode_he_radiotap_ru(struct mt76_rx_status *status,
				       struct ieee80211_radiotap_he *he,
				       __le32 *rxv)
{
	u32 ru_h, ru_l;
	u8 ru, offs = 0;

	ru_l = le32_get_bits(rxv[0], MT_PRXV_HE_RU_ALLOC_L);
	ru_h = le32_get_bits(rxv[1], MT_PRXV_HE_RU_ALLOC_H);
	ru = (u8)(ru_l | ru_h << 4);

	status->bw = RATE_INFO_BW_HE_RU;

	switch (ru) {
	case 0 ... 36:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_26;
		offs = ru;
		break;
	case 37 ... 52:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_52;
		offs = ru - 37;
		break;
	case 53 ... 60:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_106;
		offs = ru - 53;
		break;
	case 61 ... 64:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_242;
		offs = ru - 61;
		break;
	case 65 ... 66:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_484;
		offs = ru - 65;
		break;
	case 67:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_996;
		break;
	case 68:
		status->he_ru = NL80211_RATE_INFO_HE_RU_ALLOC_2x996;
		break;
	}

	he->data1 |= HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);
	he->data2 |= HE_BITS(DATA2_RU_OFFSET_KNOWN) |
		     le16_encode_bits(offs,
				      IEEE80211_RADIOTAP_HE_DATA2_RU_OFFSET);
}

static void
mt76_connac2_mac_decode_he_mu_radiotap(struct mt76_dev *dev, struct sk_buff *skb,
				       __le32 *rxv)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static struct ieee80211_radiotap_he_mu mu_known = {
		.flags1 = HE_BITS(MU_FLAGS1_SIG_B_MCS_KNOWN) |
			  HE_BITS(MU_FLAGS1_SIG_B_DCM_KNOWN) |
			  HE_BITS(MU_FLAGS1_CH1_RU_KNOWN) |
			  HE_BITS(MU_FLAGS1_SIG_B_SYMS_USERS_KNOWN),
		.flags2 = HE_BITS(MU_FLAGS2_BW_FROM_SIG_A_BW_KNOWN),
	};
	struct ieee80211_radiotap_he_mu *he_mu;

	if (is_mt7921(dev)) {
		mu_known.flags1 |= HE_BITS(MU_FLAGS1_SIG_B_COMP_KNOWN);
		mu_known.flags2 |= HE_BITS(MU_FLAGS2_PUNC_FROM_SIG_A_BW_KNOWN);
	}

	status->flag |= RX_FLAG_RADIOTAP_HE_MU;

	he_mu = skb_push(skb, sizeof(mu_known));
	memcpy(he_mu, &mu_known, sizeof(mu_known));

#define MU_PREP(f, v)	le16_encode_bits(v, IEEE80211_RADIOTAP_HE_MU_##f)

	he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_MCS, status->rate_idx);
	if (status->he_dcm)
		he_mu->flags1 |= MU_PREP(FLAGS1_SIG_B_DCM, status->he_dcm);

	he_mu->flags2 |= MU_PREP(FLAGS2_BW_FROM_SIG_A_BW, status->bw) |
			 MU_PREP(FLAGS2_SIG_B_SYMS_USERS,
				 le32_get_bits(rxv[2], MT_CRXV_HE_NUM_USER));

	he_mu->ru_ch1[0] = le32_get_bits(rxv[3], MT_CRXV_HE_RU0);

	if (status->bw >= RATE_INFO_BW_40) {
		he_mu->flags1 |= HE_BITS(MU_FLAGS1_CH2_RU_KNOWN);
		he_mu->ru_ch2[0] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU1);
	}

	if (status->bw >= RATE_INFO_BW_80) {
		he_mu->ru_ch1[1] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU2);
		he_mu->ru_ch2[1] =
			le32_get_bits(rxv[3], MT_CRXV_HE_RU3);
	}
}

void mt76_connac2_mac_decode_he_radiotap(struct mt76_dev *dev,
					 struct sk_buff *skb,
					 __le32 *rxv, u32 mode)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	static const struct ieee80211_radiotap_he known = {
		.data1 = HE_BITS(DATA1_DATA_MCS_KNOWN) |
			 HE_BITS(DATA1_DATA_DCM_KNOWN) |
			 HE_BITS(DATA1_STBC_KNOWN) |
			 HE_BITS(DATA1_CODING_KNOWN) |
			 HE_BITS(DATA1_LDPC_XSYMSEG_KNOWN) |
			 HE_BITS(DATA1_DOPPLER_KNOWN) |
			 HE_BITS(DATA1_SPTL_REUSE_KNOWN) |
			 HE_BITS(DATA1_BSS_COLOR_KNOWN),
		.data2 = HE_BITS(DATA2_GI_KNOWN) |
			 HE_BITS(DATA2_TXBF_KNOWN) |
			 HE_BITS(DATA2_PE_DISAMBIG_KNOWN) |
			 HE_BITS(DATA2_TXOP_KNOWN),
	};
	u32 ltf_size = le32_get_bits(rxv[2], MT_CRXV_HE_LTF_SIZE) + 1;
	struct ieee80211_radiotap_he *he;

	status->flag |= RX_FLAG_RADIOTAP_HE;

	he = skb_push(skb, sizeof(known));
	memcpy(he, &known, sizeof(known));

	he->data3 = HE_PREP(DATA3_BSS_COLOR, BSS_COLOR, rxv[14]) |
		    HE_PREP(DATA3_LDPC_XSYMSEG, LDPC_EXT_SYM, rxv[2]);
	he->data4 = HE_PREP(DATA4_SU_MU_SPTL_REUSE, SR_MASK, rxv[11]);
	he->data5 = HE_PREP(DATA5_PE_DISAMBIG, PE_DISAMBIG, rxv[2]) |
		    le16_encode_bits(ltf_size,
				     IEEE80211_RADIOTAP_HE_DATA5_LTF_SIZE);
	if (le32_to_cpu(rxv[0]) & MT_PRXV_TXBF)
		he->data5 |= HE_BITS(DATA5_TXBF);
	he->data6 = HE_PREP(DATA6_TXOP, TXOP_DUR, rxv[14]) |
		    HE_PREP(DATA6_DOPPLER, DOPPLER, rxv[14]);

	switch (mode) {
	case MT_PHY_TYPE_HE_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BEAM_CHANGE_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_BEAM_CHANGE, BEAM_CHNG, rxv[14]) |
			     HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_EXT_SU:
		he->data1 |= HE_BITS(DATA1_FORMAT_EXT_SU) |
			     HE_BITS(DATA1_UL_DL_KNOWN) |
			     HE_BITS(DATA1_BW_RU_ALLOC_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		break;
	case MT_PHY_TYPE_HE_MU:
		he->data1 |= HE_BITS(DATA1_FORMAT_MU) |
			     HE_BITS(DATA1_UL_DL_KNOWN);

		he->data3 |= HE_PREP(DATA3_UL_DL, UPLINK, rxv[2]);
		he->data4 |= HE_PREP(DATA4_MU_STA_ID, MU_AID, rxv[7]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		mt76_connac2_mac_decode_he_mu_radiotap(dev, skb, rxv);
		break;
	case MT_PHY_TYPE_HE_TB:
		he->data1 |= HE_BITS(DATA1_FORMAT_TRIG) |
			     HE_BITS(DATA1_SPTL_REUSE2_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE3_KNOWN) |
			     HE_BITS(DATA1_SPTL_REUSE4_KNOWN);

		he->data4 |= HE_PREP(DATA4_TB_SPTL_REUSE1, SR_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE2, SR1_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE3, SR2_MASK, rxv[11]) |
			     HE_PREP(DATA4_TB_SPTL_REUSE4, SR3_MASK, rxv[11]);

		mt76_connac2_mac_decode_he_radiotap_ru(status, he, rxv);
		break;
	default:
		break;
	}
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_decode_he_radiotap);

/* The HW does not translate the mac header to 802.3 for mesh point */
int mt76_connac2_reverse_frag0_hdr_trans(struct ieee80211_vif *vif,
					 struct sk_buff *skb, u16 hdr_offset)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ethhdr *eth_hdr = (struct ethhdr *)(skb->data + hdr_offset);
	__le32 *rxd = (__le32 *)skb->data;
	struct ieee80211_sta *sta;
	struct ieee80211_hdr hdr;
	u16 frame_control;

	if (le32_get_bits(rxd[3], MT_RXD3_NORMAL_ADDR_TYPE) !=
	    MT_RXD3_NORMAL_U2M)
		return -EINVAL;

	if (!(le32_to_cpu(rxd[1]) & MT_RXD1_NORMAL_GROUP_4))
		return -EINVAL;

	sta = container_of((void *)status->wcid, struct ieee80211_sta, drv_priv);

	/* store the info from RXD and ethhdr to avoid being overridden */
	frame_control = le32_get_bits(rxd[6], MT_RXD6_FRAME_CONTROL);
	hdr.frame_control = cpu_to_le16(frame_control);
	hdr.seq_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_SEQ_CTRL));
	hdr.duration_id = 0;

	ether_addr_copy(hdr.addr1, vif->addr);
	ether_addr_copy(hdr.addr2, sta->addr);
	switch (frame_control & (IEEE80211_FCTL_TODS |
				 IEEE80211_FCTL_FROMDS)) {
	case 0:
		ether_addr_copy(hdr.addr3, vif->bss_conf.bssid);
		break;
	case IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_source);
		break;
	case IEEE80211_FCTL_TODS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		break;
	case IEEE80211_FCTL_TODS | IEEE80211_FCTL_FROMDS:
		ether_addr_copy(hdr.addr3, eth_hdr->h_dest);
		ether_addr_copy(hdr.addr4, eth_hdr->h_source);
		break;
	default:
		break;
	}

	skb_pull(skb, hdr_offset + sizeof(struct ethhdr) - 2);
	if (eth_hdr->h_proto == cpu_to_be16(ETH_P_AARP) ||
	    eth_hdr->h_proto == cpu_to_be16(ETH_P_IPX))
		ether_addr_copy(skb_push(skb, ETH_ALEN), bridge_tunnel_header);
	else if (be16_to_cpu(eth_hdr->h_proto) >= ETH_P_802_3_MIN)
		ether_addr_copy(skb_push(skb, ETH_ALEN), rfc1042_header);
	else
		skb_pull(skb, 2);

	if (ieee80211_has_order(hdr.frame_control))
		memcpy(skb_push(skb, IEEE80211_HT_CTL_LEN), &rxd[9],
		       IEEE80211_HT_CTL_LEN);
	if (ieee80211_is_data_qos(hdr.frame_control)) {
		__le16 qos_ctrl;

		qos_ctrl = cpu_to_le16(le32_get_bits(rxd[8], MT_RXD8_QOS_CTL));
		memcpy(skb_push(skb, IEEE80211_QOS_CTL_LEN), &qos_ctrl,
		       IEEE80211_QOS_CTL_LEN);
	}

	if (ieee80211_has_a4(hdr.frame_control))
		memcpy(skb_push(skb, sizeof(hdr)), &hdr, sizeof(hdr));
	else
		memcpy(skb_push(skb, sizeof(hdr) - 6), &hdr, sizeof(hdr) - 6);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_reverse_frag0_hdr_trans);

int mt76_connac2_mac_fill_rx_rate(struct mt76_dev *dev,
				  struct mt76_rx_status *status,
				  struct ieee80211_supported_band *sband,
				  __le32 *rxv, u8 *mode)
{
	u32 v0, v2;
	u8 stbc, gi, bw, dcm, nss;
	int i, idx;
	bool cck = false;

	v0 = le32_to_cpu(rxv[0]);
	v2 = le32_to_cpu(rxv[2]);

	idx = i = FIELD_GET(MT_PRXV_TX_RATE, v0);
	nss = FIELD_GET(MT_PRXV_NSTS, v0) + 1;

	if (!is_mt7915(dev)) {
		stbc = FIELD_GET(MT_PRXV_HT_STBC, v0);
		gi = FIELD_GET(MT_PRXV_HT_SGI, v0);
		*mode = FIELD_GET(MT_PRXV_TX_MODE, v0);
		if (is_mt7921(dev))
			dcm = !!(idx & MT_PRXV_TX_DCM);
		else
			dcm = FIELD_GET(MT_PRXV_DCM, v0);
		bw = FIELD_GET(MT_PRXV_FRAME_MODE, v0);
	} else {
		stbc = FIELD_GET(MT_CRXV_HT_STBC, v2);
		gi = FIELD_GET(MT_CRXV_HT_SHORT_GI, v2);
		*mode = FIELD_GET(MT_CRXV_TX_MODE, v2);
		dcm = !!(idx & GENMASK(3, 0) & MT_PRXV_TX_DCM);
		bw = FIELD_GET(MT_CRXV_FRAME_MODE, v2);
	}

	switch (*mode) {
	case MT_PHY_TYPE_CCK:
		cck = true;
		fallthrough;
	case MT_PHY_TYPE_OFDM:
		i = mt76_get_rate(dev, sband, i, cck);
		break;
	case MT_PHY_TYPE_HT_GF:
	case MT_PHY_TYPE_HT:
		status->encoding = RX_ENC_HT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 31)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_VHT:
		status->nss = nss;
		status->encoding = RX_ENC_VHT;
		if (gi)
			status->enc_flags |= RX_ENC_FLAG_SHORT_GI;
		if (i > 11)
			return -EINVAL;
		break;
	case MT_PHY_TYPE_HE_MU:
	case MT_PHY_TYPE_HE_SU:
	case MT_PHY_TYPE_HE_EXT_SU:
	case MT_PHY_TYPE_HE_TB:
		status->nss = nss;
		status->encoding = RX_ENC_HE;
		i &= GENMASK(3, 0);

		if (gi <= NL80211_RATE_INFO_HE_GI_3_2)
			status->he_gi = gi;

		status->he_dcm = dcm;
		break;
	default:
		return -EINVAL;
	}
	status->rate_idx = i;

	switch (bw) {
	case IEEE80211_STA_RX_BW_20:
		break;
	case IEEE80211_STA_RX_BW_40:
		if (*mode & MT_PHY_TYPE_HE_EXT_SU &&
		    (idx & MT_PRXV_TX_ER_SU_106T)) {
			status->bw = RATE_INFO_BW_HE_RU;
			status->he_ru =
				NL80211_RATE_INFO_HE_RU_ALLOC_106;
		} else {
			status->bw = RATE_INFO_BW_40;
		}
		break;
	case IEEE80211_STA_RX_BW_80:
		status->bw = RATE_INFO_BW_80;
		break;
	case IEEE80211_STA_RX_BW_160:
		status->bw = RATE_INFO_BW_160;
		break;
	default:
		return -EINVAL;
	}

	status->enc_flags |= RX_ENC_FLAG_STBC_MASK * stbc;
	if (*mode < MT_PHY_TYPE_HE_SU && gi)
		status->enc_flags |= RX_ENC_FLAG_SHORT_GI;

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_connac2_mac_fill_rx_rate);