1 // SPDX-License-Identifier: ISC
88 /* UNII-5 */
113 /* UNII-6 */
120 /* UNII-7 */
138 /* UNII-8 */
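These UNII-5 through UNII-8 markers are the section comments of a 6 GHz channel table. As a rough illustration of what such an entry looks like, a sketch follows; the CHAN6G-style helper name and the max_power value are assumptions, not taken from this listing:

/* Hypothetical 6 GHz channel initializer in the style of the 2/5 GHz tables;
 * the helper name and field values are illustrative only.
 */
#define CHAN6G(_idx, _freq) {                   \
        .band = NL80211_BAND_6GHZ,              \
        .center_freq = (_freq),                 \
        .hw_value = (_idx),                     \
        .max_power = 30,                        \
}

static const struct ieee80211_channel example_channels_6ghz[] = {
        CHAN6G(1, 5955),        /* UNII-5 */
        CHAN6G(97, 6435),       /* UNII-6 */
        CHAN6G(117, 6535),      /* UNII-7 */
        CHAN6G(189, 6895),      /* UNII-8 */
};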
207 struct mt76_dev *dev = phy->dev; in mt76_led_init()
208 struct ieee80211_hw *hw = phy->hw; in mt76_led_init()
209 struct device_node *np = dev->dev->of_node; in mt76_led_init()
211 if (!phy->leds.cdev.brightness_set && !phy->leds.cdev.blink_set) in mt76_led_init()
218 dev_info(dev->dev, in mt76_led_init()
223 if (phy == &dev->phy) { in mt76_led_init()
226 if (!of_property_read_u32(np, "led-sources", &led_pin)) in mt76_led_init()
227 phy->leds.pin = led_pin; in mt76_led_init()
229 phy->leds.al = in mt76_led_init()
230 of_property_read_bool(np, "led-active-low"); in mt76_led_init()
236 snprintf(phy->leds.name, sizeof(phy->leds.name), "mt76-%s", in mt76_led_init()
237 wiphy_name(hw->wiphy)); in mt76_led_init()
239 phy->leds.cdev.name = phy->leds.name; in mt76_led_init()
240 phy->leds.cdev.default_trigger = in mt76_led_init()
246 dev_info(dev->dev, in mt76_led_init()
247 "registering led '%s'\n", phy->leds.name); in mt76_led_init()
249 return led_classdev_register(dev->dev, &phy->leds.cdev); in mt76_led_init()
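mt76_led_init() only registers the LED class device when the driver has installed at least one of the brightness_set/blink_set callbacks beforehand. A minimal sketch of that calling pattern, using placeholder symbols rather than any real driver's names:

/* Sketch: the driver wires up its LED callbacks before device registration;
 * mt76_led_init(), invoked from the core registration path, then picks up
 * "led-sources" / "led-active-low" from DT and registers the classdev.
 */
static void example_led_set_brightness(struct led_classdev *cdev,
                                       enum led_brightness brightness)
{
        /* cdev is embedded in phy->leds; recover the phy with container_of()
         * and program the LED based on phy->leds.pin and phy->leds.al.
         */
}

static void example_init_leds(struct mt76_phy *phy)
{
        phy->leds.cdev.brightness_set = example_led_set_brightness;
        /* optionally: phy->leds.cdev.blink_set = example_led_blink; */
}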
254 if (!phy->leds.cdev.brightness_set && !phy->leds.cdev.blink_set) in mt76_led_cleanup()
257 led_classdev_unregister(&phy->leds.cdev); in mt76_led_cleanup()
265 struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap; in mt76_init_stream_cap()
266 int i, nstream = hweight8(phy->antenna_mask); in mt76_init_stream_cap()
271 ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC; in mt76_init_stream_cap()
273 ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC; in mt76_init_stream_cap()
276 ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0; in mt76_init_stream_cap()
281 vht_cap = &sband->vht_cap; in mt76_init_stream_cap()
283 vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC; in mt76_init_stream_cap()
285 vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC; in mt76_init_stream_cap()
286 vht_cap->cap |= IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN | in mt76_init_stream_cap()
296 vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map); in mt76_init_stream_cap()
297 vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map); in mt76_init_stream_cap()
298 if (ieee80211_hw_check(phy->hw, SUPPORTS_VHT_EXT_NSS_BW)) in mt76_init_stream_cap()
299 vht_cap->vht_mcs.tx_highest |= in mt76_init_stream_cap()
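The lines elided between clearing TX STBC and writing vht_mcs.rx_mcs_map/tx_mcs_map build the per-stream MCS map. A hedged reconstruction of that step, assuming the usual mac80211 idiom of advertising MCS 0-9 for each available spatial stream:

        /* Reconstruction sketch of the elided MCS-map build: MCS 0-9 for each
         * of the nstream spatial streams, everything else marked unsupported.
         */
        u16 mcs_map = 0;
        int i;

        for (i = 0; i < 8; i++) {
                if (i < nstream)
                        mcs_map |= IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2);
                else
                        mcs_map |= IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2);
        }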
305 if (phy->cap.has_2ghz) in mt76_set_stream_caps()
306 mt76_init_stream_cap(phy, &phy->sband_2g.sband, false); in mt76_set_stream_caps()
307 if (phy->cap.has_5ghz) in mt76_set_stream_caps()
308 mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht); in mt76_set_stream_caps()
309 if (phy->cap.has_6ghz) in mt76_set_stream_caps()
310 mt76_init_stream_cap(phy, &phy->sband_6g.sband, vht); in mt76_set_stream_caps()
317 struct ieee80211_rate *rates, int n_rates, in mt76_init_sband() argument
320 struct ieee80211_supported_band *sband = &msband->sband; in mt76_init_sband()
323 struct mt76_dev *dev = phy->dev; in mt76_init_sband()
328 chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL); in mt76_init_sband()
330 return -ENOMEM; in mt76_init_sband()
332 msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan), in mt76_init_sband()
334 if (!msband->chan) in mt76_init_sband()
335 return -ENOMEM; in mt76_init_sband()
337 sband->channels = chanlist; in mt76_init_sband()
338 sband->n_channels = n_chan; in mt76_init_sband()
339 sband->bitrates = rates; in mt76_init_sband()
340 sband->n_bitrates = n_rates; in mt76_init_sband()
345 ht_cap = &sband->ht_cap; in mt76_init_sband()
346 ht_cap->ht_supported = true; in mt76_init_sband()
347 ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 | in mt76_init_sband()
353 ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED; in mt76_init_sband()
354 ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K; in mt76_init_sband()
361 vht_cap = &sband->vht_cap; in mt76_init_sband()
362 vht_cap->vht_supported = true; in mt76_init_sband()
363 vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC | in mt76_init_sband()
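The rates/n_rates pair handed to mt76_init_sband() is a driver-supplied legacy bitrate table. A hedged example of its shape (bitrates are in 100 kbit/s units; the hw_value codes here are purely illustrative):

/* Illustrative legacy rate table: four CCK entries first, then OFDM.
 * The 5/6 GHz registration calls below pass "rates + 4, n_rates - 4"
 * so the CCK entries are skipped on those bands.
 */
static struct ieee80211_rate example_rates[] = {
        { .bitrate = 10,  .hw_value = 0x0 },    /* 1 Mbit/s, CCK */
        { .bitrate = 20,  .hw_value = 0x1 },
        { .bitrate = 55,  .hw_value = 0x2 },
        { .bitrate = 110, .hw_value = 0x3 },
        { .bitrate = 60,  .hw_value = 0xb },    /* 6 Mbit/s, OFDM */
        { .bitrate = 90,  .hw_value = 0xf },
        { .bitrate = 120, .hw_value = 0xa },
        { .bitrate = 180, .hw_value = 0xe },
        { .bitrate = 240, .hw_value = 0x9 },
        { .bitrate = 360, .hw_value = 0xd },
        { .bitrate = 480, .hw_value = 0x8 },
        { .bitrate = 720, .hw_value = 0xc },
};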
372 mt76_init_sband_2g(struct mt76_phy *phy, struct ieee80211_rate *rates, in mt76_init_sband_2g() argument
375 phy->hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband; in mt76_init_sband_2g()
377 return mt76_init_sband(phy, &phy->sband_2g, mt76_channels_2ghz, in mt76_init_sband_2g()
378 ARRAY_SIZE(mt76_channels_2ghz), rates, in mt76_init_sband_2g()
383 mt76_init_sband_5g(struct mt76_phy *phy, struct ieee80211_rate *rates, in mt76_init_sband_5g() argument
386 phy->hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband; in mt76_init_sband_5g()
388 return mt76_init_sband(phy, &phy->sband_5g, mt76_channels_5ghz, in mt76_init_sband_5g()
389 ARRAY_SIZE(mt76_channels_5ghz), rates, in mt76_init_sband_5g()
394 mt76_init_sband_6g(struct mt76_phy *phy, struct ieee80211_rate *rates, in mt76_init_sband_6g() argument
397 phy->hw->wiphy->bands[NL80211_BAND_6GHZ] = &phy->sband_6g.sband; in mt76_init_sband_6g()
399 return mt76_init_sband(phy, &phy->sband_6g, mt76_channels_6ghz, in mt76_init_sband_6g()
400 ARRAY_SIZE(mt76_channels_6ghz), rates, in mt76_init_sband_6g()
408 struct ieee80211_supported_band *sband = &msband->sband; in mt76_check_sband()
415 for (i = 0; i < sband->n_channels; i++) { in mt76_check_sband()
416 if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED) in mt76_check_sband()
424 cfg80211_chandef_create(&phy->chandef, &sband->channels[0], in mt76_check_sband()
426 phy->chan_state = &msband->chan[0]; in mt76_check_sband()
427 phy->dev->band_phys[band] = phy; in mt76_check_sband()
431 sband->n_channels = 0; in mt76_check_sband()
432 if (phy->hw->wiphy->bands[band] == sband) in mt76_check_sband()
433 phy->hw->wiphy->bands[band] = NULL; in mt76_check_sband()
439 struct mt76_dev *dev = phy->dev; in mt76_phy_init()
440 struct wiphy *wiphy = hw->wiphy; in mt76_phy_init()
442 INIT_LIST_HEAD(&phy->tx_list); in mt76_phy_init()
443 spin_lock_init(&phy->tx_lock); in mt76_phy_init()
444 INIT_DELAYED_WORK(&phy->roc_work, mt76_roc_complete_work); in mt76_phy_init()
446 if ((void *)phy != hw->priv) in mt76_phy_init()
449 SET_IEEE80211_DEV(hw, dev->dev); in mt76_phy_init()
450 SET_IEEE80211_PERM_ADDR(hw, phy->macaddr); in mt76_phy_init()
452 wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR | in mt76_phy_init()
454 wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH | in mt76_phy_init()
462 wiphy->available_antennas_tx = phy->antenna_mask; in mt76_phy_init()
463 wiphy->available_antennas_rx = phy->antenna_mask; in mt76_phy_init()
465 wiphy->sar_capa = &mt76_sar_capa; in mt76_phy_init()
466 phy->frp = devm_kcalloc(dev->dev, wiphy->sar_capa->num_freq_ranges, in mt76_phy_init()
469 if (!phy->frp) in mt76_phy_init()
470 return -ENOMEM; in mt76_phy_init()
472 hw->txq_data_size = sizeof(struct mt76_txq); in mt76_phy_init()
473 hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL; in mt76_phy_init()
475 if (!hw->max_tx_fragments) in mt76_phy_init()
476 hw->max_tx_fragments = 16; in mt76_phy_init()
487 if (!(dev->drv->drv_flags & MT_DRV_AMSDU_OFFLOAD) && in mt76_phy_init()
488 hw->max_tx_fragments > 1) { in mt76_phy_init()
504 struct ieee80211_hw *hw = dev->phy.hw; in mt76_alloc_radio_phy()
509 phy = devm_kzalloc(dev->dev, size + phy_size, GFP_KERNEL); in mt76_alloc_radio_phy()
513 phy->dev = dev; in mt76_alloc_radio_phy()
514 phy->hw = hw; in mt76_alloc_radio_phy()
516 phy->priv = (void *)phy + phy_size; in mt76_alloc_radio_phy()
518 phy->priv = (u8 *)phy + phy_size; in mt76_alloc_radio_phy()
520 phy->band_idx = band_idx; in mt76_alloc_radio_phy()
539 phy = hw->priv; in mt76_alloc_phy()
540 phy->dev = dev; in mt76_alloc_phy()
541 phy->hw = hw; in mt76_alloc_phy()
543 phy->priv = hw->priv + phy_size; in mt76_alloc_phy()
545 phy->priv = (u8 *)hw->priv + phy_size; in mt76_alloc_phy()
547 phy->band_idx = band_idx; in mt76_alloc_phy()
549 hw->wiphy->flags |= WIPHY_FLAG_IBSS_RSN; in mt76_alloc_phy()
550 hw->wiphy->interface_modes = in mt76_alloc_phy()
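In both allocation helpers the driver's private per-phy area is placed directly behind the core phy structure and exposed through phy->priv. A minimal usage sketch, assuming the signature (size, ops, band index) suggested by the allocation code above and placeholder driver types:

struct example_phy {
        struct mt76_phy *mt76;
        /* driver-specific per-band state */
};

static struct example_phy *example_phy_alloc(struct mt76_dev *dev,
                                             const struct ieee80211_ops *ops)
{
        struct mt76_phy *mphy;
        struct example_phy *ephy;

        /* size covers only the driver part; the core adds its own */
        mphy = mt76_alloc_phy(dev, sizeof(*ephy), ops, MT_BAND1);
        if (!mphy)
                return NULL;

        ephy = mphy->priv;      /* points right behind the core phy data */
        ephy->mt76 = mphy;

        return ephy;
}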
565 struct ieee80211_rate *rates, int n_rates) in mt76_register_phy() argument
569 ret = mt76_phy_init(phy, phy->hw); in mt76_register_phy()
573 if (phy->cap.has_2ghz) { in mt76_register_phy()
574 ret = mt76_init_sband_2g(phy, rates, n_rates); in mt76_register_phy()
579 if (phy->cap.has_5ghz) { in mt76_register_phy()
580 ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht); in mt76_register_phy()
585 if (phy->cap.has_6ghz) { in mt76_register_phy()
586 ret = mt76_init_sband_6g(phy, rates + 4, n_rates - 4); in mt76_register_phy()
599 wiphy_read_of_freq_limits(phy->hw->wiphy); in mt76_register_phy()
600 mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ); in mt76_register_phy()
601 mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ); in mt76_register_phy()
602 mt76_check_sband(phy, &phy->sband_6g, NL80211_BAND_6GHZ); in mt76_register_phy()
604 if ((void *)phy == phy->hw->priv) { in mt76_register_phy()
605 ret = ieee80211_register_hw(phy->hw); in mt76_register_phy()
610 set_bit(MT76_STATE_REGISTERED, &phy->state); in mt76_register_phy()
611 phy->dev->phys[phy->band_idx] = phy; in mt76_register_phy()
619 struct mt76_dev *dev = phy->dev; in mt76_unregister_phy()
621 if (!test_bit(MT76_STATE_REGISTERED, &phy->state)) in mt76_unregister_phy()
629 ieee80211_unregister_hw(phy->hw); in mt76_unregister_phy()
630 dev->phys[phy->band_idx] = NULL; in mt76_unregister_phy()
641 .dev = dev->dma_dev, in mt76_create_page_pool()
643 int idx = is_qrx ? q - dev->q_rx : -1; in mt76_create_page_pool()
667 if (idx >= 0 && idx < ARRAY_SIZE(dev->napi)) in mt76_create_page_pool()
668 pp_params.napi = &dev->napi[idx]; in mt76_create_page_pool()
671 q->page_pool = page_pool_create(&pp_params); in mt76_create_page_pool()
672 if (IS_ERR(q->page_pool)) { in mt76_create_page_pool()
673 int err = PTR_ERR(q->page_pool); in mt76_create_page_pool()
675 q->page_pool = NULL; in mt76_create_page_pool()
697 dev = hw->priv; in mt76_alloc_device()
698 dev->hw = hw; in mt76_alloc_device()
699 dev->dev = pdev; in mt76_alloc_device()
700 dev->drv = drv_ops; in mt76_alloc_device()
701 dev->dma_dev = pdev; in mt76_alloc_device()
703 phy = &dev->phy; in mt76_alloc_device()
704 phy->dev = dev; in mt76_alloc_device()
705 phy->hw = hw; in mt76_alloc_device()
706 phy->band_idx = MT_BAND0; in mt76_alloc_device()
707 dev->phys[phy->band_idx] = phy; in mt76_alloc_device()
709 spin_lock_init(&dev->rx_lock); in mt76_alloc_device()
710 spin_lock_init(&dev->lock); in mt76_alloc_device()
711 spin_lock_init(&dev->cc_lock); in mt76_alloc_device()
712 spin_lock_init(&dev->status_lock); in mt76_alloc_device()
713 spin_lock_init(&dev->wed_lock); in mt76_alloc_device()
714 mutex_init(&dev->mutex); in mt76_alloc_device()
715 init_waitqueue_head(&dev->tx_wait); in mt76_alloc_device()
717 skb_queue_head_init(&dev->mcu.res_q); in mt76_alloc_device()
718 init_waitqueue_head(&dev->mcu.wait); in mt76_alloc_device()
719 mutex_init(&dev->mcu.mutex); in mt76_alloc_device()
720 dev->tx_worker.fn = mt76_tx_worker; in mt76_alloc_device()
722 hw->wiphy->flags |= WIPHY_FLAG_IBSS_RSN; in mt76_alloc_device()
723 hw->wiphy->interface_modes = in mt76_alloc_device()
733 spin_lock_init(&dev->token_lock); in mt76_alloc_device()
734 idr_init(&dev->token); in mt76_alloc_device()
736 spin_lock_init(&dev->rx_token_lock); in mt76_alloc_device()
737 idr_init(&dev->rx_token); in mt76_alloc_device()
739 INIT_LIST_HEAD(&dev->wcid_list); in mt76_alloc_device()
740 INIT_LIST_HEAD(&dev->sta_poll_list); in mt76_alloc_device()
741 spin_lock_init(&dev->sta_poll_lock); in mt76_alloc_device()
743 INIT_LIST_HEAD(&dev->txwi_cache); in mt76_alloc_device()
744 INIT_LIST_HEAD(&dev->rxwi_cache); in mt76_alloc_device()
745 dev->token_size = dev->drv->token_size; in mt76_alloc_device()
746 INIT_DELAYED_WORK(&dev->scan_work, mt76_scan_work); in mt76_alloc_device()
748 for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++) in mt76_alloc_device()
749 skb_queue_head_init(&dev->rx_skb[i]); in mt76_alloc_device()
751 dev->wq = alloc_ordered_workqueue("mt76", 0); in mt76_alloc_device()
752 if (!dev->wq) { in mt76_alloc_device()
762 struct ieee80211_rate *rates, int n_rates) in mt76_register_device() argument
764 struct ieee80211_hw *hw = dev->hw; in mt76_register_device()
765 struct mt76_phy *phy = &dev->phy; in mt76_register_device()
768 dev_set_drvdata(dev->dev, dev); in mt76_register_device()
769 mt76_wcid_init(&dev->global_wcid, phy->band_idx); in mt76_register_device()
774 if (phy->cap.has_2ghz) { in mt76_register_device()
775 ret = mt76_init_sband_2g(phy, rates, n_rates); in mt76_register_device()
780 if (phy->cap.has_5ghz) { in mt76_register_device()
781 ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht); in mt76_register_device()
786 if (phy->cap.has_6ghz) { in mt76_register_device()
787 ret = mt76_init_sband_6g(phy, rates + 4, n_rates - 4); in mt76_register_device()
792 wiphy_read_of_freq_limits(hw->wiphy); in mt76_register_device()
793 mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ); in mt76_register_device()
794 mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ); in mt76_register_device()
795 mt76_check_sband(&dev->phy, &phy->sband_6g, NL80211_BAND_6GHZ); in mt76_register_device()
809 WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx")); in mt76_register_device()
810 set_bit(MT76_STATE_REGISTERED, &phy->state); in mt76_register_device()
811 sched_set_fifo_low(dev->tx_worker.task); in mt76_register_device()
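Taken together, the registration path is driven from a driver's probe function. A hedged sketch of that flow, reusing the example_rates table sketched earlier; every name prefixed with example_ is a placeholder, not a real mt76 symbol:

struct example_dev {
        struct mt76_dev mt76;   /* must stay the first member */
        /* driver-specific device state */
};

static const struct ieee80211_ops example_ops = {
        /* .tx, .start, .stop, ... driver mac80211 callbacks */
};

static int example_probe(struct device *pdev)
{
        static const struct mt76_driver_ops drv_ops = {
                /* .txwi_size, .tx_prepare_skb, ... */
        };
        struct mt76_dev *mdev;
        int ret;

        mdev = mt76_alloc_device(pdev, sizeof(struct example_dev),
                                 &example_ops, &drv_ops);
        if (!mdev)
                return -ENOMEM;

        /* fill mdev->phy.macaddr, antenna_mask, cap.has_2ghz/has_5ghz, ... */

        ret = mt76_register_device(mdev, true, example_rates,
                                   ARRAY_SIZE(example_rates));
        if (ret)
                mt76_free_device(mdev);

        return ret;
}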
820 struct ieee80211_hw *hw = dev->hw; in mt76_unregister_device()
823 if (!test_bit(MT76_STATE_REGISTERED, &dev->phy.state)) in mt76_unregister_device()
828 mt76_led_cleanup(&dev->phy); in mt76_unregister_device()
831 mt76_wcid_cleanup(dev, &dev->global_wcid); in mt76_unregister_device()
835 ieee80211_unregister_hw(dev->hw); in mt76_unregister_device()
842 mt76_worker_teardown(&dev->tx_worker); in mt76_free_device()
843 if (dev->wq) { in mt76_free_device()
844 destroy_workqueue(dev->wq); in mt76_free_device()
845 dev->wq = NULL; in mt76_free_device()
847 ieee80211_free_hw(dev->hw); in mt76_free_device()
854 struct mt76_vif_link *mlink = (struct mt76_vif_link *)vif->drv_priv; in mt76_vif_phy()
857 if (!hw->wiphy->n_radio) in mt76_vif_phy()
858 return hw->priv; in mt76_vif_phy()
860 if (!mlink->ctx) in mt76_vif_phy()
863 ctx = (struct mt76_chanctx *)mlink->ctx->drv_priv; in mt76_vif_phy()
864 return ctx->phy; in mt76_vif_phy()
869 struct sk_buff *skb = phy->rx_amsdu[q].head; in mt76_rx_release_amsdu()
870 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_rx_release_amsdu()
871 struct mt76_dev *dev = phy->dev; in mt76_rx_release_amsdu()
873 phy->rx_amsdu[q].head = NULL; in mt76_rx_release_amsdu()
874 phy->rx_amsdu[q].tail = NULL; in mt76_rx_release_amsdu()
878 * A single MSDU can be parsed as A-MSDU when the unauthenticated A-MSDU in mt76_rx_release_amsdu()
883 if (skb_shinfo(skb)->frag_list) { in mt76_rx_release_amsdu()
886 if (!(status->flag & RX_FLAG_8023)) { in mt76_rx_release_amsdu()
889 if ((status->flag & in mt76_rx_release_amsdu()
895 if (ether_addr_equal(skb->data + offset, rfc1042_header)) { in mt76_rx_release_amsdu()
900 __skb_queue_tail(&dev->rx_skb[q], skb); in mt76_rx_release_amsdu()
906 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_rx_release_burst()
908 if (phy->rx_amsdu[q].head && in mt76_rx_release_burst()
909 (!status->amsdu || status->first_amsdu || in mt76_rx_release_burst()
910 status->seqno != phy->rx_amsdu[q].seqno)) in mt76_rx_release_burst()
913 if (!phy->rx_amsdu[q].head) { in mt76_rx_release_burst()
914 phy->rx_amsdu[q].tail = &skb_shinfo(skb)->frag_list; in mt76_rx_release_burst()
915 phy->rx_amsdu[q].seqno = status->seqno; in mt76_rx_release_burst()
916 phy->rx_amsdu[q].head = skb; in mt76_rx_release_burst()
918 *phy->rx_amsdu[q].tail = skb; in mt76_rx_release_burst()
919 phy->rx_amsdu[q].tail = &skb->next; in mt76_rx_release_burst()
922 if (!status->amsdu || status->last_amsdu) in mt76_rx_release_burst()
928 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_rx()
929 struct mt76_phy *phy = mt76_dev_phy(dev, status->phy_idx); in mt76_rx()
931 if (!test_bit(MT76_STATE_RUNNING, &phy->state)) { in mt76_rx()
937 if (phy->test.state == MT76_TM_STATE_RX_FRAMES) { in mt76_rx()
938 phy->test.rx_stats.packets[q]++; in mt76_rx()
939 if (status->flag & RX_FLAG_FAILED_FCS_CRC) in mt76_rx()
940 phy->test.rx_stats.fcs_error[q]++; in mt76_rx()
954 q = phy->q_tx[i]; in mt76_has_tx_pending()
955 if (q && q->queued) in mt76_has_tx_pending()
969 if (c->band == NL80211_BAND_2GHZ) in mt76_channel_state()
970 msband = &phy->sband_2g; in mt76_channel_state()
971 else if (c->band == NL80211_BAND_6GHZ) in mt76_channel_state()
972 msband = &phy->sband_6g; in mt76_channel_state()
974 msband = &phy->sband_5g; in mt76_channel_state()
976 idx = c - &msband->sband.channels[0]; in mt76_channel_state()
977 return &msband->chan[idx]; in mt76_channel_state()
982 struct mt76_channel_state *state = phy->chan_state; in mt76_update_survey_active_time()
984 state->cc_active += ktime_to_us(ktime_sub(time, in mt76_update_survey_active_time()
985 phy->survey_time)); in mt76_update_survey_active_time()
986 phy->survey_time = time; in mt76_update_survey_active_time()
992 struct mt76_dev *dev = phy->dev; in mt76_update_survey()
995 if (dev->drv->update_survey) in mt76_update_survey()
996 dev->drv->update_survey(phy); in mt76_update_survey()
1001 if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) { in mt76_update_survey()
1002 struct mt76_channel_state *state = phy->chan_state; in mt76_update_survey()
1004 spin_lock_bh(&dev->cc_lock); in mt76_update_survey()
1005 state->cc_bss_rx += dev->cur_cc_bss_rx; in mt76_update_survey()
1006 dev->cur_cc_bss_rx = 0; in mt76_update_survey()
1007 spin_unlock_bh(&dev->cc_lock); in mt76_update_survey()
1015 struct mt76_dev *dev = phy->dev; in __mt76_set_channel()
1019 set_bit(MT76_RESET, &phy->state); in __mt76_set_channel()
1021 mt76_worker_disable(&dev->tx_worker); in __mt76_set_channel()
1022 wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout); in __mt76_set_channel()
1025 if (phy->chandef.chan->center_freq != chandef->chan->center_freq || in __mt76_set_channel()
1026 phy->chandef.width != chandef->width) in __mt76_set_channel()
1027 phy->dfs_state = MT_DFS_STATE_UNKNOWN; in __mt76_set_channel()
1029 phy->chandef = *chandef; in __mt76_set_channel()
1030 phy->chan_state = mt76_channel_state(phy, chandef->chan); in __mt76_set_channel()
1031 phy->offchannel = offchannel; in __mt76_set_channel()
1034 phy->main_chandef = *chandef; in __mt76_set_channel()
1036 if (chandef->chan != phy->main_chandef.chan) in __mt76_set_channel()
1037 memset(phy->chan_state, 0, sizeof(*phy->chan_state)); in __mt76_set_channel()
1039 ret = dev->drv->set_channel(phy); in __mt76_set_channel()
1041 clear_bit(MT76_RESET, &phy->state); in __mt76_set_channel()
1042 mt76_worker_enable(&dev->tx_worker); in __mt76_set_channel()
1043 mt76_worker_schedule(&dev->tx_worker); in __mt76_set_channel()
1051 struct mt76_dev *dev = phy->dev; in mt76_set_channel()
1054 cancel_delayed_work_sync(&phy->mac_work); in mt76_set_channel()
1056 mutex_lock(&dev->mutex); in mt76_set_channel()
1058 mutex_unlock(&dev->mutex); in mt76_set_channel()
1065 struct ieee80211_hw *hw = phy->hw; in mt76_update_channel()
1066 struct cfg80211_chan_def *chandef = &hw->conf.chandef; in mt76_update_channel()
1067 bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL; in mt76_update_channel()
1069 phy->radar_enabled = hw->conf.radar_enabled; in mt76_update_channel()
1078 if (*idx < phy->sband_2g.sband.n_channels) in mt76_get_survey_sband()
1079 return &phy->sband_2g; in mt76_get_survey_sband()
1081 *idx -= phy->sband_2g.sband.n_channels; in mt76_get_survey_sband()
1082 if (*idx < phy->sband_5g.sband.n_channels) in mt76_get_survey_sband()
1083 return &phy->sband_5g; in mt76_get_survey_sband()
1085 *idx -= phy->sband_5g.sband.n_channels; in mt76_get_survey_sband()
1086 if (*idx < phy->sband_6g.sband.n_channels) in mt76_get_survey_sband()
1087 return &phy->sband_6g; in mt76_get_survey_sband()
1089 *idx -= phy->sband_6g.sband.n_channels; in mt76_get_survey_sband()
1096 struct mt76_phy *phy = hw->priv; in mt76_get_survey()
1097 struct mt76_dev *dev = phy->dev; in mt76_get_survey()
1104 mutex_lock(&dev->mutex); in mt76_get_survey()
1106 for (phy_idx = 0; phy_idx < ARRAY_SIZE(dev->phys); phy_idx++) { in mt76_get_survey()
1108 phy = dev->phys[phy_idx]; in mt76_get_survey()
1109 if (!phy || phy->hw != hw) in mt76_get_survey()
1114 if (idx == 0 && phy->dev->drv->update_survey) in mt76_get_survey()
1117 if (sband || !hw->wiphy->n_radio) in mt76_get_survey()
1122 ret = -ENOENT; in mt76_get_survey()
1126 chan = &sband->sband.channels[idx]; in mt76_get_survey()
1130 survey->channel = chan; in mt76_get_survey()
1131 survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY; in mt76_get_survey()
1132 survey->filled |= dev->drv->survey_flags; in mt76_get_survey()
1133 if (state->noise) in mt76_get_survey()
1134 survey->filled |= SURVEY_INFO_NOISE_DBM; in mt76_get_survey()
1136 if (chan == phy->main_chandef.chan) { in mt76_get_survey()
1137 survey->filled |= SURVEY_INFO_IN_USE; in mt76_get_survey()
1139 if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) in mt76_get_survey()
1140 survey->filled |= SURVEY_INFO_TIME_BSS_RX; in mt76_get_survey()
1143 survey->time_busy = div_u64(state->cc_busy, 1000); in mt76_get_survey()
1144 survey->time_rx = div_u64(state->cc_rx, 1000); in mt76_get_survey()
1145 survey->time = div_u64(state->cc_active, 1000); in mt76_get_survey()
1146 survey->noise = state->noise; in mt76_get_survey()
1148 spin_lock_bh(&dev->cc_lock); in mt76_get_survey()
1149 survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000); in mt76_get_survey()
1150 survey->time_tx = div_u64(state->cc_tx, 1000); in mt76_get_survey()
1151 spin_unlock_bh(&dev->cc_lock); in mt76_get_survey()
1154 mutex_unlock(&dev->mutex); in mt76_get_survey()
1166 wcid->rx_check_pn = false; in mt76_wcid_key_setup()
1171 if (key->cipher != WLAN_CIPHER_SUITE_CCMP) in mt76_wcid_key_setup()
1174 wcid->rx_check_pn = true; in mt76_wcid_key_setup()
1176 /* data frame */ in mt76_wcid_key_setup()
1179 memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn)); in mt76_wcid_key_setup()
1183 ieee80211_get_key_rx_seq(key, -1, &seq); in mt76_wcid_key_setup()
1184 memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn)); in mt76_wcid_key_setup()
1191 int signal = -128; in mt76_rx_signal()
1205 diff = signal - cur; in mt76_rx_signal()
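Only the initialisation of signal and the diff computation of mt76_rx_signal() match here; the surrounding loop folds the per-chain RSSI values into a single estimate. A hedged reconstruction of that combining step (the exact dB increments are assumptions based on the usual power-addition approximation):

/* Sketch: approximate power addition in dB across valid chains.  Two equal
 * signals gain about 3 dB, nearby ones progressively less.
 */
static int example_combine_signal(u8 chain_mask, const s8 *chain_signal)
{
        int signal = -128;
        u8 chains;

        for (chains = chain_mask; chains; chains >>= 1, chain_signal++) {
                int cur, diff;

                cur = *chain_signal;
                if (!(chains & BIT(0)) || cur > 0)
                        continue;

                if (cur > signal)
                        swap(cur, signal);

                diff = signal - cur;
                if (diff == 0)
                        signal += 3;
                else if (diff <= 2)
                        signal += 2;
                else if (diff <= 6)
                        signal += 1;
        }

        return signal;
}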
1227 mstat = *((struct mt76_rx_status *)skb->cb); in mt76_rx_convert()
1230 status->flag = mstat.flag; in mt76_rx_convert()
1231 status->freq = mstat.freq; in mt76_rx_convert()
1232 status->enc_flags = mstat.enc_flags; in mt76_rx_convert()
1233 status->encoding = mstat.encoding; in mt76_rx_convert()
1234 status->bw = mstat.bw; in mt76_rx_convert()
1235 if (status->encoding == RX_ENC_EHT) { in mt76_rx_convert()
1236 status->eht.ru = mstat.eht.ru; in mt76_rx_convert()
1237 status->eht.gi = mstat.eht.gi; in mt76_rx_convert()
1239 status->he_ru = mstat.he_ru; in mt76_rx_convert()
1240 status->he_gi = mstat.he_gi; in mt76_rx_convert()
1241 status->he_dcm = mstat.he_dcm; in mt76_rx_convert()
1243 status->rate_idx = mstat.rate_idx; in mt76_rx_convert()
1244 status->nss = mstat.nss; in mt76_rx_convert()
1245 status->band = mstat.band; in mt76_rx_convert()
1246 status->signal = mstat.signal; in mt76_rx_convert()
1247 status->chains = mstat.chains; in mt76_rx_convert()
1248 status->ampdu_reference = mstat.ampdu_ref; in mt76_rx_convert()
1249 status->device_timestamp = mstat.timestamp; in mt76_rx_convert()
1250 status->mactime = mstat.timestamp; in mt76_rx_convert()
1251 status->signal = mt76_rx_signal(mstat.chains, mstat.chain_signal); in mt76_rx_convert()
1252 if (status->signal <= -128) in mt76_rx_convert()
1253 status->flag |= RX_FLAG_NO_SIGNAL_VAL; in mt76_rx_convert()
1255 if (ieee80211_is_beacon(hdr->frame_control) || in mt76_rx_convert()
1256 ieee80211_is_probe_resp(hdr->frame_control)) in mt76_rx_convert()
1257 status->boottime_ns = ktime_get_boottime_ns(); in mt76_rx_convert()
1259 BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb)); in mt76_rx_convert()
1260 BUILD_BUG_ON(sizeof(status->chain_signal) != in mt76_rx_convert()
1262 memcpy(status->chain_signal, mstat.chain_signal, in mt76_rx_convert()
1266 status->link_valid = mstat.wcid->link_valid; in mt76_rx_convert()
1267 status->link_id = mstat.wcid->link_id; in mt76_rx_convert()
1277 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_check_ccmp_pn()
1278 struct mt76_wcid *wcid = status->wcid; in mt76_check_ccmp_pn()
1283 if (!(status->flag & RX_FLAG_DECRYPTED)) in mt76_check_ccmp_pn()
1286 if (status->flag & RX_FLAG_ONLY_MONITOR) in mt76_check_ccmp_pn()
1289 if (!wcid || !wcid->rx_check_pn) in mt76_check_ccmp_pn()
1292 security_idx = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK; in mt76_check_ccmp_pn()
1293 if (status->flag & RX_FLAG_8023) in mt76_check_ccmp_pn()
1297 if (!(status->flag & RX_FLAG_IV_STRIPPED)) { in mt76_check_ccmp_pn()
1303 !ieee80211_is_first_frag(hdr->frame_control)) in mt76_check_ccmp_pn()
1307 /* IEEE 802.11-2020, 12.5.3.4.4 "PN and replay detection" c): in mt76_check_ccmp_pn()
1313 if (ieee80211_is_mgmt(hdr->frame_control) && in mt76_check_ccmp_pn()
1314 !ieee80211_has_tods(hdr->frame_control)) in mt76_check_ccmp_pn()
1318 BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0])); in mt76_check_ccmp_pn()
1319 ret = memcmp(status->iv, wcid->rx_key_pn[security_idx], in mt76_check_ccmp_pn()
1320 sizeof(status->iv)); in mt76_check_ccmp_pn()
1322 status->flag |= RX_FLAG_ONLY_MONITOR; in mt76_check_ccmp_pn()
1326 memcpy(wcid->rx_key_pn[security_idx], status->iv, sizeof(status->iv)); in mt76_check_ccmp_pn()
1328 if (status->flag & RX_FLAG_IV_STRIPPED) in mt76_check_ccmp_pn()
1329 status->flag |= RX_FLAG_PN_VALIDATED; in mt76_check_ccmp_pn()
1336 struct mt76_wcid *wcid = status->wcid; in mt76_airtime_report()
1338 .enc_flags = status->enc_flags, in mt76_airtime_report()
1339 .rate_idx = status->rate_idx, in mt76_airtime_report()
1340 .encoding = status->encoding, in mt76_airtime_report()
1341 .band = status->band, in mt76_airtime_report()
1342 .nss = status->nss, in mt76_airtime_report()
1343 .bw = status->bw, in mt76_airtime_report()
1347 u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK; in mt76_airtime_report()
1349 airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len); in mt76_airtime_report()
1350 spin_lock(&dev->cc_lock); in mt76_airtime_report()
1351 dev->cur_cc_bss_rx += airtime; in mt76_airtime_report()
1352 spin_unlock(&dev->cc_lock); in mt76_airtime_report()
1354 if (!wcid || !wcid->sta) in mt76_airtime_report()
1367 if (!dev->rx_ampdu_len) in mt76_airtime_flush_ampdu()
1370 wcid_idx = dev->rx_ampdu_status.wcid_idx; in mt76_airtime_flush_ampdu()
1371 if (wcid_idx < ARRAY_SIZE(dev->wcid)) in mt76_airtime_flush_ampdu()
1372 wcid = rcu_dereference(dev->wcid[wcid_idx]); in mt76_airtime_flush_ampdu()
1375 dev->rx_ampdu_status.wcid = wcid; in mt76_airtime_flush_ampdu()
1377 mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len); in mt76_airtime_flush_ampdu()
1379 dev->rx_ampdu_len = 0; in mt76_airtime_flush_ampdu()
1380 dev->rx_ampdu_ref = 0; in mt76_airtime_flush_ampdu()
1386 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_airtime_check()
1387 struct mt76_wcid *wcid = status->wcid; in mt76_airtime_check()
1389 if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)) in mt76_airtime_check()
1392 if (!wcid || !wcid->sta) { in mt76_airtime_check()
1395 if (status->flag & RX_FLAG_8023) in mt76_airtime_check()
1398 if (!ether_addr_equal(hdr->addr1, dev->phy.macaddr)) in mt76_airtime_check()
1404 if (!(status->flag & RX_FLAG_AMPDU_DETAILS) || in mt76_airtime_check()
1405 status->ampdu_ref != dev->rx_ampdu_ref) in mt76_airtime_check()
1408 if (status->flag & RX_FLAG_AMPDU_DETAILS) { in mt76_airtime_check()
1409 if (!dev->rx_ampdu_len || in mt76_airtime_check()
1410 status->ampdu_ref != dev->rx_ampdu_ref) { in mt76_airtime_check()
1411 dev->rx_ampdu_status = *status; in mt76_airtime_check()
1412 dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff; in mt76_airtime_check()
1413 dev->rx_ampdu_ref = status->ampdu_ref; in mt76_airtime_check()
1416 dev->rx_ampdu_len += skb->len; in mt76_airtime_check()
1420 mt76_airtime_report(dev, status, skb->len); in mt76_airtime_check()
1426 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_check_sta()
1430 struct mt76_wcid *wcid = status->wcid; in mt76_check_sta()
1431 u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK; in mt76_check_sta()
1434 hw = mt76_phy_hw(dev, status->phy_idx); in mt76_check_sta()
1435 if (ieee80211_is_pspoll(hdr->frame_control) && !wcid && in mt76_check_sta()
1436 !(status->flag & RX_FLAG_8023)) { in mt76_check_sta()
1437 sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL); in mt76_check_sta()
1439 wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv; in mt76_check_sta()
1444 if (!wcid || !wcid->sta) in mt76_check_sta()
1449 if (status->signal <= 0) in mt76_check_sta()
1450 ewma_signal_add(&wcid->rssi, -status->signal); in mt76_check_sta()
1452 wcid->inactive_count = 0; in mt76_check_sta()
1454 if (status->flag & RX_FLAG_8023) in mt76_check_sta()
1457 if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags)) in mt76_check_sta()
1460 if (ieee80211_is_pspoll(hdr->frame_control)) { in mt76_check_sta()
1465 if (ieee80211_has_morefrags(hdr->frame_control) || in mt76_check_sta()
1466 !(ieee80211_is_mgmt(hdr->frame_control) || in mt76_check_sta()
1467 ieee80211_is_data(hdr->frame_control))) in mt76_check_sta()
1470 ps = ieee80211_has_pm(hdr->frame_control); in mt76_check_sta()
1472 if (ps && (ieee80211_is_data_qos(hdr->frame_control) || in mt76_check_sta()
1473 ieee80211_is_qos_nullfunc(hdr->frame_control))) in mt76_check_sta()
1476 if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps) in mt76_check_sta()
1480 set_bit(MT_WCID_FLAG_PS, &wcid->flags); in mt76_check_sta()
1482 if (dev->drv->sta_ps) in mt76_check_sta()
1483 dev->drv->sta_ps(dev, sta, ps); in mt76_check_sta()
1486 clear_bit(MT_WCID_FLAG_PS, &wcid->flags); in mt76_check_sta()
1503 spin_lock(&dev->rx_lock); in mt76_rx_complete()
1505 struct sk_buff *nskb = skb_shinfo(skb)->frag_list; in mt76_rx_complete()
1508 skb_shinfo(skb)->frag_list = NULL; in mt76_rx_complete()
1515 nskb = nskb->next; in mt76_rx_complete()
1516 skb->next = NULL; in mt76_rx_complete()
1522 spin_unlock(&dev->rx_lock); in mt76_rx_complete()
1543 while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) { in mt76_rx_poll_complete()
1545 if (mtk_wed_device_active(&dev->mmio.wed)) in mt76_rx_poll_complete()
1559 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv; in mt76_sta_add()
1560 struct mt76_dev *dev = phy->dev; in mt76_sta_add()
1564 mutex_lock(&dev->mutex); in mt76_sta_add()
1566 ret = dev->drv->sta_add(dev, vif, sta); in mt76_sta_add()
1570 for (i = 0; i < ARRAY_SIZE(sta->txq); i++) { in mt76_sta_add()
1573 if (!sta->txq[i]) in mt76_sta_add()
1576 mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv; in mt76_sta_add()
1577 mtxq->wcid = wcid->idx; in mt76_sta_add()
1580 ewma_signal_init(&wcid->rssi); in mt76_sta_add()
1581 rcu_assign_pointer(dev->wcid[wcid->idx], wcid); in mt76_sta_add()
1582 phy->num_sta++; in mt76_sta_add()
1584 mt76_wcid_init(wcid, phy->band_idx); in mt76_sta_add()
1586 mutex_unlock(&dev->mutex); in mt76_sta_add()
1594 struct mt76_dev *dev = phy->dev; in __mt76_sta_remove()
1595 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv; in __mt76_sta_remove()
1596 int i, idx = wcid->idx; in __mt76_sta_remove()
1598 for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++) in __mt76_sta_remove()
1601 if (dev->drv->sta_remove) in __mt76_sta_remove()
1602 dev->drv->sta_remove(dev, vif, sta); in __mt76_sta_remove()
1606 mt76_wcid_mask_clear(dev->wcid_mask, idx); in __mt76_sta_remove()
1607 phy->num_sta--; in __mt76_sta_remove()
1615 struct mt76_dev *dev = phy->dev; in mt76_sta_remove()
1617 mutex_lock(&dev->mutex); in mt76_sta_remove()
1619 mutex_unlock(&dev->mutex); in mt76_sta_remove()
1627 struct mt76_phy *phy = hw->priv; in mt76_sta_state()
1628 struct mt76_dev *dev = phy->dev; in mt76_sta_state()
1633 return -EINVAL; in mt76_sta_state()
1643 if (!dev->drv->sta_event) in mt76_sta_state()
1658 return dev->drv->sta_event(dev, vif, sta, ev); in mt76_sta_state()
1665 struct mt76_phy *phy = hw->priv; in mt76_sta_pre_rcu_remove()
1666 struct mt76_dev *dev = phy->dev; in mt76_sta_pre_rcu_remove()
1667 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv; in mt76_sta_pre_rcu_remove()
1669 mutex_lock(&dev->mutex); in mt76_sta_pre_rcu_remove()
1670 spin_lock_bh(&dev->status_lock); in mt76_sta_pre_rcu_remove()
1671 rcu_assign_pointer(dev->wcid[wcid->idx], NULL); in mt76_sta_pre_rcu_remove()
1672 spin_unlock_bh(&dev->status_lock); in mt76_sta_pre_rcu_remove()
1673 mutex_unlock(&dev->mutex); in mt76_sta_pre_rcu_remove()
1679 wcid->hw_key_idx = -1; in mt76_wcid_init()
1680 wcid->phy_idx = band_idx; in mt76_wcid_init()
1682 INIT_LIST_HEAD(&wcid->tx_list); in mt76_wcid_init()
1683 skb_queue_head_init(&wcid->tx_pending); in mt76_wcid_init()
1684 skb_queue_head_init(&wcid->tx_offchannel); in mt76_wcid_init()
1686 INIT_LIST_HEAD(&wcid->list); in mt76_wcid_init()
1687 idr_init(&wcid->pktid); in mt76_wcid_init()
1689 INIT_LIST_HEAD(&wcid->poll_list); in mt76_wcid_init()
1695 struct mt76_phy *phy = mt76_dev_phy(dev, wcid->phy_idx); in mt76_wcid_cleanup()
1701 mt76_tx_status_skb_get(dev, wcid, -1, &list); in mt76_wcid_cleanup()
1704 idr_destroy(&wcid->pktid); in mt76_wcid_cleanup()
1706 spin_lock_bh(&phy->tx_lock); in mt76_wcid_cleanup()
1708 if (!list_empty(&wcid->tx_list)) in mt76_wcid_cleanup()
1709 list_del_init(&wcid->tx_list); in mt76_wcid_cleanup()
1711 spin_lock(&wcid->tx_pending.lock); in mt76_wcid_cleanup()
1712 skb_queue_splice_tail_init(&wcid->tx_pending, &list); in mt76_wcid_cleanup()
1713 spin_unlock(&wcid->tx_pending.lock); in mt76_wcid_cleanup()
1715 spin_unlock_bh(&phy->tx_lock); in mt76_wcid_cleanup()
1726 if (test_bit(MT76_MCU_RESET, &dev->phy.state)) in mt76_wcid_add_poll()
1729 spin_lock_bh(&dev->sta_poll_lock); in mt76_wcid_add_poll()
1730 if (list_empty(&wcid->poll_list)) in mt76_wcid_add_poll()
1731 list_add_tail(&wcid->poll_list, &dev->sta_poll_list); in mt76_wcid_add_poll()
1732 spin_unlock_bh(&dev->sta_poll_lock); in mt76_wcid_add_poll()
1743 return -EINVAL; in mt76_get_txpower()
1745 n_chains = hweight16(phy->chainmask); in mt76_get_txpower()
1747 *dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2); in mt76_get_txpower()
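phy->txpower_cur and the per-chain delta are kept in 0.5 dB steps, which is why the result is halved with DIV_ROUND_UP. A small worked example; the delta value is illustrative, not taken from this listing:

        /* Example: with 2 TX chains a typical per-path delta is 3 dB, i.e. 6
         * half-dB steps.  A txpower_cur of 34 half-dB then reports
         * DIV_ROUND_UP(34 + 6, 2) == 20 dBm.
         */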
1756 struct mt76_phy *phy = hw->priv; in mt76_init_sar_power()
1757 const struct cfg80211_sar_capa *capa = hw->wiphy->sar_capa; in mt76_init_sar_power()
1760 if (sar->type != NL80211_SAR_TYPE_POWER || !sar->num_sub_specs) in mt76_init_sar_power()
1761 return -EINVAL; in mt76_init_sar_power()
1763 for (i = 0; i < sar->num_sub_specs; i++) { in mt76_init_sar_power()
1764 u32 index = sar->sub_specs[i].freq_range_index; in mt76_init_sar_power()
1766 s32 power = sar->sub_specs[i].power >> 1; in mt76_init_sar_power()
1768 if (power > 127 || power < -127) in mt76_init_sar_power()
1771 phy->frp[index].range = &capa->freq_ranges[index]; in mt76_init_sar_power()
1772 phy->frp[index].power = power; in mt76_init_sar_power()
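cfg80211 passes each SAR sub-spec power in 0.25 dBm units, so the >> 1 above converts it into the driver's 0.5 dB scale before it is stored. A short worked example, assuming those standard units:

        /* Example: a 20 dBm SAR limit arrives as sub_specs[i].power == 80
         * (0.25 dBm units); "power >> 1" stores 40, i.e. 20 dBm expressed in
         * the 0.5 dB steps used for phy->frp[index].power.
         */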
1783 const struct cfg80211_sar_capa *capa = phy->hw->wiphy->sar_capa; in mt76_get_sar_power()
1786 if (!capa || !phy->frp) in mt76_get_sar_power()
1789 if (power > 127 || power < -127) in mt76_get_sar_power()
1792 freq = ieee80211_channel_to_frequency(chan->hw_value, chan->band); in mt76_get_sar_power()
1793 for (i = 0 ; i < capa->num_freq_ranges; i++) { in mt76_get_sar_power()
1794 if (phy->frp[i].range && in mt76_get_sar_power()
1795 freq >= phy->frp[i].range->start_freq && in mt76_get_sar_power()
1796 freq < phy->frp[i].range->end_freq) { in mt76_get_sar_power()
1797 power = min_t(int, phy->frp[i].power, power); in mt76_get_sar_power()
1809 if (vif->bss_conf.csa_active && ieee80211_beacon_cntdwn_is_complete(vif, 0)) in __mt76_csa_finish()
1815 if (!dev->csa_complete) in mt76_csa_finish()
1818 ieee80211_iterate_active_interfaces_atomic(dev->hw, in mt76_csa_finish()
1822 dev->csa_complete = 0; in mt76_csa_finish()
1831 if (!vif->bss_conf.csa_active) in __mt76_csa_check()
1834 dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif, 0); in __mt76_csa_check()
1839 ieee80211_iterate_active_interfaces_atomic(dev->hw, in mt76_csa_check()
1854 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb; in mt76_insert_ccmp_hdr()
1856 u8 *hdr, *pn = status->iv; in mt76_insert_ccmp_hdr()
1859 memmove(skb->data, skb->data + 8, hdr_len); in mt76_insert_ccmp_hdr()
1860 hdr = skb->data + hdr_len; in mt76_insert_ccmp_hdr()
1871 status->flag &= ~RX_FLAG_IV_STRIPPED; in mt76_insert_ccmp_hdr()
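Between the memmove that reopens 8 bytes behind the 802.11 header and the final flag update, the function rebuilds the CCMP header from status->iv. A hedged sketch of that elided fill; the PN byte ordering and the key_id parameter name are assumptions:

        /* Sketch of the elided header rebuild: byte 3 carries the ExtIV bit
         * (0x20) plus the key index in its top two bits, the remaining bytes
         * carry the 48-bit packet number recovered from status->iv.
         */
        hdr[0] = pn[5];
        hdr[1] = pn[4];
        hdr[2] = 0;
        hdr[3] = 0x20 | (key_id << 6);
        hdr[4] = pn[3];
        hdr[5] = pn[2];
        hdr[6] = pn[1];
        hdr[7] = pn[0];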
1879 bool is_2g = sband->band == NL80211_BAND_2GHZ; in mt76_get_rate()
1880 int i, offset = 0, len = sband->n_bitrates; in mt76_get_rate()
1892 if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx) in mt76_get_rate()
1903 struct mt76_phy *phy = hw->priv; in mt76_sw_scan()
1905 set_bit(MT76_SCANNING, &phy->state); in mt76_sw_scan()
1911 struct mt76_phy *phy = hw->priv; in mt76_sw_scan_complete()
1913 clear_bit(MT76_SCANNING, &phy->state); in mt76_sw_scan_complete()
1919 struct mt76_phy *phy = hw->priv; in mt76_get_antenna()
1920 struct mt76_dev *dev = phy->dev; in mt76_get_antenna()
1923 mutex_lock(&dev->mutex); in mt76_get_antenna()
1925 for (i = 0; i < ARRAY_SIZE(dev->phys); i++) in mt76_get_antenna()
1926 if (dev->phys[i] && dev->phys[i]->hw == hw) in mt76_get_antenna()
1927 *tx_ant |= dev->phys[i]->chainmask; in mt76_get_antenna()
1929 mutex_unlock(&dev->mutex); in mt76_get_antenna()
1942 hwq = devm_kzalloc(dev->dev, sizeof(*hwq), GFP_KERNEL); in mt76_init_queue()
1944 return ERR_PTR(-ENOMEM); in mt76_init_queue()
1946 hwq->flags = flags; in mt76_init_queue()
1947 hwq->wed = wed; in mt76_init_queue()
1949 err = dev->queue_ops->alloc(dev, hwq, idx, n_desc, 0, ring_base); in mt76_init_queue()
1960 int i, ei = wi->initial_stat_idx; in mt76_ethtool_worker()
1961 u64 *data = wi->data; in mt76_ethtool_worker() local
1963 wi->sta_count++; in mt76_ethtool_worker()
1965 data[ei++] += stats->tx_mode[MT_PHY_TYPE_CCK]; in mt76_ethtool_worker()
1966 data[ei++] += stats->tx_mode[MT_PHY_TYPE_OFDM]; in mt76_ethtool_worker()
1967 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HT]; in mt76_ethtool_worker()
1968 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HT_GF]; in mt76_ethtool_worker()
1969 data[ei++] += stats->tx_mode[MT_PHY_TYPE_VHT]; in mt76_ethtool_worker()
1970 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_SU]; in mt76_ethtool_worker()
1971 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_EXT_SU]; in mt76_ethtool_worker()
1972 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_TB]; in mt76_ethtool_worker()
1973 data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_MU]; in mt76_ethtool_worker()
1975 data[ei++] += stats->tx_mode[MT_PHY_TYPE_EHT_SU]; in mt76_ethtool_worker()
1976 data[ei++] += stats->tx_mode[MT_PHY_TYPE_EHT_TRIG]; in mt76_ethtool_worker()
1977 data[ei++] += stats->tx_mode[MT_PHY_TYPE_EHT_MU]; in mt76_ethtool_worker()
1980 for (i = 0; i < (ARRAY_SIZE(stats->tx_bw) - !eht); i++) in mt76_ethtool_worker()
1981 data[ei++] += stats->tx_bw[i]; in mt76_ethtool_worker()
1984 data[ei++] += stats->tx_mcs[i]; in mt76_ethtool_worker()
1987 data[ei++] += stats->tx_nss[i]; in mt76_ethtool_worker()
1989 wi->worker_stat_count = ei - wi->initial_stat_idx; in mt76_ethtool_worker()
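mt76_ethtool_worker() is meant to run once per station so the counters accumulate into the caller's data array. A hedged usage sketch from a driver's ethtool stats path; the wrapper, the example_sta layout and the location of the per-station stats are assumptions:

struct example_sta {
        struct mt76_wcid wcid;  /* assumed to embed struct mt76_sta_stats */
};

static void example_ethtool_worker(void *wi_data, struct ieee80211_sta *sta)
{
        struct mt76_ethtool_worker_info *wi = wi_data;
        struct example_sta *msta = (struct example_sta *)sta->drv_priv;

        /* drivers typically filter by vif/phy here before accumulating */
        mt76_ethtool_worker(wi, &msta->wcid.stats, false);
}

/* Called from the driver's .get_et_stats via
 * ieee80211_iterate_stations_atomic(hw, example_ethtool_worker, &wi),
 * with wi.data pointing at the ethtool u64 array and wi.initial_stat_idx
 * set past the per-device counters.
 */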
1993 void mt76_ethtool_page_pool_stats(struct mt76_dev *dev, u64 *data, int *index) in mt76_ethtool_page_pool_stats() argument
2000 page_pool_get_stats(dev->q_rx[i].page_pool, &stats); in mt76_ethtool_page_pool_stats()
2002 page_pool_ethtool_stats_get(data, &stats); in mt76_ethtool_page_pool_stats()
2010 struct ieee80211_hw *hw = phy->hw; in mt76_phy_dfs_state()
2011 struct mt76_dev *dev = phy->dev; in mt76_phy_dfs_state()
2013 if (dev->region == NL80211_DFS_UNSET || in mt76_phy_dfs_state()
2014 test_bit(MT76_SCANNING, &phy->state)) in mt76_phy_dfs_state()
2017 if (!phy->radar_enabled) { in mt76_phy_dfs_state()
2018 if ((hw->conf.flags & IEEE80211_CONF_MONITOR) && in mt76_phy_dfs_state()
2019 (phy->chandef.chan->flags & IEEE80211_CHAN_RADAR)) in mt76_phy_dfs_state()
2025 if (!cfg80211_reg_can_beacon(hw->wiphy, &phy->chandef, NL80211_IFTYPE_AP)) in mt76_phy_dfs_state()
2034 struct mt76_vif_link *mlink = (struct mt76_vif_link *)vif->drv_priv; in mt76_vif_cleanup()
2035 struct mt76_vif_data *mvif = mlink->mvif; in mt76_vif_cleanup()
2037 rcu_assign_pointer(mvif->link[0], NULL); in mt76_vif_cleanup()
2039 if (mvif->roc_phy) in mt76_vif_cleanup()
2040 mt76_abort_roc(mvif->roc_phy); in mt76_vif_cleanup()