xref: /linux/drivers/net/wireless/mediatek/mt76/mac80211.c (revision 17cfcb68af3bc7d5e8ae08779b1853310a2949f3)
// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {			\
	.band = NL80211_BAND_2GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

#define CHAN5G(_idx, _freq) {			\
	.band = NL80211_BAND_5GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput =   0 * 1024, .blink_time = 334 },
	{ .throughput =   1 * 1024, .blink_time = 260 },
	{ .throughput =   5 * 1024, .blink_time = 220 },
	{ .throughput =  10 * 1024, .blink_time = 190 },
	{ .throughput =  20 * 1024, .blink_time = 170 },
	{ .throughput =  50 * 1024, .blink_time = 150 },
	{ .throughput =  70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time =  80 },
	{ .throughput = 300 * 1024, .blink_time =  50 },
};

static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return devm_led_classdev_register(dev->dev, &dev->led_cdev);
}

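/*
 * Derive the advertised spatial stream capabilities from the antenna mask:
 * the HT RX MCS mask and (for VHT capable bands) the VHT RX/TX MCS maps
 * cover one MCS group per stream, and TX STBC is only advertised when more
 * than one stream is available.
 */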
static void mt76_init_stream_cap(struct mt76_dev *dev,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(dev->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_dev *dev, bool vht)
{
	if (dev->cap.has_2ghz)
		mt76_init_stream_cap(dev, &dev->sband_2g.sband, false);
	if (dev->cap.has_5ghz)
		mt76_init_stream_cap(dev, &dev->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

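/*
 * Set up one supported band: duplicate the template channel list into
 * device-managed memory, allocate the per-channel state array, attach the
 * bitrate table and fill in the default HT (and, if requested, VHT)
 * capabilities shared by all mt76 drivers.
 */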
static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;
	dev->chandef.chan = &sband->channels[0];

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
	ht_cap->ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;

	mt76_init_stream_cap(dev, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->sband_2g.sband;

	return mt76_init_sband(dev, &dev->sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->sband_5g.sband;

	return mt76_init_sband(dev, &dev->sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

static void
mt76_check_sband(struct mt76_dev *dev, int band)
{
	struct ieee80211_supported_band *sband = dev->hw->wiphy->bands[band];
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found)
		return;

	sband->n_channels = 0;
	dev->hw->wiphy->bands[band] = NULL;
}

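/*
 * Allocate the ieee80211_hw along with a driver private area of @size
 * bytes, the beginning of which holds struct mt76_dev, and initialize the
 * locks, wait queue and tx tasklet shared by all mt76 drivers.
 */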
struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_dev *dev;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	tasklet_init(&dev->tx_tasklet, mt76_tx_tasklet, (unsigned long)dev);

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

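/*
 * Typical driver probe sequence (sketch only; the foo_* names below are
 * placeholders for the bus/chip specific driver, not part of this API):
 *
 *	mdev = mt76_alloc_device(&pdev->dev, sizeof(struct foo_dev),
 *				 &foo_ops, &foo_drv_ops);
 *	if (!mdev)
 *		return -ENOMEM;
 *	... chip init; fill mdev->cap, mdev->antenna_mask, mdev->macaddr ...
 *	return mt76_register_device(mdev, true, foo_rates,
 *				    ARRAY_SIZE(foo_rates));
 *
 * Note that the 5 GHz band is registered with "rates + 4", i.e. without
 * the first four (CCK) entries of the rate table.
 */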
int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct wiphy *wiphy = hw->wiphy;
	int ret;

	dev_set_drvdata(dev->dev, dev);

	INIT_LIST_HEAD(&dev->txwi_cache);

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);

	wiphy->available_antennas_tx = dev->antenna_mask;
	wiphy->available_antennas_rx = dev->antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, PS_NULLFUNC_STACK);
	ieee80211_hw_set(hw, HOST_BROADCAST_PS_BUFFERING);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, TX_AMSDU);
	ieee80211_hw_set(hw, TX_FRAG_LIST);
	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(dev->hw->wiphy);
	mt76_check_sband(dev, NL80211_BAND_2GHZ);
	mt76_check_sband(dev, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	return ieee80211_register_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_tx_free(dev);
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	if (!test_bit(MT76_STATE_RUNNING, &dev->state)) {
		dev_kfree_skb(skb);
		return;
	}

	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_dev *dev)
{
	struct mt76_queue *q;
	int i;

	for (i = 0; i < ARRAY_SIZE(dev->q_tx); i++) {
		q = dev->q_tx[i].q;
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

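/*
 * Prepare for a channel switch: wait (up to HZ / 5 jiffies) for pending tx
 * to drain, let the driver update its survey statistics, and reset the
 * per-channel state when tuning away from the main operating channel.
 */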
void mt76_set_channel(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	struct mt76_channel_state *state;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(dev), timeout);

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	dev->chandef = *chandef;

	if (!offchannel)
		dev->main_chan = chandef->chan;

	if (chandef->chan != dev->main_chan) {
		state = mt76_channel_state(dev, chandef->chan);
		memset(state, 0, sizeof(*state));
	}
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

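/*
 * mac80211 survey callback: the flat index walks the 2 GHz channels first,
 * then the 5 GHz ones. Channel time and busy time are reported from the
 * accumulated cc_active/cc_busy counters, scaled down from their internal
 * resolution with div_u64(..., 1000).
 */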
int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_dev *dev = hw->priv;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	if (idx == 0 && dev->drv->update_survey)
		dev->drv->update_survey(dev);

	sband = &dev->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &dev->sband_5g;
	}

	if (idx >= sband->sband.n_channels)
		return -ENOENT;

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(dev, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	if (chan == dev->main_chan)
		survey->filled |= SURVEY_INFO_IN_USE;

	spin_lock_bh(&dev->cc_lock);
	survey->time = div_u64(state->cc_active, 1000);
	survey->time_busy = div_u64(state->cc_busy, 1000);
	spin_unlock_bh(&dev->cc_lock);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static struct ieee80211_sta *mt76_rx_convert(struct sk_buff *skb)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	return wcid_to_sta(mstat.wcid);
}

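/*
 * Replay protection for CCMP: compare the received packet number against
 * the last PN seen for this station/TID and reject the frame if it did not
 * increase. When the IV has not been stripped, only the first fragment is
 * checked here; mac80211 validates the remaining fragments.
 */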
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = (struct ieee80211_hdr *)skb->data;
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

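/*
 * Per-frame station bookkeeping on the rx path: update the RSSI average,
 * reset the inactivity counter and, for drivers that track powersave in
 * software (MT_WCID_FLAG_CHECK_PS), mirror PS-Poll, U-APSD triggers and
 * PM-bit transitions to the driver and to mac80211, rescheduling any tx
 * queues with pending retry frames when the station wakes up.
 */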
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_sta *sta;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;
	int i;

	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(dev->hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);

	if (ps)
		return;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		if (!skb_queue_empty(&mtxq->retry_q))
			ieee80211_schedule_txq(dev->hw, sta->txq[i]);
	}
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		sta = mt76_rx_convert(skb);
		ieee80211_rx_napi(dev->hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

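/*
 * Station add path: call into the driver, attach the station's wcid to its
 * tx queues and publish the wcid entry under RCU.
 */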
static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;

		mt76_txq_init(dev, sta->txq[i]);
	}

	ewma_signal_init(&wcid->rssi);
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

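/*
 * Tear down a station: unpublish its wcid entry and wait for a grace
 * period, stop rx aggregation sessions, flush pending tx status, remove
 * the tx queues and release the wcid index. The mt76_sta_remove() wrapper
 * below takes dev->mutex around this.
 */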
void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	rcu_assign_pointer(dev->wcid[idx], NULL);
	synchronize_rcu();

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
		mt76_txq_remove(dev, sta->txq[i]);
	mt76_wcid_free(dev->wcid_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_dev *dev = hw->priv;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_dev *dev = hw->priv;
	int n_chains = hweight8(dev->antenna_mask);

	*dbm = DIV_ROUND_UP(dev->txpower_cur, 2);

	/* convert from per-chain power to combined
	 * output power
	 */
	switch (n_chains) {
	case 4:
		*dbm += 6;
		break;
	case 3:
		*dbm += 4;
		break;
	case 2:
		*dbm += 3;
		break;
	default:
		break;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

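/*
 * Channel switch announcement helpers: mt76_csa_check() polls all active
 * interfaces and latches dev->csa_complete once a CSA countdown has
 * finished; mt76_csa_finish() then completes the switch in mac80211 and
 * clears the flag.
 */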
static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_csa_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_csa_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

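/*
 * Rebuild the 8-byte CCMP header in front of the payload from the packet
 * number saved in the rx status, for frames where the hardware stripped
 * the IV, and clear RX_FLAG_IV_STRIPPED so mac80211 parses it again.
 */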
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

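/*
 * Translate a hardware rate index into an index into the sband bitrate
 * table. CCK rates only exist on 2 GHz and have their short-preamble bit
 * masked off; OFDM lookups on the 2 GHz band skip the first four (CCK)
 * entries.
 */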
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_dev *dev = hw->priv;

	set_bit(MT76_SCANNING, &dev->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = hw->priv;

	clear_bit(MT76_SCANNING, &dev->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);
889