xref: /linux/drivers/net/wireless/mediatek/mt76/mac80211.c (revision 8b8ab5c2353404b87b4ecde37dbaea2f040aec1b)
// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {			\
	.band = NL80211_BAND_2GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

#define CHAN5G(_idx, _freq) {			\
	.band = NL80211_BAND_5GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput =   0 * 1024, .blink_time = 334 },
	{ .throughput =   1 * 1024, .blink_time = 260 },
	{ .throughput =   5 * 1024, .blink_time = 220 },
	{ .throughput =  10 * 1024, .blink_time = 190 },
	{ .throughput =  20 * 1024, .blink_time = 170 },
	{ .throughput =  50 * 1024, .blink_time = 150 },
	{ .throughput =  70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time =  80 },
	{ .throughput = 300 * 1024, .blink_time =  50 },
};

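/*
 * Register an LED class device for the radio. This is a no-op unless the
 * driver provided brightness_set/blink_set callbacks. A mac80211
 * throughput-based blink trigger is installed as the default trigger, and
 * the LED pin/polarity may optionally come from an "led" child node in the
 * device tree.
 */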
static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return devm_led_classdev_register(dev->dev, &dev->led_cdev);
}

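/*
 * Derive per-stream HT/VHT capabilities from the antenna mask: TX STBC is
 * only advertised with more than one spatial stream, and the HT/VHT MCS
 * maps are limited to the available stream count.
 */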
static void mt76_init_stream_cap(struct mt76_dev *dev,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(dev->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_dev *dev, bool vht)
{
	if (dev->cap.has_2ghz)
		mt76_init_stream_cap(dev, &dev->sband_2g.sband, false);
	if (dev->cap.has_5ghz)
		mt76_init_stream_cap(dev, &dev->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

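/*
 * Set up one supported band: duplicate the channel template, allocate the
 * per-channel state used for survey accounting, and fill in the HT (and
 * optionally VHT) capabilities common to all mt76 drivers.
 */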
static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;
	dev->chandef.chan = &sband->channels[0];

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
	ht_cap->ampdu_density = IEEE80211_HT_MPDU_DENSITY_4;

	mt76_init_stream_cap(dev, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->sband_2g.sband;

	return mt76_init_sband(dev, &dev->sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->sband_5g.sband;

	return mt76_init_sband(dev, &dev->sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

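/*
 * Drop a band from the wiphy if regulatory or device-tree frequency limits
 * left it with no usable (non-disabled) channels.
 */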
static void
mt76_check_sband(struct mt76_dev *dev, int band)
{
	struct ieee80211_supported_band *sband = dev->hw->wiphy->bands[band];
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found)
		return;

	sband->n_channels = 0;
	dev->hw->wiphy->bands[band] = NULL;
}

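/*
 * Allocate the ieee80211_hw together with the driver private area and
 * initialize the locks, wait queue and TX tasklet shared by all mt76
 * drivers.
 */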
struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_dev *dev;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	tasklet_init(&dev->tx_tasklet, mt76_tx_tasklet, (unsigned long)dev);

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

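/*
 * Fill in the common wiphy/hw capabilities, build the 2 GHz and 5 GHz bands
 * from the driver-provided rate table, optionally register the LED, and
 * finally hand the device over to mac80211. Note that the 5 GHz band skips
 * the first four rate-table entries (the CCK rates in the mt76 rate tables).
 */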
int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct wiphy *wiphy = hw->wiphy;
	int ret;

	dev_set_drvdata(dev->dev, dev);

	INIT_LIST_HEAD(&dev->txwi_cache);

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);

	wiphy->available_antennas_tx = dev->antenna_mask;
	wiphy->available_antennas_rx = dev->antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, PS_NULLFUNC_STACK);
	ieee80211_hw_set(hw, HOST_BROADCAST_PS_BUFFERING);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, TX_AMSDU);
	ieee80211_hw_set(hw, TX_FRAG_LIST);
	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(dev->hw->wiphy);
	mt76_check_sband(dev, NL80211_BAND_2GHZ);
	mt76_check_sband(dev, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	return ieee80211_register_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_tx_free(dev);
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	if (!test_bit(MT76_STATE_RUNNING, &dev->state)) {
		dev_kfree_skb(skb);
		return;
	}

	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_dev *dev)
{
	struct mt76_queue *q;
	int i;

	for (i = 0; i < ARRAY_SIZE(dev->q_tx); i++) {
		q = dev->q_tx[i].q;
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

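/*
 * Common channel-switch helper: record the off-channel state, wait (with a
 * HZ/5 timeout) for pending TX to drain, let the driver update the channel
 * survey, and reset the accumulated state when tuning away from the main
 * channel.
 */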
void mt76_set_channel(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	struct mt76_channel_state *state;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	if (offchannel)
		set_bit(MT76_OFFCHANNEL, &dev->state);
	else
		clear_bit(MT76_OFFCHANNEL, &dev->state);

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(dev), timeout);

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	dev->chandef = *chandef;

	if (!offchannel)
		dev->main_chan = chandef->chan;

	if (chandef->chan != dev->main_chan) {
		state = mt76_channel_state(dev, chandef->chan);
		memset(state, 0, sizeof(*state));
	}
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

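/*
 * cfg80211 survey handler: report per-channel active/busy airtime. The
 * accumulated cc_active/cc_busy counters are scaled down by 1000 to the
 * millisecond units cfg80211 expects; only the current main channel is
 * marked as in use.
 */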
int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_dev *dev = hw->priv;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	if (idx == 0 && dev->drv->update_survey)
		dev->drv->update_survey(dev);

	sband = &dev->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &dev->sband_5g;
	}

	if (idx >= sband->sband.n_channels)
		return -ENOENT;

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(dev, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	if (chan == dev->main_chan)
		survey->filled |= SURVEY_INFO_IN_USE;

	spin_lock_bh(&dev->cc_lock);
	survey->time = div_u64(state->cc_active, 1000);
	survey->time_busy = div_u64(state->cc_busy, 1000);
	spin_unlock_bh(&dev->cc_lock);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

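/*
 * Seed the per-TID CCMP packet-number trackers from mac80211's RX key
 * sequence state and enable PN (replay) checking; this only applies to
 * CCMP keys.
 */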
void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

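/*
 * Convert the driver-private mt76_rx_status stored in skb->cb into the
 * ieee80211_rx_status that mac80211 expects in the same control buffer,
 * and resolve the originating station from the WCID.
 */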
struct ieee80211_sta *mt76_rx_convert(struct sk_buff *skb)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	return wcid_to_sta(mstat.wcid);
}
EXPORT_SYMBOL(mt76_rx_convert);

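/*
 * Software replay protection: compare the received CCMP packet number
 * against the last one accepted for this TID and reject the frame if it
 * did not increase. Only applies to decrypted frames of stations with PN
 * checking enabled.
 */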
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211.
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = (struct ieee80211_hdr *)skb->data;
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

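/*
 * Per-frame station bookkeeping on the RX path: update the RSSI average,
 * reset the inactivity counter, and mirror the peer's power-save state
 * (including PS-poll and U-APSD trigger handling) into mac80211.
 */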
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;
	struct ieee80211_sta *sta;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;
	int i;

	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(dev->hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);

	if (ps)
		return;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		if (!skb_queue_empty(&mtxq->retry_q))
			ieee80211_schedule_txq(dev->hw, sta->txq[i]);
	}
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		sta = mt76_rx_convert(skb);
		ieee80211_rx_napi(dev->hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

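/*
 * NAPI poll completion: run station/PS tracking and A-MPDU reordering on
 * every frame queued by mt76_rx(), then pass the reordered frames up to
 * mac80211.
 */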
void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

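/*
 * Common station add path: let the driver allocate its hardware state,
 * attach the WCID to the station's TX queues and publish it in the
 * RCU-protected WCID table.
 */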
static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;

		mt76_txq_init(dev, sta->txq[i]);
	}

	ewma_signal_init(&wcid->rssi);
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	rcu_assign_pointer(dev->wcid[idx], NULL);
	synchronize_rcu();

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++)
		mt76_txq_remove(dev, sta->txq[i]);
	mt76_wcid_free(dev->wcid_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_dev *dev = hw->priv;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

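/*
 * Report the configured TX power. txpower_cur is kept in half-dB per-chain
 * units; round up to dBm and add a combining gain of roughly 3 dB per
 * doubling of the number of TX chains.
 */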
int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_dev *dev = hw->priv;
	int n_chains = hweight8(dev->antenna_mask);

	*dbm = DIV_ROUND_UP(dev->txpower_cur, 2);

	/* convert from per-chain power to combined
	 * output power
	 */
	switch (n_chains) {
	case 4:
		*dbm += 6;
		break;
	case 3:
		*dbm += 4;
		break;
	case 2:
		*dbm += 3;
		break;
	default:
		break;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_csa_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_csa_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

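/*
 * Re-insert the 8-byte CCMP header that the hardware stripped on RX,
 * rebuilding it from the PN saved in the RX status, so that mac80211 sees
 * the frame with its IV present (RX_FLAG_IV_STRIPPED is cleared
 * accordingly).
 */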
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

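/*
 * Map a hardware rate index to an index into the band's bitrate table.
 * CCK indices have the short-preamble bit masked off and are only valid on
 * 2.4 GHz; OFDM lookups on 2.4 GHz skip the four leading CCK entries.
 */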
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_dev *dev = hw->priv;

	set_bit(MT76_SCANNING, &dev->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = hw->priv;

	clear_bit(MT76_SCANNING, &dev->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);