// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */

#include <linux/random.h>
#include "mt76.h"

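/*
 * Netlink attribute policy for the testmode interface, used by
 * mt76_testmode_cmd() and mt76_testmode_dump() below.
 */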
const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_DRV_DATA] = { .type = NLA_NESTED },
};
EXPORT_SYMBOL_GPL(mt76_tm_policy);

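/*
 * Push pending test frames to the hardware queue. Runs from the tx
 * worker (scheduled by mt76_testmode_tx_start()); queues references to
 * td->tx_skb until the configured count is exhausted, the in-flight
 * limit (td->tx_queued_limit, default 1000) is reached, or the hardware
 * queue is half full.
 */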
void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, qid, skb_get(skb),
						   wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

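/*
 * Maximum MPDU length for the given TX rate mode: 7935 bytes for HT,
 * 7991 or 11454 bytes for VHT/HE depending on the VHT capabilities,
 * and IEEE80211_MAX_FRAME_LEN for everything else.
 */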
static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}

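/* Free the current test frame and clear the reference */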
static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

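/*
 * Allocate the template test frame: a from-DS data frame with a random
 * payload. Frames longer than MT_TXP_MAX_LEN bytes are built as a head
 * skb with the remainder chained onto its frag_list.
 */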
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, sizeof(*hdr));
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, td->addr[0], ETH_ALEN);
	memcpy(hdr->addr2, td->addr[1], ETH_ALEN);
	memcpy(hdr->addr3, td->addr[2], ETH_ALEN);
	skb_set_queue_mapping(head, IEEE80211_AC_BE);
	get_random_bytes(__skb_put(head, head_len - sizeof(*hdr)),
			 head_len - sizeof(*hdr));

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		get_random_bytes(__skb_put(frag, frag_len), frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);

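/*
 * Allocate the test frame and, for CCK/OFDM/HT/VHT, translate the
 * testmode rate settings into ieee80211_tx_rate info on the skb.
 * Rate modes above VHT (the HE modes) skip this setup.
 */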
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}

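/* Reset the TX counters and kick the tx worker to start transmitting */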
static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

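/*
 * Stop a running TX test: clear the pending count with the tx worker
 * disabled, wait (up to MT76_TM_TIMEOUT seconds) for the queued frames
 * to complete, then free the test frame.
 */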
static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}

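/* Bitmap tracking which testmode parameters have been explicitly set */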
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

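/*
 * Populate defaults (1024-byte MPDU, a single OFDM frame, 1 NSS, the
 * PHY's own MAC address) the first time testmode is used.
 */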
static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;

	memcpy(td->addr[0], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[1], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[2], phy->macaddr, ETH_ALEN);
}

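/*
 * Switch the testmode state: stop a running TX test first, then start
 * TX or clear the RX stats as needed once the driver's set_state
 * callback has succeeded.
 */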
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES) {
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));
	}

	phy->test.state = state;

	return 0;
}

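/*
 * Public state setter: requires the PHY to be running in monitor mode
 * for any state above OFF, and goes through IDLE first whenever neither
 * the old nor the new state is IDLE.
 */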
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

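/* Read an optional u8 attribute, validating it against [min, max] */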
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

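/*
 * Handle a testmode netlink command: parse and validate the attributes,
 * update the testmode parameters under dev->mutex, and apply a state
 * change if MT76_TM_ATTR_STATE was given.
 */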
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA],
			   &td->tx_antenna_mask, 0, 0xff) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (tb[MT76_TM_ATTR_MAC_ADDRS]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_MAC_ADDRS], rem) {
			if (nla_len(cur) != ETH_ALEN || idx >= 3)
				goto out;

			memcpy(td->addr[idx], nla_data(cur), ETH_ALEN);
			idx++;
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

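/* Append TX/RX statistics to a testmode dump message */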
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	if (dev->test_ops->dump_stats) {
		int ret;

		ret = dev->test_ops->dump_stats(phy, msg);
		if (ret)
			return ret;
	}

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	return 0;
}

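/*
 * Dump testmode status over netlink: either the statistics nest (when
 * MT76_TM_ATTR_STATS is requested) or the current configuration.
 * Everything fits in a single message; subsequent dump calls return
 * -ENOENT (tracked via cb->args[2]).
 */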
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_MAC_ADDRS)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
		if (!a)
			goto out;

		for (i = 0; i < 3; i++)
			if (nla_put(msg, i, ETH_ALEN, td->addr[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);
676