// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

void mt76_testmode_tx_pending(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct mt76_phy *phy = &dev->phy;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 && td->tx_queued - td->tx_done < 1000 &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, skb_get(skb), wcid,
						   NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

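/*
 * Build the template frame used for TX tests: a zero-filled, non-QoS data
 * frame addressed to/from the local MAC address.  The configured test
 * parameters are translated into mac80211 rate-control info so that legacy
 * CCK/OFDM, HT and VHT rates are encoded directly in the skb; modes beyond
 * VHT (e.g. HE) are left to the driver's own rate handling.
 */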
static int
mt76_testmode_tx_init(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct mt76_phy *phy = &dev->phy;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	struct sk_buff *skb;
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	skb = alloc_skb(td->tx_msdu_len, GFP_KERNEL);
	if (!skb)
		return -ENOMEM;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = skb;
	hdr = __skb_put_zero(skb, td->tx_msdu_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr2, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr3, phy->macaddr, sizeof(phy->macaddr));

	info = IEEE80211_SKB_CB(skb);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	skb_set_queue_mapping(skb, IEEE80211_AC_BE);

	return 0;
}

static void
mt76_testmode_tx_start(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

static void
mt76_testmode_tx_stop(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued, 10 * HZ);

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

static void
mt76_testmode_init_defaults(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	if (td->tx_msdu_len > 0)
		return;

	td->tx_msdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

static int
__mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = dev->test.state;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(dev);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(dev);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(dev, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(dev);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(dev);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&dev->test.rx_stats, 0, sizeof(dev->test.rx_stats));

	dev->test.state = state;

	return 0;
}

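/*
 * External entry point for test state changes.  Testmode is only available
 * while the PHY is running in monitor mode; when neither the current nor the
 * requested state is IDLE, the change is routed through IDLE first so that
 * the previous operation is fully stopped before the next one starts.
 */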
int mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_hw *hw = dev->phy.hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &dev->phy.state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(dev, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(dev, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

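/*
 * NL80211_CMD_TESTMODE handler: parses the MT76_TM_ATTR_* attributes carried
 * in NL80211_ATTR_TESTDATA, updates the test parameters and optionally
 * triggers a state change.
 *
 * Illustrative only, not part of the driver: userspace reaches this handler
 * with a libnl-3 generic netlink message roughly like the sketch below,
 * assuming a connected socket "sk", the resolved nl80211 family id
 * "nl80211_id" and the wiphy index "phy_idx":
 *
 *	struct nl_msg *msg = nlmsg_alloc();
 *	struct nlattr *tmdata;
 *
 *	genlmsg_put(msg, NL_AUTO_PORT, NL_AUTO_SEQ, nl80211_id, 0, 0,
 *		    NL80211_CMD_TESTMODE, 0);
 *	nla_put_u32(msg, NL80211_ATTR_WIPHY, phy_idx);
 *	tmdata = nla_nest_start(msg, NL80211_ATTR_TESTDATA);
 *	nla_put_u8(msg, MT76_TM_ATTR_STATE, MT76_TM_STATE_TX_FRAMES);
 *	nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, 1000);
 *	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, MT76_TM_TX_MODE_OFDM);
 *	nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, 7);
 *	nla_nest_end(msg, tmdata);
 *	nl_send_auto(sk, msg);
 */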
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(dev, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(dev);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > IEEE80211_MAX_FRAME_LEN ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_msdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask, 1,
			   phy->antenna_mask) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		/* MT76_TM_ATTR_STATE is an NLA_U8 attribute */
		state = nla_get_u8(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(dev, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(dev, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

static int
mt76_testmode_dump_stats(struct mt76_dev *dev, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &dev->test;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	if (dev->test_ops->dump_stats)
		return dev->test_ops->dump_stats(dev, msg);

	return 0;
}

int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(dev, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(dev);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (td->mtd_name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, td->mtd_name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, td->mtd_offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_msdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);