// SPDX-License-Identifier: GPL-2.0-only
/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	if (mt76_is_usb(dev)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
				       &dev->mt76.state));
		return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}

static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (mt76_is_usb(dev)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
				       &dev->mt76.state));
		ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}
static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = mt76x0_rf_rr(dev, offset);
	if (ret < 0)
		return ret;

	val |= ret & ~mask;

	ret = mt76x0_rf_wr(dev, offset, val);
	return ret ? ret : val;
}

static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
	return mt76x0_rf_rmw(dev, offset, mask, 0);
}

static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
			const struct mt76_reg_pair *data,
			int n)
{
	while (n-- > 0) {
		mt76x0_rf_csr_wr(dev, data->reg, data->value);
		data++;
	}
}

#define RF_RANDOM_WRITE(dev, tab) do {					\
	if (mt76_is_mmio(dev))						\
		mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));	\
	else								\
		mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)

int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
	return 0;
}

static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}

static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_band)
{
	const struct mt76x0_freq_item *freq_item;
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	enum nl80211_band band;
	bool b_sdm = false;
	u32 mac_reg;
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			b_sdm = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (b_sdm)
				freq_item = &mt76x0_sdm_frequency_plan[i];
			else
				freq_item = &mt76x0_frequency_plan[i];

			mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
				      freq_item->pllR32_b7b5);

			/* R32<4:0> pll_den: (denominator - 8) */
			mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
				      freq_item->pllR32_b4b0);

			/* R31<7:5> */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
				      freq_item->pllR31_b7b5);

			/* R31<4:0> pll_k (numerator) */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
				      freq_item->pllR31_b4b0);
			/* R30<7> sdm_reset_n */
			if (b_sdm) {
				mt76x0_rf_clear(dev, MT_RF(0, 30),
						MT_RF_SDM_RESET_MASK);
				mt76x0_rf_set(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK);
			} else {
				mt76x0_rf_rmw(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK,
					      freq_item->pllR30_b7);
			}

			/* R30<6:2> sdmmash_prbs,sin */
			mt76x0_rf_rmw(dev, MT_RF(0, 30),
				      MT_RF_SDM_MASH_PRBS_MASK,
				      freq_item->pllR30_b6b2);

			/* R30<1> sdm_bp */
			mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
				      freq_item->pllR30_b1 << 1);

			/* R30<0> R29<7:0> (hex) pll_n */
			mt76x0_rf_wr(dev, MT_RF(0, 29),
				     freq_item->pll_n & 0xff);

			mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
				      (freq_item->pll_n >> 8) & 0x1);

			/* R28<7:6> isi_iso */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
				      freq_item->pllR28_b7b6);

			/* R28<5:4> pfd_dly */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
				      freq_item->pllR28_b5b4);

			/* R28<3:2> clksel option */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
				      freq_item->pllR28_b3b2);

			/* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
			mt76x0_rf_wr(dev, MT_RF(0, 26),
				     freq_item->pll_sdm_k & 0xff);
			mt76x0_rf_wr(dev, MT_RF(0, 27),
				     (freq_item->pll_sdm_k >> 8) & 0xff);

			mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
				      (freq_item->pll_sdm_k >> 16) & 0x3);

			/* R24<1:0> xo_div */
			mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
				      freq_item->pllR24_b1b0);

			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	mt76_clear(dev, MT_RF_MISC, 0xc);

	band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
	if (mt76x02_ext_pa_enabled(dev, band)) {
		/*
		 * MT_RF_MISC (offset: 0x0518)
		 * [2] 1'b1: enable external A band PA, 1'b0: disable external A band PA
		 * [3] 1'b1: enable external G band PA, 1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND)
			mt76_set(dev, MT_RF_MISC, BIT(2));
		else
			mt76_set(dev, MT_RF_MISC, BIT(3));

		/* External PA */
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				mt76x0_rf_wr(dev,
					     mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
					     mt76x0_rf_ext_pa_tab[i].value);
	}

	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* Set Atten mode = 0
		 * For Ext A band, Disable Tx Inc dcoc Cal.
		 */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}

static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain;

			gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
			gain -= dev->cal.rx.lna_gain * 2;
			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}

static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
	u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
	u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
	u32 wlan, coex3, cmb;
	bool ant_div;

	wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
	cmb = mt76_rr(dev, MT_CMB_CTRL);
	coex3 = mt76_rr(dev, MT_COEXCFG3);

	cmb &= ~(BIT(14) | BIT(12));
	wlan &= ~(BIT(6) | BIT(5));
	coex3 &= ~GENMASK(5, 2);

	if (ee_ant & MT_EE_ANTENNA_DUAL) {
		/* dual antenna mode */
		ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
			  (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
		if (ant_div)
			cmb |= BIT(12);
		else
			coex3 |= BIT(4);
		coex3 |= BIT(3);
		if (dev->mt76.cap.has_2ghz)
			wlan |= BIT(6);
	} else {
		/* single antenna mode */
		if (dev->mt76.cap.has_5ghz) {
			coex3 |= BIT(3) | BIT(4);
		} else {
			wlan |= BIT(6);
			coex3 |= BIT(1);
		}
	}

	if (is_mt7630(dev))
		cmb |= BIT(14) | BIT(11);

	mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
	mt76_wr(dev, MT_CMB_CTRL, cmb);
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
	mt76_wr(dev, MT_COEXCFG3, coex3);
}

static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	default:
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
		/* TODO error */
		return;
	}

	mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}

static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 val;

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

	/* bypass ADDA control */
	mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
	mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	/* enable TX with DAC0 input */
	mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

	mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
	dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* stop bypass ADDA */
	mt76_wr(dev, MT_RF_BYPASS_0, 0);
	/* stop TX */
	mt76_wr(dev, MT_BBP(TXBE, 6), 0);
	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}

static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
			      u8 *info)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 val;

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		return -ETIMEDOUT;
	}

	*ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (chan->band == NL80211_BAND_5GHZ)
		*ltssi += 128;

	/* set packet info#1 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
	info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#2 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
	info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#3 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
	info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	return 0;
}

static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
				    int index, u8 tx_rate)
{
	u32 val, reg;

	reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
	val = mt76_rr(dev, reg);
	return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}

static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
			    u8 *info, s8 *target_power,
			    s8 *target_pa_power)
{
	u8 tx_rate, cur_power;

	cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
	switch (tx_mode) {
	case 0:
		/* cck rates */
		tx_rate = (info[0] & 0x60) >> 5;
		if (tx_rate > 3)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
		break;
	case 1: {
		u8 index;

		/* ofdm rates */
		tx_rate = (info[0] & 0xf0) >> 4;
		switch (tx_rate) {
		case 0xb:
			index = 0;
			break;
		case 0xf:
			index = 1;
			break;
		case 0xa:
			index = 2;
			break;
		case 0xe:
			index = 3;
			break;
		case 0x9:
			index = 4;
			break;
		case 0xd:
			index = 5;
			break;
		case 0x8:
			index = 6;
			break;
		case 0xc:
			index = 7;
			break;
		default:
			return -EINVAL;
		}

		*target_power = cur_power + dev->mt76.rate_power.ofdm[index];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
		break;
	}
	case 4:
		/* vht rates */
		tx_rate = info[1] & 0xf;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	default:
		/* ht rates */
		tx_rate = info[1] & 0x7f;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	}

	return 0;
}

static s16 mt76x0_phy_lin2db(u16 val)
{
	u32 mantissa = val << 4;
	int ret, data;
	s16 exp = -4;

	while (mantissa < BIT(15)) {
		mantissa <<= 1;
		if (--exp < -20)
			return -10000;
	}
	while (mantissa > 0xffff) {
		mantissa >>= 1;
		if (++exp > 20)
			return -10000;
	}

	/* s(15,0) */
	if (mantissa <= 47104)
		data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
	else
		data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
	data = max_t(int, 0, data);

	ret = ((15 + exp) << 15) + data;
	ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
	return ret >> 10;
}

static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
			   s8 target_power, s8 target_pa_power,
			   s16 ltssi)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	int tssi_target = target_power << 12, tssi_slope;
	int tssi_offset, tssi_db, ret;
	u32 data;
	u16 val;

	if (chan->band == NL80211_BAND_5GHZ) {
		u8 bound[7];
		int i, err;

		err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
					  sizeof(bound));
		if (err < 0)
			return err;

		for (i = 0; i < ARRAY_SIZE(bound); i++) {
			if (chan->hw_value <= bound[i] || !bound[i])
				break;
		}
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

		tssi_offset = val >> 8;
		if ((tssi_offset >= 64 && tssi_offset <= 127) ||
		    (tssi_offset & BIT(7)))
			tssi_offset -= BIT(8);
	} else {
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

		tssi_offset = val >> 8;
		if (tssi_offset & BIT(7))
			tssi_offset -= BIT(8);
	}
	tssi_slope = val & 0xff;

	switch (target_pa_power) {
	case 1:
		if (chan->band == NL80211_BAND_2GHZ)
			tssi_target += 29491; /* 3.6 * 8192 */
		/* fall through */
	case 0:
		break;
	default:
		tssi_target += 4424; /* 0.54 * 8192 */
		break;
	}

	if (!tx_mode) {
		data = mt76_rr(dev, MT_BBP(CORE, 1));
		if (is_mt7630(dev) && mt76_is_mmio(dev)) {
			int offset;

			/* 2.3 * 8192 or 1.5 * 8192 */
			offset = (data & BIT(5)) ? 18841 : 12288;
			tssi_target += offset;
		} else if (data & BIT(5)) {
			/* 0.8 * 8192 */
			tssi_target += 6554;
		}
	}

	data = mt76_rr(dev, MT_BBP(TXBE, 4));
	switch (data & 0x3) {
	case 1:
		tssi_target -= 49152; /* -6db * 8192 */
		break;
	case 2:
		tssi_target -= 98304; /* -12db * 8192 */
		break;
	case 3:
		tssi_target += 49152; /* 6db * 8192 */
		break;
	default:
		break;
	}

	tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
	if (chan->band == NL80211_BAND_5GHZ) {
		tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
		tssi_target -= tssi_db;
		if (ltssi > 254 && tssi_target > 0) {
			/* upper saturate */
			tssi_target = 0;
		}
	} else {
		tssi_db += (tssi_offset << 9); /* offset s3.4 */
		tssi_target -= tssi_db;
		/* upper-lower saturate */
		if ((ltssi > 126 && tssi_target > 0) ||
		    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
			tssi_target = 0;
		}
	}

	if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
	    dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
	    tssi_target > -4096 && tssi_target < 4096) {
		if ((tssi_target < 0 &&
		     tssi_target + dev->cal.tssi_target > 0) ||
		    (tssi_target > 0 &&
		     tssi_target + dev->cal.tssi_target <= 0))
			tssi_target = 0;
		else
			dev->cal.tssi_target = tssi_target;
	} else {
		dev->cal.tssi_target = tssi_target;
	}

	/* make the compensate value to the nearest compensate code */
	if (tssi_target > 0)
		tssi_target += 2048;
	else
		tssi_target -= 2048;
	tssi_target >>= 12;

	ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
	if (ret & BIT(5))
		ret -= BIT(6);
	ret += tssi_target;

	ret = min_t(int, 31, ret);
	return max_t(int, -32, ret);
}

static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
	s8 target_power, target_pa_power;
	u8 tssi_info[3], tx_mode;
	s16 ltssi;
	s8 val;

	if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
		return;

	tx_mode = tssi_info[0] & 0x7;
	if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
					&target_power, &target_pa_power) < 0)
		return;

	val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
					 target_pa_power, ltssi);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
	struct mt76_rate_power *t = &dev->mt76.rate_power;
	s8 info;

	mt76x0_get_tx_power_per_rate(dev, dev->mt76.chandef.chan, t);
	mt76x0_get_power_info(dev, dev->mt76.chandef.chan, &info);

	mt76x02_add_rate_power_offset(t, info);
	mt76x02_limit_rate_power(t, dev->mt76.txpower_conf);
	dev->mt76.txpower_cur = mt76x02_get_max_rate_power(t);
	mt76x02_add_rate_power_offset(t, -info);

	dev->target_power = info;
	mt76x02_phy_set_txpower(dev, info, info);
}

void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
	u32 val, tx_alc, reg_val;

	if (is_mt7630(dev))
		return;

	if (power_on) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
		usleep_range(10, 20);

		if (mt76x0_tssi_enabled(dev)) {
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_RX);
			mt76x0_phy_tssi_dc_calibrate(dev);
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_TX |
				MT_MAC_SYS_CTRL_ENABLE_RX);
		}
	}

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
	mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

	if (is_5ghz) {
		if (chan->hw_value < 100)
			val = 0x701;
		else if (chan->hw_value < 140)
			val = 0x801;
		else
			val = 0x901;
	} else {
		val = 0x600;
	}

	mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
	msleep(350);
	mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
	usleep_range(15000, 20000);

	mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);

int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	int ch_group_index, freq, freq1;
	u8 channel;
	u32 val;
	u16 rf_bw_band;

	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
	dev->mt76.chandef = *chandef;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}

	if (mt76_is_usb(dev)) {
		mt76x0_phy_bbp_set_bw(dev, chandef->width);
	} else {
		if (chandef->width == NL80211_CHAN_WIDTH_80 ||
		    chandef->width == NL80211_CHAN_WIDTH_40)
			val = 0x201;
		else
			val = 0x601;
		mt76_wr(dev, MT_TX_SW_CFG0, val);
	}
	mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
	mt76x02_phy_set_band(dev, chandef->chan->band,
			     ch_group_index & 1);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

	/* set Japan Tx filter at channel 14 */
	if (channel == 14)
		mt76_set(dev, MT_BBP(CORE, 1), 0x20);
	else
		mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

	mt76x0_read_rx_gain(dev);
	mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

	/* enable vco */
	mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
	if (scan)
		return 0;

	mt76x02_init_agc_gain(dev);
	mt76x0_phy_calibrate(dev, false);
	mt76x0_phy_set_txpower(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);

	return 0;
}

static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	s8 val;

	rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

	mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
	mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
	mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		goto done;
	}

	val = mt76_rr(dev, MT_BBP(CORE, 35));
	val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

	if (abs(val - dev->cal.temp_vco) > 20) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
				      dev->mt76.chandef.chan->hw_value);
		dev->cal.temp_vco = val;
	}
	if (abs(val - dev->cal.temp) > 30) {
		mt76x0_phy_calibrate(dev, false);
		dev->cal.temp = val;
	}

done:
	mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}

static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
	u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

	if ((dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
	    !is_mt7630(dev))
		mt76x02_phy_dfs_adjust_agc(dev);
}

static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
	bool gain_change;
	u8 gain_delta;
	int low_gain;

	dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76);
	if (!dev->cal.avg_rssi_all)
		dev->cal.avg_rssi_all = -75;

	low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

	gain_change = dev->cal.low_gain < 0 ||
		      (dev->cal.low_gain & 2) ^ (low_gain & 2);
	dev->cal.low_gain = low_gain;

	if (!gain_change) {
		if (mt76x02_phy_adjust_vga_gain(dev))
			mt76x0_phy_set_gain_val(dev);
		return;
	}

	dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
	gain_delta = (low_gain == 2) ? 10 : 0;

	dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
	mt76x0_phy_set_gain_val(dev);

	/* clear false CCA counters */
	mt76_rr(dev, MT_RX_STAT_1);
}

static void mt76x0_phy_calibration_work(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       cal_work.work);

	mt76x0_phy_update_channel_gain(dev);
	if (mt76x0_tssi_enabled(dev))
		mt76x0_phy_tssi_calibrate(dev);
	else
		mt76x0_phy_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     4 * MT_CALIBRATE_INTERVAL);
}

static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
				      const struct mt76_reg_pair *rp, int len)
{
	int i;

	for (i = 0; i < len; i++) {
		u32 reg = rp[i].reg;
		u8 val = rp[i].value;

		switch (reg) {
		case MT_RF(0, 3):
			if (mt76_is_mmio(dev)) {
				if (is_mt7630(dev))
					val = 0x70;
				else
					val = 0x63;
			} else {
				val = 0x73;
			}
			break;
		case MT_RF(0, 21):
			if (is_mt7610e(dev))
				val = 0x10;
			else
				val = 0x12;
			break;
		case MT_RF(5, 2):
			if (is_mt7630(dev))
				val = 0x1d;
			else if (is_mt7610e(dev))
				val = 0x00;
			else
				val = 0x0c;
			break;
		default:
			break;
		}
		mt76x0_rf_wr(dev, reg, val);
	}
}

static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
	int i;
	u8 val;

	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
				  ARRAY_SIZE(mt76x0_rf_central_tab));
	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
				  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/*
	 * Frequency calibration
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	mt76x0_rf_wr(dev, MT_RF(0, 22),
		     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
	val = mt76x0_rf_rr(dev, MT_RF(0, 22));

	/* Reset procedure DAC during power-up:
	 * - set B0.R73<7>
	 * - clear B0.R73<7>
	 * - set B0.R73<7>
	 */
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

	/* vcocal_en: initiate VCO calibration (reset after completion) */
	mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

	mt76x0_phy_ant_select(dev);
	mt76x0_phy_rf_init(dev);
	mt76x02_phy_set_rxpath(dev);
	mt76x02_phy_set_txdac(dev);
}