/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2
 * as published by the Free Software Foundation
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "trace.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

#include <linux/etherdevice.h>

static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);
	trace_mt76x0_rf_write(&dev->mt76, bank, offset, value);
out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank) {
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);
		trace_mt76x0_rf_read(&dev->mt76, bank, offset, ret);
	}
out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int
rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		WARN_ON_ONCE(1);
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}

static int
rf_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

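		/* read back through the MCU register-pair interface */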
		ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		WARN_ON_ONCE(1);
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}

static int
rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = rf_rr(dev, offset);
	if (ret < 0)
		return ret;
	val |= ret & ~mask;
	ret = rf_wr(dev, offset, val);
	if (ret)
		return ret;

	return val;
}

static int
rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	return rf_rmw(dev, offset, 0, val);
}

#if 0
static int
rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
	return rf_rmw(dev, offset, mask, 0);
}
#endif

#define RF_RANDOM_WRITE(dev, tab) \
	mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, \
		   tab, ARRAY_SIZE(tab))

int mt76x0_wait_bbp_ready(struct mt76x02_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	return 0;
}

static void
mt76x0_bbp_set_ctrlch(struct mt76x02_dev *dev, enum nl80211_chan_width width,
		      u8 ctrl)
{
	int core_val, agc_val;

	switch (width) {
	case NL80211_CHAN_WIDTH_80:
		core_val = 3;
		agc_val = 7;
		break;
	case NL80211_CHAN_WIDTH_40:
		core_val = 2;
		agc_val = 3;
		break;
	default:
		core_val = 0;
		agc_val = 1;
		break;
	}

	mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
	mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
	mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
	mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
}

static void mt76x0_vco_cal(struct mt76x02_dev *dev, u8 channel)
{
	u8 val;

	val = rf_rr(dev, MT_RF(0, 4));
	if ((val & 0x70) != 0x30)
		return;

	/*
	 * Calibration Mode - Open loop, closed loop, and amplitude:
	 * B0.R06.[0]: 1
	 * B0.R06.[3:1] bp_close_code: 100
	 * B0.R05.[7:0] bp_open_code: 0x0
	 * B0.R04.[2:0] cal_bits: 000
	 * B0.R03.[2:0] startup_time: 011
	 * B0.R03.[6:4] settle_time:
	 *   80MHz channel: 110
	 *   40MHz channel: 101
	 *   20MHz channel: 100
	 */
	val = rf_rr(dev, MT_RF(0, 6));
	val &= ~0xf;
	val |= 0x09;
	rf_wr(dev, MT_RF(0, 6), val);

	val = rf_rr(dev, MT_RF(0, 5));
	if (val != 0)
		rf_wr(dev, MT_RF(0, 5), 0x0);

	val = rf_rr(dev, MT_RF(0, 4));
	val &= ~0x07;
	rf_wr(dev, MT_RF(0, 4), val);

	val = rf_rr(dev, MT_RF(0, 3));
	val &= ~0x77;
	if (channel == 1 || channel == 7 || channel == 9 || channel >= 13) {
		val |= 0x63;
	} else if (channel == 3 || channel == 4 || channel == 10) {
		val |= 0x53;
	} else if (channel == 2 || channel == 5 || channel == 6 ||
		   channel == 8 || channel == 11 || channel == 12) {
		val |= 0x43;
	} else {
		WARN(1, "Unknown channel %u\n", channel);
		return;
	}
	rf_wr(dev, MT_RF(0, 3), val);

	/* TODO replace by mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7)); */
	val = rf_rr(dev, MT_RF(0, 4));
	val = ((val & ~(0x80)) | 0x80);
	rf_wr(dev, MT_RF(0, 4), val);

	msleep(2);
}

static void
mt76x0_mac_set_ctrlch(struct mt76x02_dev *dev, bool primary_upper)
{
	mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
		       primary_upper);
}

static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		rf_wr(dev, MT_RF(5, 0), 0x45);
		rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
		mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		rf_wr(dev, MT_RF(5, 0), 0x44);
		rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
		mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}

static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel,
			      u16 rf_bw_band)
{
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	enum nl80211_band band;
	u32 mac_reg;
	u8 rf_val;
	int i;
	bool bSDM = false;
	const struct mt76x0_freq_item *freq_item;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			bSDM = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (bSDM)
				freq_item = &mt76x0_sdm_frequency_plan[i];
			else
				freq_item = &mt76x0_frequency_plan[i];

			rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			rf_val = rf_rr(dev, MT_RF(0, 32));
			rf_val &= ~0xE0;
			rf_val |= freq_item->pllR32_b7b5;
			rf_wr(dev, MT_RF(0, 32), rf_val);

			/* R32<4:0> pll_den: (denominator - 8) */
			rf_val = rf_rr(dev, MT_RF(0, 32));
			rf_val &= ~0x1F;
			rf_val |= freq_item->pllR32_b4b0;
			rf_wr(dev, MT_RF(0, 32), rf_val);

			/* R31<7:5> */
			rf_val = rf_rr(dev, MT_RF(0, 31));
			rf_val &= ~0xE0;
			rf_val |= freq_item->pllR31_b7b5;
			rf_wr(dev, MT_RF(0, 31), rf_val);

			/* R31<4:0> pll_k (numerator) */
			rf_val = rf_rr(dev, MT_RF(0, 31));
			rf_val &= ~0x1F;
			rf_val |= freq_item->pllR31_b4b0;
			rf_wr(dev, MT_RF(0, 31), rf_val);

			/* R30<7> sdm_reset_n */
			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x80;
			if (bSDM) {
				rf_wr(dev, MT_RF(0, 30), rf_val);
				rf_val |= 0x80;
				rf_wr(dev, MT_RF(0, 30), rf_val);
			} else {
				rf_val |= freq_item->pllR30_b7;
				rf_wr(dev, MT_RF(0, 30), rf_val);
			}

			/* R30<6:2> sdmmash_prbs,sin */
			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x7C;
			rf_val |= freq_item->pllR30_b6b2;
			rf_wr(dev, MT_RF(0, 30), rf_val);

			/* R30<1> sdm_bp */
			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x02;
			rf_val |= (freq_item->pllR30_b1 << 1);
			rf_wr(dev, MT_RF(0, 30), rf_val);

			/* R30<0> R29<7:0> (hex) pll_n */
			rf_val = freq_item->pll_n & 0x00FF;
			rf_wr(dev, MT_RF(0, 29), rf_val);

			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x1;
			rf_val |= ((freq_item->pll_n >> 8) & 0x0001);
			rf_wr(dev, MT_RF(0, 30), rf_val);

			/* R28<7:6> isi_iso */
			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0xC0;
			rf_val |= freq_item->pllR28_b7b6;
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R28<5:4> pfd_dly */
			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0x30;
			rf_val |= freq_item->pllR28_b5b4;
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R28<3:2> clksel option */
			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0x0C;
			rf_val |= freq_item->pllR28_b3b2;
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
			rf_val = freq_item->pll_sdm_k & 0x000000FF;
			rf_wr(dev, MT_RF(0, 26), rf_val);

			rf_val = ((freq_item->pll_sdm_k >> 8) & 0x000000FF);
			rf_wr(dev, MT_RF(0, 27), rf_val);

			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0x3;
			rf_val |= ((freq_item->pll_sdm_k >> 16) & 0x0003);
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R24<1:0> xo_div */
			rf_val = rf_rr(dev, MT_RF(0, 24));
			rf_val &= ~0x3;
			rf_val |= freq_item->pllR24_b1b0;
			rf_wr(dev, MT_RF(0, 24), rf_val);

			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_bw_switch_tab[i].value);
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			rf_wr(dev, mt76x0_rf_band_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_band_switch_tab[i].value);
		}
	}

	mac_reg = mt76_rr(dev, MT_RF_MISC);
	mac_reg &= ~0xC; /* Clear 0x518[3:2] */
	mt76_wr(dev, MT_RF_MISC, mac_reg);

	band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
	if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
		/*
		 * MT_RF_MISC (offset: 0x0518)
		 * [2] 1'b1: enable external A band PA,
		 *     1'b0: disable external A band PA
		 * [3] 1'b1: enable external G band PA,
		 *     1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND) {
			mac_reg = mt76_rr(dev, MT_RF_MISC);
			mac_reg |= 0x4;
			mt76_wr(dev, MT_RF_MISC, mac_reg);
		} else {
			mac_reg = mt76_rr(dev, MT_RF_MISC);
			mac_reg |= 0x8;
			mt76_wr(dev, MT_RF_MISC, mac_reg);
		}

		/* External PA */
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				rf_wr(dev, mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
				      mt76x0_rf_ext_pa_tab[i].value);
	}

	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* Set Atten mode = 2 for G band, disable Tx Inc dcoc. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* Set Atten mode = 0 for Ext A band, disable Tx Inc dcoc cal.
		 */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}

static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u8 channel,
			       u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain;

			gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
			gain -= dev->cal.rx.lna_gain * 2;
			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}

static void mt76x0_ant_select(struct mt76x02_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;

	/* single antenna mode */
	if (chan->band == NL80211_BAND_2GHZ) {
		mt76_rmw(dev, MT_COEXCFG3,
			 BIT(5) | BIT(4) | BIT(3) | BIT(2), BIT(1));
		mt76_rmw(dev, MT_WLAN_FUN_CTRL, BIT(5), BIT(6));
	} else {
		mt76_rmw(dev, MT_COEXCFG3, BIT(5) | BIT(2),
			 BIT(4) | BIT(3));
		mt76_clear(dev, MT_WLAN_FUN_CTRL,
			   BIT(6) | BIT(5));
	}
	mt76_clear(dev, MT_CMB_CTRL, BIT(14) | BIT(12));
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
}

static void
mt76x0_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	default:
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
		/* TODO error */
		return;
	}

	mt76x02_mcu_function_select(&dev->mt76, BW_SETTING, bw, false);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
	struct mt76_rate_power *t = &dev->mt76.rate_power;
	u8 info[2];

	mt76x0_get_power_info(dev, info);
	mt76x0_get_tx_power_per_rate(dev);

	mt76x02_add_rate_power_offset(t, info[0]);
	mt76x02_limit_rate_power(t, dev->mt76.txpower_conf);
	dev->mt76.txpower_cur = mt76x02_get_max_rate_power(t);
	mt76x02_add_rate_power_offset(t, -info[0]);

	mt76x02_phy_set_txpower(&dev->mt76, info[0], info[1]);
}

int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	int ch_group_index, freq, freq1;
	u8 channel;
	u32 val;
	u16 rf_bw_band;

	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
	dev->mt76.chandef = *chandef;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}

	mt76x0_bbp_set_bw(dev, chandef->width);
	mt76x0_bbp_set_ctrlch(dev, chandef->width, ch_group_index);
	mt76x0_mac_set_ctrlch(dev, ch_group_index & 1);
	mt76x0_ant_select(dev);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);
	mt76x0_read_rx_gain(dev);

	/* set Japan Tx filter at channel 14 */
	val = mt76_rr(dev, MT_BBP(CORE, 1));
	if (channel == 14)
		val |= 0x20;
	else
		val &= ~0x20;
	mt76_wr(dev, MT_BBP(CORE, 1), val);

	mt76x0_phy_set_chan_bbp_params(dev, channel, rf_bw_band);

	/* Vendor driver doesn't do it */
	/* mt76x0_phy_set_tx_power(dev, channel, rf_bw_band); */

	mt76x0_vco_cal(dev, channel);
	if (scan)
		mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 1, false);

	mt76x0_phy_set_txpower(dev);

	return 0;
}

void mt76x0_phy_recalibrate_after_assoc(struct mt76x02_dev *dev)
{
	u32 tx_alc, reg_val;
	u8 channel = dev->mt76.chandef.chan->hw_value;
	int is_5ghz = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ?
		      1 : 0;

	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_R, 0, false);

	mt76x0_vco_cal(dev, channel);

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, 0x2124);
	reg_val &= 0xffffff7e;
	mt76_wr(dev, 0x2124, reg_val);

	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 0, false);

	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LC, is_5ghz, false);
	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LOFT, is_5ghz, false);
	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TXIQ, is_5ghz, false);
	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TX_GROUP_DELAY,
			      is_5ghz, false);
	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXIQ, is_5ghz, false);
	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RX_GROUP_DELAY,
			      is_5ghz, false);

	mt76_wr(dev, 0x2124, reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	msleep(100);

	mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 1, false);
}

void mt76x0_agc_save(struct mt76x02_dev *dev)
{
	/* Only one RX path */
	dev->agc_save = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, 8)));
}

void mt76x0_agc_restore(struct mt76x02_dev *dev)
{
	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, dev->agc_save);
}

static void mt76x0_temp_sensor(struct mt76x02_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	int cycle, temp;
	u32 val;
	s32 sval;

	rf_b7_73 = rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = rf_rr(dev, MT_RF(0, 73));

	rf_wr(dev, MT_RF(7, 73), 0x02);
	rf_wr(dev, MT_RF(0, 66), 0x23);
	rf_wr(dev, MT_RF(0, 73), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);

	/* poll until BIT(4) of MT_BBP(CORE, 34) clears */
	for (cycle = 0; cycle < 2000; cycle++) {
		val = mt76_rr(dev, MT_BBP(CORE, 34));
		if (!(val & 0x10))
			break;
		udelay(3);
	}

	if (cycle >= 2000) {
		val &= 0x10;
		mt76_wr(dev, MT_BBP(CORE, 34), val);
		goto done;
	}

	sval = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (!(sval & 0x80))
		sval &= 0x7f; /* Positive */
	else
		sval |= 0xffffff00; /* Negative */

	temp = (35 * (sval - dev->cal.rx.temp_offset)) / 10 + 25;

done:
	rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	rf_wr(dev, MT_RF(0, 73), rf_b0_67);
}

static void mt76x0_dynamic_vga_tuning(struct mt76x02_dev *dev)
{
	struct cfg80211_chan_def *chandef = &dev->mt76.chandef;
	u32 val, init_vga;
	int avg_rssi;

	init_vga = chandef->chan->band == NL80211_BAND_5GHZ ? 0x54 : 0x4E;
	avg_rssi = mt76x02_phy_get_min_avg_rssi(&dev->mt76);
	if (avg_rssi > -60)
		init_vga -= 0x20;
	else if (avg_rssi > -70)
		init_vga -= 0x10;

	val = mt76_rr(dev, MT_BBP(AGC, 8));
	val &= 0xFFFF80FF;
	val |= init_vga << 8;
	mt76_wr(dev, MT_BBP(AGC, 8), val);
}

static void mt76x0_phy_calibrate(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       cal_work.work);

	mt76x0_dynamic_vga_tuning(dev);
	mt76x0_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}

static void mt76x0_rf_init(struct mt76x02_dev *dev)
{
	int i;
	u8 val;

	RF_RANDOM_WRITE(dev, mt76x0_rf_central_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) ==
			 (RF_G_BAND | RF_BW_20))
			rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			rf_wr(dev,
			      mt76x0_rf_band_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/*
	 * Frequency calibration:
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	rf_wr(dev, MT_RF(0, 22),
	      min_t(u8, dev->cal.rx.freq_offset, 0xbf));
	val = rf_rr(dev, MT_RF(0, 22));

	/*
	 * Reset the DAC during power up:
	 * set B0.R73<7> = 1, then B0.R73<7> = 0, then B0.R73<7> = 1 again.
	 */
	val = rf_rr(dev, MT_RF(0, 73));
	val |= 0x80;
	rf_wr(dev, MT_RF(0, 73), val);
	val &= ~0x80;
	rf_wr(dev, MT_RF(0, 73), val);
	val |= 0x80;
	rf_wr(dev, MT_RF(0, 73), val);

	/*
	 * vcocal_en: initiate VCO calibration (reset after completion).
	 * It should be at the end of RF configuration.
	 */
	rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibrate);

	mt76x0_rf_init(dev);
	mt76x02_phy_set_rxpath(&dev->mt76);
	mt76x02_phy_set_txdac(&dev->mt76);
}