Lines Matching refs:rtwdev

80 static u8 _kpath(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
82 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]dbcc_en: %x, PHY%d\n",
83 rtwdev->dbcc_en, phy_idx);
85 if (!rtwdev->dbcc_en)
94 static void _rfk_backup_bb_reg(struct rtw89_dev *rtwdev, u32 backup_bb_reg_val[])
100 rtw89_phy_read32_mask(rtwdev, rtw8852c_backup_bb_regs[i],
102 rtw89_debug(rtwdev, RTW89_DBG_RFK,
108 static void _rfk_backup_rf_reg(struct rtw89_dev *rtwdev, u32 backup_rf_reg_val[],
115 rtw89_read_rf(rtwdev, rf_path,
117 rtw89_debug(rtwdev, RTW89_DBG_RFK,
123 static void _rfk_restore_bb_reg(struct rtw89_dev *rtwdev, u32 backup_bb_reg_val[])
128 rtw89_phy_write32_mask(rtwdev, rtw8852c_backup_bb_regs[i],
130 rtw89_debug(rtwdev, RTW89_DBG_RFK,
136 static void _rfk_restore_rf_reg(struct rtw89_dev *rtwdev, u32 backup_rf_reg_val[],
142 rtw89_write_rf(rtwdev, rf_path, rtw8852c_backup_rf_regs[i],
145 rtw89_debug(rtwdev, RTW89_DBG_RFK,
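The four helpers above share one backup/restore shape over the rtw8852c_backup_bb_regs[]/rtw8852c_backup_rf_regs[] tables. A minimal sketch of that loop for the BB side, assuming a hypothetical count macro BACKUP_BB_REG_NR (only the table name and the read/write helpers come from the listing):

static void rfk_bb_backup_restore_sketch(struct rtw89_dev *rtwdev,
					 u32 vals[], bool restore)
{
	u32 i;

	/* BACKUP_BB_REG_NR and the MASKDWORD access width are assumptions,
	 * not taken from the driver source shown here.
	 */
	for (i = 0; i < BACKUP_BB_REG_NR; i++) {
		if (restore)
			rtw89_phy_write32_mask(rtwdev,
					       rtw8852c_backup_bb_regs[i],
					       MASKDWORD, vals[i]);
		else
			vals[i] = rtw89_phy_read32_mask(rtwdev,
							rtw8852c_backup_bb_regs[i],
							MASKDWORD);
	}
}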
151 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath)
162 2, 5000, false, rtwdev, path, 0x00,
164 rtw89_debug(rtwdev, RTW89_DBG_RFK,
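The "2, 5000, false, rtwdev, path, 0x00" fragment above belongs to the kernel's read_poll_timeout_atomic() helper from <linux/iopoll.h>: re-read RF register 0x00 every 2 us for up to 5 ms. A hedged sketch of that idiom; the RR_MOD_MASK field and the rf_mode stop condition are assumptions about the elided lines:

static void wait_rx_mode_sketch(struct rtw89_dev *rtwdev, u8 path)
{
	u32 rf_mode;
	int ret;

	/* Poll RF reg 0x00 until the path reports RX mode (condition assumed). */
	ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2,
				       2, 5000, false,
				       rtwdev, path, 0x00, RR_MOD_MASK);
	rtw89_debug(rtwdev, RTW89_DBG_RFK,
		    "[RFK] Wait S%d to Rx mode (ret = %d)\n", path, ret);
}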
170 static void _dack_dump(struct rtw89_dev *rtwdev)
172 struct rtw89_dack_info *dack = &rtwdev->dack;
176 rtw89_debug(rtwdev, RTW89_DBG_RFK,
179 rtw89_debug(rtwdev, RTW89_DBG_RFK,
182 rtw89_debug(rtwdev, RTW89_DBG_RFK,
185 rtw89_debug(rtwdev, RTW89_DBG_RFK,
189 rtw89_debug(rtwdev, RTW89_DBG_RFK,
192 rtw89_debug(rtwdev, RTW89_DBG_RFK,
196 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 MSBK ic:\n");
199 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
201 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 MSBK qc:\n");
204 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
206 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 MSBK ic:\n");
209 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
211 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 MSBK qc:\n");
214 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
218 static void _addck_backup(struct rtw89_dev *rtwdev)
220 struct rtw89_dack_info *dack = &rtwdev->dack;
222 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0, 0x0);
223 dack->addck_d[0][0] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR0,
225 dack->addck_d[0][1] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR0,
228 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1, 0x0);
229 dack->addck_d[1][0] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR1,
231 dack->addck_d[1][1] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR1,
235 static void _addck_reload(struct rtw89_dev *rtwdev)
237 struct rtw89_dack_info *dack = &rtwdev->dack;
239 rtw89_phy_write32_mask(rtwdev, R_ADDCK0_RL, B_ADDCK0_RL1,
241 rtw89_phy_write32_mask(rtwdev, R_ADDCK0_RL, B_ADDCK0_RL0,
243 rtw89_phy_write32_mask(rtwdev, R_ADDCK0_RL, B_ADDCK0_RLS, 0x3);
244 rtw89_phy_write32_mask(rtwdev, R_ADDCK1_RL, B_ADDCK1_RL1,
246 rtw89_phy_write32_mask(rtwdev, R_ADDCK1_RL, B_ADDCK1_RL0,
248 rtw89_phy_write32_mask(rtwdev, R_ADDCK1_RL, B_ADDCK1_RLS, 0x3);
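A sketch of how the ADDCK offsets backed up above are re-applied: each pair is written into the *_RL1/*_RL0 fields and latched by setting *_RLS to 0x3. Path 0 only is shown, and the index-to-field mapping is an assumption, since those argument lines are not part of this match set:

	struct rtw89_dack_info *dack = &rtwdev->dack;

	/* Mapping of addck_d[0][0]/[0][1] onto RL1/RL0 is assumed. */
	rtw89_phy_write32_mask(rtwdev, R_ADDCK0_RL, B_ADDCK0_RL1,
			       dack->addck_d[0][0]);
	rtw89_phy_write32_mask(rtwdev, R_ADDCK0_RL, B_ADDCK0_RL0,
			       dack->addck_d[0][1]);
	rtw89_phy_write32_mask(rtwdev, R_ADDCK0_RL, B_ADDCK0_RLS, 0x3);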
251 static void _dack_backup_s0(struct rtw89_dev *rtwdev)
253 struct rtw89_dack_info *dack = &rtwdev->dack;
256 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x1);
258 rtw89_phy_write32_mask(rtwdev, R_DCOF0, B_DCOF0_V, i);
259 dack->msbk_d[0][0][i] = rtw89_phy_read32_mask(rtwdev,
262 rtw89_phy_write32_mask(rtwdev, R_DCOF8, B_DCOF8_V, i);
263 dack->msbk_d[0][1][i] = rtw89_phy_read32_mask(rtwdev,
267 dack->biask_d[0][0] = rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS00,
269 dack->biask_d[0][1] = rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS01,
271 dack->dadck_d[0][0] = rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK00,
273 dack->dadck_d[0][1] = rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK01,
277 static void _dack_backup_s1(struct rtw89_dev *rtwdev)
279 struct rtw89_dack_info *dack = &rtwdev->dack;
282 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x1);
284 rtw89_phy_write32_mask(rtwdev, R_DACK10, B_DACK10, i);
285 dack->msbk_d[1][0][i] = rtw89_phy_read32_mask(rtwdev,
288 rtw89_phy_write32_mask(rtwdev, R_DACK11, B_DACK11, i);
289 dack->msbk_d[1][1][i] = rtw89_phy_read32_mask(rtwdev,
293 dack->biask_d[1][0] = rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS10,
295 dack->biask_d[1][1] = rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS11,
297 dack->dadck_d[1][0] = rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK10,
299 dack->dadck_d[1][1] = rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK11,
303 static void _dack_reload_by_path(struct rtw89_dev *rtwdev,
306 struct rtw89_dack_info *dack = &rtwdev->dack;
315 rtw89_rfk_parser(rtwdev, &rtw8852c_dack_reload_defs_tbl);
322 rtw89_phy_write32(rtwdev, addr, val32);
323 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x=0x%x\n", addr,
324 rtw89_phy_read32_mask(rtwdev, addr, MASKDWORD));
331 rtw89_phy_write32(rtwdev, addr, val32);
332 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x=0x%x\n", addr,
333 rtw89_phy_read32_mask(rtwdev, addr, MASKDWORD));
340 rtw89_phy_write32(rtwdev, addr, val32);
341 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x=0x%x\n", addr,
342 rtw89_phy_read32_mask(rtwdev, addr, MASKDWORD));
349 rtw89_phy_write32(rtwdev, addr, val32);
350 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x=0x%x\n", addr,
351 rtw89_phy_read32_mask(rtwdev, addr, MASKDWORD));
357 rtw89_phy_write32(rtwdev, addr, val32);
358 rtw89_phy_write32_set(rtwdev, addr, BIT(0));
361 static void _dack_reload(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
366 _dack_reload_by_path(rtwdev, path, i);
369 static void _addck(struct rtw89_dev *rtwdev)
371 struct rtw89_dack_info *dack = &rtwdev->dack;
376 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_RST, 0x1);
377 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_EN, 0x1);
378 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_EN, 0x0);
380 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0, 0x1);
383 1, 10000, false, rtwdev, 0xc0fc, BIT(0));
385 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 ADDCK timeout\n");
389 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_RST, 0x0);
392 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_RST, 0x1);
393 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_EN, 0x1);
394 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_EN, 0x0);
396 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1, 0x1);
399 1, 10000, false, rtwdev, 0xc1fc, BIT(0));
401 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 ADDCK timeout\n");
404 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_RST, 0x0);
407 static void _dack_reset(struct rtw89_dev *rtwdev, u8 path)
409 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
443 static void rtw8852c_txck_force(struct rtw89_dev *rtwdev, u8 path, bool force,
446 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_TXCK_ON, 0x0);
451 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_TXCK_VAL, ck);
452 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_TXCK_ON, 0x1);
455 static void rtw8852c_rxck_force(struct rtw89_dev *rtwdev, u8 path, bool force,
460 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_RXCK_ON, 0x0);
465 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_RXCK_VAL, ck);
466 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK | (path << 13), B_P0_RXCK_ON, 0x1);
481 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_CTL, def->ctl);
482 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_EN, def->en);
483 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, def->bw0);
484 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, def->bw1);
485 rtw89_phy_write32_mask(rtwdev, R_DRCK | (path << 8), B_DRCK_MUL, def->mul);
486 rtw89_phy_write32_mask(rtwdev, R_ADCMOD | (path << 8), B_ADCMOD_LP, def->lp);
489 static bool _check_dack_done(struct rtw89_dev *rtwdev, bool s0)
492 if (rtw89_phy_read32_mask(rtwdev, R_DACK_S0P0, B_DACK_S0P0_OK) == 0 ||
493 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P1, B_DACK_S0P1_OK) == 0 ||
494 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P2, B_DACK_S0P2_OK) == 0 ||
495 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P3, B_DACK_S0P3_OK) == 0)
498 if (rtw89_phy_read32_mask(rtwdev, R_DACK_S1P0, B_DACK_S1P0_OK) == 0 ||
499 rtw89_phy_read32_mask(rtwdev, R_DACK_S1P1, B_DACK_S1P1_OK) == 0 ||
500 rtw89_phy_read32_mask(rtwdev, R_DACK_S1P2, B_DACK_S1P2_OK) == 0 ||
501 rtw89_phy_read32_mask(rtwdev, R_DACK_S1P3, B_DACK_S1P3_OK) == 0)
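The _check_dack_done() predicate above is what the "1, 10000, false, rtwdev, true" fragments further down feed into: it is passed as the read op of read_poll_timeout_atomic() and polled every 1 us for up to 10 ms. A hedged sketch of that call site:

	bool done;
	int ret;

	/* Poll the DACK-done predicate for S0 (the 'true' argument); this
	 * mirrors the fragments in _dack_s0()/_dack_s1() below.
	 */
	ret = read_poll_timeout_atomic(_check_dack_done, done, done,
				       1, 10000, false, rtwdev, true);
	if (ret)
		rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 DACK timeout\n");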
508 static void _dack_s0(struct rtw89_dev *rtwdev)
510 struct rtw89_dack_info *dack = &rtwdev->dack;
514 rtw8852c_txck_force(rtwdev, RF_PATH_A, true, DAC_160M);
515 rtw89_rfk_parser(rtwdev, &rtw8852c_dack_defs_s0_tbl);
517 _dack_reset(rtwdev, RF_PATH_A);
519 rtw89_phy_write32_mask(rtwdev, R_DCOF1, B_DCOF1_S, 0x1);
521 1, 10000, false, rtwdev, true);
523 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 DACK timeout\n");
526 rtw89_phy_write32_mask(rtwdev, R_DCOF1, B_DCOF1_S, 0x0);
527 rtw8852c_txck_force(rtwdev, RF_PATH_A, false, DAC_960M);
528 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]after S0 DADCK\n");
530 _dack_backup_s0(rtwdev);
531 _dack_reload(rtwdev, RF_PATH_A);
532 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x0);
535 static void _dack_s1(struct rtw89_dev *rtwdev)
537 struct rtw89_dack_info *dack = &rtwdev->dack;
541 rtw8852c_txck_force(rtwdev, RF_PATH_B, true, DAC_160M);
542 rtw89_rfk_parser(rtwdev, &rtw8852c_dack_defs_s1_tbl);
544 _dack_reset(rtwdev, RF_PATH_B);
546 rtw89_phy_write32_mask(rtwdev, R_DACK1_K, B_DACK1_EN, 0x1);
548 1, 10000, false, rtwdev, false);
550 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 DACK timeout\n");
553 rtw89_phy_write32_mask(rtwdev, R_DACK1_K, B_DACK1_EN, 0x0);
554 rtw8852c_txck_force(rtwdev, RF_PATH_B, false, DAC_960M);
555 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]after S1 DADCK\n");
557 _dack_backup_s1(rtwdev);
558 _dack_reload(rtwdev, RF_PATH_B);
559 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x0);
562 static void _dack(struct rtw89_dev *rtwdev)
564 _dack_s0(rtwdev);
565 _dack_s1(rtwdev);
568 static void _drck(struct rtw89_dev *rtwdev)
573 rtw89_phy_write32_mask(rtwdev, R_DRCK, B_DRCK_EN, 0x1);
575 1, 10000, false, rtwdev, 0xc0c8, BIT(3));
577 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DRCK timeout\n");
579 rtw89_rfk_parser(rtwdev, &rtw8852c_drck_defs_tbl);
581 val = rtw89_phy_read32_mask(rtwdev, R_DRCK_RES, B_DRCK_RES);
582 rtw89_phy_write32_mask(rtwdev, R_DRCK, B_DRCK_IDLE, 0x0);
583 rtw89_phy_write32_mask(rtwdev, R_DRCK, B_DRCK_VAL, val);
584 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0xc0c4 = 0x%x\n",
585 rtw89_phy_read32_mask(rtwdev, R_DRCK, MASKDWORD));
588 static void _dac_cal(struct rtw89_dev *rtwdev, bool force,
591 struct rtw89_dack_info *dack = &rtwdev->dack;
593 u8 phy_map = rtw89_btc_phymap(rtwdev, RTW89_PHY_0, RF_AB, chanctx_idx);
596 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK b\n");
597 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK start!!!\n");
598 rf0_0 = rtw89_read_rf(rtwdev, RF_PATH_A, RR_MOD, RFREG_MASK);
599 rf1_0 = rtw89_read_rf(rtwdev, RF_PATH_B, RR_MOD, RFREG_MASK);
600 _drck(rtwdev);
602 rtw89_write_rf(rtwdev, RF_PATH_A, RR_RSV1, RR_RSV1_RST, 0x0);
603 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV1, RR_RSV1_RST, 0x0);
604 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MOD, RFREG_MASK, 0x337e1);
605 rtw89_write_rf(rtwdev, RF_PATH_B, RR_MOD, RFREG_MASK, 0x337e1);
606 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_ONESHOT_START);
607 _addck(rtwdev);
608 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_ONESHOT_STOP);
610 _addck_backup(rtwdev);
611 _addck_reload(rtwdev);
612 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MODOPT, RFREG_MASK, 0x0);
613 rtw89_write_rf(rtwdev, RF_PATH_B, RR_MODOPT, RFREG_MASK, 0x0);
614 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_ONESHOT_START);
615 _dack(rtwdev);
616 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_ONESHOT_STOP);
618 _dack_dump(rtwdev);
620 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MOD, RFREG_MASK, rf0_0);
621 rtw89_write_rf(rtwdev, RF_PATH_B, RR_MOD, RFREG_MASK, rf1_0);
622 rtw89_write_rf(rtwdev, RF_PATH_A, RR_RSV1, RR_RSV1_RST, 0x1);
623 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV1, RR_RSV1_RST, 0x1);
625 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK finish!!!\n");
648 static void rtw8852c_disable_rxagc(struct rtw89_dev *rtwdev, u8 path, u8 en_rxagc)
651 rtw89_phy_write32_mask(rtwdev, R_P0_AGC_CTL, B_P0_AGC_EN, en_rxagc);
653 rtw89_phy_write32_mask(rtwdev, R_P1_AGC_CTL, B_P1_AGC_EN, en_rxagc);
656 static void _iqk_rxk_setting(struct rtw89_dev *rtwdev, u8 path)
658 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
661 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RXK, 0x0101);
663 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RXK, 0x0202);
668 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_DPD_GDIS, 0x1);
669 rtw8852c_rxck_force(rtwdev, path, true, ADC_480M);
670 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_ACK_VAL, 0x0);
671 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_CKT, 0x1);
672 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), B_P0_NRBW_DBG, 0x1);
675 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_DPD_GDIS, 0x1);
676 rtw8852c_rxck_force(rtwdev, path, true, ADC_960M);
677 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_ACK_VAL, 0x1);
678 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_CKT, 0x1);
679 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), B_P0_NRBW_DBG, 0x1);
682 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_DPD_GDIS, 0x1);
683 rtw8852c_rxck_force(rtwdev, path, true, ADC_1920M);
684 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_ACK_VAL, 0x2);
685 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_CKT, 0x1);
686 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13), B_P0_NRBW_DBG, 0x1);
692 rtw89_rfk_parser(rtwdev, &rtw8852c_iqk_rxk_cfg_defs_tbl);
695 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RXK, 0x1101);
697 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_RXK, 0x2202);
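Throughout this file the per-path register instances are addressed by fixed strides, visible above as "+ (path << 8)" for the KIP/IQC blocks and "+ (path << 13)" for the per-path BB blocks. Two hypothetical inline helpers (not driver API) that make the pattern explicit:

/* Hypothetical helpers; the driver open-codes these offsets. */
static inline u32 kip_path_reg(u32 base, u8 path)
{
	return base + (path << 8);	/* 0x100 stride: R_CFIR_LUT, R_KIP_IQP, ... */
}

static inline u32 bb_path_reg(u32 base, u8 path)
{
	return base + (path << 13);	/* 0x2000 stride: R_UPD_CLK, R_P0_NRBW, ... */
}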
700 static bool _iqk_check_cal(struct rtw89_dev *rtwdev, u8 path, u8 ktype)
707 1, 8200, false, rtwdev, 0xbff8, MASKBYTE0);
709 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]IQK timeout!!!\n");
711 rtw89_phy_write32_clr(rtwdev, R_NCTL_N1, MASKBYTE0);
712 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, ret=%d\n", path, ret);
713 tmp = rtw89_phy_read32_mask(rtwdev, R_NCTL_RPT, MASKDWORD);
714 rtw89_debug(rtwdev, RTW89_DBG_RFK,
720 static bool _iqk_one_shot(struct rtw89_dev *rtwdev,
723 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
733 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
737 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
741 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
745 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
749 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
753 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x0);
760 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
764 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x0);
768 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x1);
775 rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD, iqk_cmd + 1);
777 fail = _iqk_check_cal(rtwdev, path, ktype);
778 rtw89_phy_write32_mask(rtwdev, addr_rfc_ctl, 0x00000002, 0x0);
783 static bool _rxk_group_sel(struct rtw89_dev *rtwdev,
786 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
792 bkrf0 = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_NBW);
794 rtw89_write_rf(rtwdev, RF_PATH_B, RR_IQKPLL, RR_IQKPLL_MOD, 0x3);
795 tmp = rtw89_read_rf(rtwdev, RF_PATH_B, RR_CHTR, RR_CHTR_MOD);
796 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV4, RR_RSV4_AGH, tmp);
797 tmp = rtw89_read_rf(rtwdev, RF_PATH_B, RR_CHTR, RR_CHTR_TXRX);
798 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV4, RR_RSV4_PLLCH, tmp);
804 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
805 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0);
806 rtw89_write_rf(rtwdev, path, RR_RXG, RR_RXG_IQKMOD, 0x9);
809 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
810 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0);
811 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x8);
814 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
815 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0);
816 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x9);
826 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG,
828 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_VOBUF,
832 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG,
834 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT,
838 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG,
840 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT,
844 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
846 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
848 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
850 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_RXK);
854 rtw89_write_rf(rtwdev, path, RR_IQKPLL, RR_IQKPLL_MOD, 0x0);
855 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, bkrf0);
868 static bool _iqk_nbrxk(struct rtw89_dev *rtwdev,
871 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
877 bkrf0 = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_NBW);
879 rtw89_write_rf(rtwdev, RF_PATH_B, RR_IQKPLL, RR_IQKPLL_MOD, 0x3);
880 tmp = rtw89_read_rf(rtwdev, RF_PATH_B, RR_CHTR, RR_CHTR_MOD);
881 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV4, RR_RSV4_AGH, tmp);
882 tmp = rtw89_read_rf(rtwdev, RF_PATH_B, RR_CHTR, RR_CHTR_TXRX);
883 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV4, RR_RSV4_PLLCH, tmp);
889 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
890 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0);
891 rtw89_write_rf(rtwdev, path, RR_RXG, RR_RXG_IQKMOD, 0x9);
894 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
895 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0);
896 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x8);
899 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
900 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, 0x0);
901 rtw89_write_rf(rtwdev, path, RR_RXAE, RR_RXAE_IQKMOD, 0x9);
910 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, _rxk_g_idxrxgain[gp]);
911 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_VOBUF, _rxk_g_idxattc2[gp]);
914 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, _rxk_a_idxrxgain[gp]);
915 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT, _rxk_a_idxattc2[gp]);
918 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXG, _rxk_a6_idxrxgain[gp]);
919 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_IATT, _rxk_a6_idxattc2[gp]);
923 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1);
924 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x0);
925 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP_V1, gp);
926 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_RXK);
929 rtw89_write_rf(rtwdev, path, RR_IQKPLL, RR_IQKPLL_MOD, 0x0);
931 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_NBW, bkrf0);
935 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8),
944 static bool _txk_group_sel(struct rtw89_dev *rtwdev,
947 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
954 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
956 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
958 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
960 rtw89_phy_write32_mask(rtwdev,
965 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
967 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
969 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
971 rtw89_phy_write32_mask(rtwdev,
976 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
978 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
980 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
982 rtw89_phy_write32_mask(rtwdev,
989 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
991 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
993 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
995 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
997 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_TXT, 0x00b);
998 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, B_NCTL_N1_CIP, 0x00);
999 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_TXK);
1013 static bool _iqk_nbtxk(struct rtw89_dev *rtwdev,
1016 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1022 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, _txk_g_power_range[gp]);
1023 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, _txk_g_track_range[gp]);
1024 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, _txk_g_gain_bb[gp]);
1025 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1029 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, _txk_a_power_range[gp]);
1030 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, _txk_a_track_range[gp]);
1031 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, _txk_a_gain_bb[gp]);
1032 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1036 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, _txk_a6_power_range[gp]);
1037 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, _txk_a6_track_range[gp]);
1038 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, _txk_a6_gain_bb[gp]);
1039 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1046 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1);
1047 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x1);
1048 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G2, 0x0);
1049 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP, gp + 1);
1050 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_TXT, 0x00b);
1051 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, B_NCTL_N1_CIP, 0x00);
1052 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_NBTXK);
1056 rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8),
1066 static bool _lok_finetune_check(struct rtw89_dev *rtwdev, u8 path)
1068 struct rtw89_rfk_mcc_info_data *rfk_mcc = rtwdev->rfk_mcc.data;
1069 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1078 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]===>%s\n", __func__);
1079 val = rtw89_read_rf(rtwdev, path, RR_TXMO, RFREG_MASK);
1090 val = rtw89_read_rf(rtwdev, path, RR_LOKVB, RFREG_MASK);
1104 static bool _iqk_lok(struct rtw89_dev *rtwdev,
1107 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1113 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_TXT, IQK_DF4_TXT_8_25MHZ);
1118 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6);
1119 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1124 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6);
1125 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1130 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6);
1131 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1138 tmp = _iqk_one_shot(rtwdev, phy_idx, path, tmp_id);
1144 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1145 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1149 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1150 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1154 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1155 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1161 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER);
1166 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6);
1167 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1172 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6);
1173 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1178 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x6);
1179 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1186 tmp = _iqk_one_shot(rtwdev, phy_idx, path, tmp_id);
1193 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1194 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1198 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1199 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1203 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1204 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1208 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER);
1209 fail = _lok_finetune_check(rtwdev, path);
1214 static void _iqk_txk_setting(struct rtw89_dev *rtwdev, u8 path)
1216 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1221 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT2, 0x0);
1222 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT1, 0x0);
1223 rtw89_write_rf(rtwdev, path, RR_TXG2, RR_TXG2_ATT0, 0x1);
1224 rtw89_write_rf(rtwdev, path, RR_TXA2, RR_TXA2_LDO, 0xf);
1225 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1226 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1227 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK,
1230 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1231 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6);
1234 rtw89_write_rf(rtwdev, path, RR_TXATANK, RR_TXATANK_LBSW2, 0x0);
1235 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXAS, 0x1);
1236 rtw89_write_rf(rtwdev, path, RR_TXA2, RR_TXA2_LDO, 0xf);
1237 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1238 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1239 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK,
1242 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1243 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6);
1246 rtw89_write_rf(rtwdev, path, RR_TXATANK, RR_TXATANK_LBSW2, 0x0);
1247 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXAS, 0x1);
1248 rtw89_write_rf(rtwdev, path, RR_TXA2, RR_TXA2_LDO, 0xf);
1249 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1250 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1251 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK,
1254 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1255 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6);
1260 static void _iqk_info_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
1263 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1267 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_thermal = %lu\n", path,
1268 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]));
1269 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_LOK_COR_fail= %d\n", path,
1271 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_LOK_FIN_fail= %d\n", path,
1273 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_TXIQK_fail = %d\n", path,
1275 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%d_RXIQK_fail= %d,\n", path,
1279 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FCOR << (path * 4), flag);
1281 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FFIN << (path * 4), flag);
1283 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FTX << (path * 4), flag);
1285 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_F_RX << (path * 4), flag);
1287 tmp = rtw89_phy_read32_mask(rtwdev, R_IQK_RES + (path << 8), MASKDWORD);
1289 tmp = rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD);
1291 tmp = rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD);
1294 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_KCNT,
1297 tmp = rtw89_phy_read32_mask(rtwdev, R_IQKINF, B_IQKINF_FAIL << (path * 4));
1300 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_FCNT << (path * 4),
1304 static void _iqk_by_path(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1306 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1308 _iqk_txk_setting(rtwdev, path);
1309 iqk_info->lok_fail[path] = _iqk_lok(rtwdev, phy_idx, path);
1312 iqk_info->iqk_tx_fail[0][path] = _iqk_nbtxk(rtwdev, phy_idx, path);
1314 iqk_info->iqk_tx_fail[0][path] = _txk_group_sel(rtwdev, phy_idx, path);
1316 _iqk_rxk_setting(rtwdev, path);
1318 iqk_info->iqk_rx_fail[0][path] = _iqk_nbrxk(rtwdev, phy_idx, path);
1320 iqk_info->iqk_rx_fail[0][path] = _rxk_group_sel(rtwdev, phy_idx, path);
1322 _iqk_info_iqk(rtwdev, phy_idx, path);
1325 static void _iqk_get_ch_info(struct rtw89_dev *rtwdev,
1329 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
1330 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1332 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]===>%s\n", __func__);
1338 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1341 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]iqk_info->iqk_bw[%x] = 0x%x\n",
1343 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]iqk_info->iqk_ch[%x] = 0x%x\n",
1345 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1347 rtwdev->dbcc_en ? "on" : "off",
1353 if (!rtwdev->dbcc_en)
1358 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_VER, RTW8852C_IQK_VER);
1359 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BAND << (path * 16),
1361 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BW << (path * 16),
1363 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_CH << (path * 16),
1366 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_NCTLV, RTW8852C_NCTL_VER);
1369 static void _iqk_start_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
1372 _iqk_by_path(rtwdev, phy_idx, path);
1375 static void _iqk_restore(struct rtw89_dev *rtwdev, u8 path)
1377 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1380 rtw89_phy_write32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD,
1382 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD,
1384 rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD,
1387 fail = _iqk_check_cal(rtwdev, path, 0x12);
1388 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK] restore fail = %x\n", fail);
1390 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, B_NCTL_N1_CIP, 0x00);
1391 rtw89_phy_write32_mask(rtwdev, R_NCTL_RPT, MASKDWORD, 0x00000000);
1392 rtw89_phy_write32_mask(rtwdev, R_KIP_SYSCFG, MASKDWORD, 0x80000000);
1394 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x0);
1395 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
1396 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
1399 static void _iqk_afebb_restore(struct rtw89_dev *rtwdev,
1402 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
1406 rtw8852c_disable_rxagc(rtwdev, path, 0x1);
1409 static void _iqk_preset(struct rtw89_dev *rtwdev, u8 path)
1411 struct rtw89_rfk_mcc_info_data *rfk_mcc = rtwdev->rfk_mcc.data;
1415 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), B_COEF_SEL_IQC, idx);
1416 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G3, idx);
1417 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
1418 rtw89_phy_write32_mask(rtwdev, R_NCTL_RPT, MASKDWORD, 0x00000080);
1419 rtw89_phy_write32_mask(rtwdev, R_KIP_SYSCFG, MASKDWORD, 0x81ff010a);
1422 static void _iqk_macbb_setting(struct rtw89_dev *rtwdev,
1425 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]===> %s\n", __func__);
1428 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0);
1429 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x1);
1430 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x0);
1431 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x1);
1432 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x0);
1435 rtw8852c_disable_rxagc(rtwdev, path, 0x0);
1436 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), MASKDWORD, 0xf801fffd);
1437 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_DPD_DIS, 0x1);
1438 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_DAC_VAL, 0x1);
1440 rtw8852c_txck_force(rtwdev, path, true, DAC_960M);
1441 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_DPD_GDIS, 0x1);
1443 rtw8852c_rxck_force(rtwdev, path, true, ADC_1920M);
1444 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK | (path << 13), B_ACK_VAL, 0x2);
1446 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW | (path << 13), B_P0_NRBW_DBG, 0x1);
1447 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x1f);
1448 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x13);
1449 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0x0001);
1450 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0x0041);
1451 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x1);
1452 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x1);
1455 static void _rck(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
1461 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] ====== S%d RCK ======\n", path);
1463 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK);
1465 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
1466 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
1468 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] RF0x00 = 0x%x\n",
1469 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK));
1472 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, 0x00240);
1475 false, rtwdev, path, 0x1c, BIT(3));
1477 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] RCK timeout\n");
1479 rck_val = rtw89_read_rf(rtwdev, path, RR_RCKC, RR_RCKC_CA);
1480 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, rck_val);
1482 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5);
1484 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1486 rtw89_read_rf(rtwdev, path, RR_RCKC, RFREG_MASK),
1487 rtw89_read_rf(rtwdev, path, RR_RCKS, RFREG_MASK));
1490 static void _iqk_init(struct rtw89_dev *rtwdev)
1492 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1495 rtw89_phy_write32_clr(rtwdev, R_IQKINF, MASKDWORD);
1499 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]===>%s\n", __func__);
1521 static void _doiqk(struct rtw89_dev *rtwdev, bool force,
1525 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1528 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB, chanctx_idx);
1530 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_ONESHOT_START);
1532 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1537 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]Test Ver 0x%x\n", iqk_info->version);
1538 _iqk_get_ch_info(rtwdev, phy_idx, path, chanctx_idx);
1539 _rfk_backup_bb_reg(rtwdev, backup_bb_val);
1540 _rfk_backup_rf_reg(rtwdev, backup_rf_val[path], path);
1541 _iqk_macbb_setting(rtwdev, phy_idx, path);
1542 _iqk_preset(rtwdev, path);
1543 _iqk_start_iqk(rtwdev, phy_idx, path);
1544 _iqk_restore(rtwdev, path);
1545 _iqk_afebb_restore(rtwdev, phy_idx, path);
1546 _rfk_restore_bb_reg(rtwdev, backup_bb_val);
1547 _rfk_restore_rf_reg(rtwdev, backup_rf_val[path], path);
1548 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_ONESHOT_STOP);
1551 static void _iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, bool force,
1554 switch (_kpath(rtwdev, phy_idx)) {
1556 _doiqk(rtwdev, force, phy_idx, RF_PATH_A, chanctx_idx);
1559 _doiqk(rtwdev, force, phy_idx, RF_PATH_B, chanctx_idx);
1562 _doiqk(rtwdev, force, phy_idx, RF_PATH_A, chanctx_idx);
1563 _doiqk(rtwdev, force, phy_idx, RF_PATH_B, chanctx_idx);
1570 static void _rx_dck_value_rewrite(struct rtw89_dev *rtwdev, u8 path, u8 addr,
1575 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1582 rtw89_write_rf(rtwdev, path, RR_LUTPLL, RR_CAL_RW, 0x1);
1583 rtw89_write_rf(rtwdev, path, RR_RFC, RR_WCAL, 0x1);
1584 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, 0x1);
1585 rtw89_write_rf(rtwdev, path, RR_LUTWA, MASKBYTE0, addr);
1586 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, ofst_val);
1587 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, ofst_val);
1588 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, 0x0);
1589 rtw89_write_rf(rtwdev, path, RR_RFC, RR_WCAL, 0x0);
1590 rtw89_write_rf(rtwdev, path, RR_LUTPLL, RR_CAL_RW, 0x0);
1592 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RX_DCK] Final val_i = 0x%x, val_q = 0x%x\n",
1597 static bool _rx_dck_rek_check(struct rtw89_dev *rtwdev, u8 path)
1607 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i]);
1608 i_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1609 q_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1610 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1614 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i]);
1615 i_even = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1616 q_even = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1617 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1624 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i] + 1);
1625 i_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1626 q_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1627 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1631 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i] + 1);
1632 i_odd = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1633 q_odd = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1634 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1645 static void _rx_dck_fix_if_need(struct rtw89_dev *rtwdev, u8 path, u8 addr,
1651 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RX_DCK] offset check PASS!!\n");
1656 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1662 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1667 _rx_dck_value_rewrite(rtwdev, path, addr, val_i, val_q);
1670 static void _rx_dck_recover(struct rtw89_dev *rtwdev, u8 path)
1678 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RX_DCK] ===> recovery\n");
1681 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i]);
1682 i_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1683 q_even_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1685 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr_bs[i] + 1);
1686 i_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1687 q_odd_bs = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1689 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1693 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i]);
1694 i_even = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1695 q_even = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1697 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1700 _rx_dck_fix_if_need(rtwdev, path, _dck_addr[i],
1703 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1707 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_DCK, _dck_addr[i] + 1);
1708 i_odd = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_TIA);
1709 q_odd = rtw89_read_rf(rtwdev, path, RR_DCK1, RR_DCK1_TIA);
1711 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1714 _rx_dck_fix_if_need(rtwdev, path, _dck_addr[i] + 1,
1719 static void _rx_dck_toggle(struct rtw89_dev *rtwdev, u8 path)
1724 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x0);
1725 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x1);
1728 2, 2000, false, rtwdev, path,
1731 rtw89_warn(rtwdev, "[RX_DCK] S%d RXDCK timeout\n", path);
1733 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RX_DCK] S%d RXDCK finish\n", path);
1735 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x0);
1738 static void _set_rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, u8 path,
1743 rtw89_write_rf(rtwdev, path, RR_DCK1, RR_DCK1_CLR, 0x0);
1745 _rx_dck_toggle(rtwdev, path);
1746 if (rtw89_read_rf(rtwdev, path, RR_DCKC, RR_DCKC_CHK) == 0)
1748 res = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_DONE);
1750 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_IDAC, res);
1751 _rx_dck_toggle(rtwdev, path);
1752 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_IDAC, 0x1);
1757 u8 _rx_dck_channel_calc(struct rtw89_dev *rtwdev, const struct rtw89_chan *chan)
1780 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1834 static void _rf_direct_cntrl(struct rtw89_dev *rtwdev,
1838 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
1840 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
1843 static void _dpk_onoff(struct rtw89_dev *rtwdev,
1846 static void _dpk_bkup_kip(struct rtw89_dev *rtwdev, const u32 reg[],
1853 rtw89_phy_read32_mask(rtwdev, reg[i] + (path << 8), MASKDWORD);
1855 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Backup 0x%x = %x\n",
1860 static void _dpk_reload_kip(struct rtw89_dev *rtwdev, const u32 reg[],
1866 rtw89_phy_write32_mask(rtwdev, reg[i] + (path << 8),
1868 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Reload 0x%x = %x\n",
1873 static u8 _dpk_one_shot(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
1882 rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD, dpk_cmd);
1885 10, 20000, false, rtwdev, 0xbff8, MASKBYTE0);
1887 rtw89_phy_write32_clr(rtwdev, R_NCTL_N1, MASKBYTE0);
1889 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1899 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1907 static void _dpk_information(struct rtw89_dev *rtwdev,
1911 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
1912 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1920 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1923 rtwdev->is_tssi_mode[path] ? "on" : "off",
1924 rtwdev->dbcc_en ? "on" : "off",
1932 static void _dpk_bb_afe_setting(struct rtw89_dev *rtwdev,
1937 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x1);
1938 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x0);
1939 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x1);
1940 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x0);
1943 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), MASKDWORD, 0xd801dffd);
1946 rtw8852c_txck_force(rtwdev, path, true, DAC_960M);
1949 rtw8852c_rxck_force(rtwdev, path, true, ADC_1920M);
1950 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13),
1952 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, MASKBYTE3, 0x1f);
1953 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, MASKBYTE3, 0x13);
1954 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, MASKHWORD, 0x0001);
1955 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, MASKHWORD, 0x0041);
1958 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x1);
1959 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x1);
1961 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d BB/AFE setting\n", path);
1964 static void _dpk_bb_afe_restore(struct rtw89_dev *rtwdev, u8 path)
1966 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW + (path << 13),
1968 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x1);
1969 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A1 << path, 0x0);
1970 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x1);
1971 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A3 << path, 0x0);
1972 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), MASKDWORD, 0x00000000);
1973 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK + (path << 13), B_P0_TXCK_ALL, 0x00);
1974 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A0 << path, 0x0);
1975 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO, B_ADC_FIFO_A2 << path, 0x0);
1977 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d BB/AFE restore\n", path);
1980 static void _dpk_tssi_pause(struct rtw89_dev *rtwdev,
1983 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
1986 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d TSSI %s\n", path,
1990 static void _dpk_kip_control_rfc(struct rtw89_dev *rtwdev, u8 path, bool ctrl_by_kip)
1992 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK + (path << 13), B_IQK_RFC_ON, ctrl_by_kip);
1993 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] RFC is controlled by %s\n",
1997 static void _dpk_txpwr_bb_force(struct rtw89_dev *rtwdev, u8 path, bool force)
1999 rtw89_phy_write32_mask(rtwdev, R_TXPWRB + (path << 13), B_TXPWRB_ON, force);
2000 rtw89_phy_write32_mask(rtwdev, R_TXPWRB_H + (path << 13), B_TXPWRB_RDY, force);
2002 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d txpwr_bb_force %s\n",
2006 static void _dpk_kip_restore(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2009 _dpk_one_shot(rtwdev, phy, path, D_KIP_RESTORE);
2010 _dpk_kip_control_rfc(rtwdev, path, false);
2011 _dpk_txpwr_bb_force(rtwdev, path, false);
2012 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d restore KIP\n", path);
2015 static void _dpk_lbk_rxiqk(struct rtw89_dev *rtwdev,
2023 rtw89_phy_write32_mask(rtwdev, R_DPD_V1 + (path << 8), B_DPD_LBK, 0x1);
2024 rtw89_phy_write32_mask(rtwdev, R_MDPK_RX_DCK, B_MDPK_RX_DCK_EN, 0x1);
2026 _dpk_kip_control_rfc(rtwdev, path, false);
2028 cur_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB);
2029 rf_11 = rtw89_read_rf(rtwdev, path, RR_TXIG, RFREG_MASK);
2030 reg_81cc = rtw89_phy_read32_mask(rtwdev, R_KIP_IQP + (path << 8),
2033 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
2034 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x3);
2035 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0xd);
2036 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB, 0x1f);
2038 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), B_KIP_IQP_IQSW, 0x12);
2039 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), B_KIP_IQP_SW, 0x3);
2041 _dpk_kip_control_rfc(rtwdev, path, true);
2043 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, MASKDWORD, RX_TONE_IDX);
2045 _dpk_one_shot(rtwdev, phy, path, LBK_RXIQK);
2047 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d LBK RXIQC = 0x%x\n", path,
2048 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD));
2050 _dpk_kip_control_rfc(rtwdev, path, false);
2052 rtw89_write_rf(rtwdev, path, RR_TXIG, RFREG_MASK, rf_11);
2053 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB, cur_rxbb);
2054 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), B_KIP_IQP_SW, reg_81cc);
2056 rtw89_phy_write32_mask(rtwdev, R_MDPK_RX_DCK, B_MDPK_RX_DCK_EN, 0x0);
2057 rtw89_phy_write32_mask(rtwdev, R_KPATH_CFG, B_KPATH_CFG_ED, 0x0);
2058 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_DI, 0x1);
2060 _dpk_kip_control_rfc(rtwdev, path, true);
2063 static void _dpk_rf_setting(struct rtw89_dev *rtwdev, u8 gain,
2066 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2069 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK,
2070 0x50121 | BIT(rtwdev->dbcc_en));
2071 rtw89_write_rf(rtwdev, path, RR_MOD_V1, RR_MOD_MASK, RF_DPK);
2072 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_ATTC, 0x2);
2073 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_ATTR, 0x4);
2074 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1);
2075 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1);
2077 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2079 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK),
2080 rtw89_read_rf(rtwdev, path, RR_RXBB, RFREG_MASK),
2081 rtw89_read_rf(rtwdev, path, RR_TIA, RFREG_MASK),
2082 rtw89_read_rf(rtwdev, path, RR_BTC, RFREG_MASK),
2083 rtw89_read_rf(rtwdev, path, RR_LUTDBG, RFREG_MASK),
2084 rtw89_read_rf(rtwdev, path, 0x1001a, RFREG_MASK));
2086 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK,
2087 0x50101 | BIT(rtwdev->dbcc_en));
2088 rtw89_write_rf(rtwdev, path, RR_MOD_V1, RR_MOD_MASK, RF_DPK);
2091 rtw89_write_rf(rtwdev, path, RR_IQGEN, RR_IQGEN_BIAS, 0x8);
2093 rtw89_write_rf(rtwdev, path, RR_LOGEN, RR_LOGEN_RPT, 0xd);
2094 rtw89_write_rf(rtwdev, path, RR_TXAC, RR_TXAC_IQG, 0x8);
2096 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_ATT, 0x0);
2097 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT2, 0x3);
2098 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1);
2099 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1);
2102 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_RXBB2_EBW, 0x0);
2106 static void _dpk_tpg_sel(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
2108 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2111 rtw89_phy_write32_mask(rtwdev, R_TPG_MOD, B_TPG_MOD_F, 0x3);
2112 rtw89_phy_write32_mask(rtwdev, R_TPG_SEL, MASKDWORD, 0x0180ff30);
2114 rtw89_phy_write32_mask(rtwdev, R_TPG_MOD, B_TPG_MOD_F, 0x0);
2115 rtw89_phy_write32_mask(rtwdev, R_TPG_SEL, MASKDWORD, 0xffe0fa00);
2117 rtw89_phy_write32_mask(rtwdev, R_TPG_MOD, B_TPG_MOD_F, 0x2);
2118 rtw89_phy_write32_mask(rtwdev, R_TPG_SEL, MASKDWORD, 0xff4009e0);
2120 rtw89_phy_write32_mask(rtwdev, R_TPG_MOD, B_TPG_MOD_F, 0x1);
2121 rtw89_phy_write32_mask(rtwdev, R_TPG_SEL, MASKDWORD, 0xf9f007d0);
2123 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] TPG_Select for %s\n",
2129 static bool _dpk_sync_check(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
2134 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2139 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x0);
2141 corr_idx = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_CORI);
2142 corr_val = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_CORV);
2147 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x9);
2149 dc_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCI);
2150 dc_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCQ);
2155 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2162 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x8);
2163 rxbb = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_RXBB);
2165 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x31);
2166 rxbb_ov = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_RXOV);
2168 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2171 rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DONE),
2181 static u16 _dpk_dgain_read(struct rtw89_dev *rtwdev)
2185 rtw89_phy_write32_clr(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL);
2187 dgain = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCI);
2189 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] DGain = 0x%x (%d)\n", dgain, dgain);
2194 static u8 _dpk_gainloss_read(struct rtw89_dev *rtwdev)
2198 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x6);
2199 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG2, B_DPK_CFG2_ST, 0x1);
2201 result = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_GL);
2203 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] tmp GL = %d\n", result);
2208 static void _dpk_kset_query(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
2210 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2212 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT + (path << 8), B_KIP_RPT_SEL, 0x10);
2214 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), 0xE0000000) - 1;
2217 static void _dpk_kip_set_txagc(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2222 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] set S%d txagc to %ddBm\n", path, dbm);
2223 rtw89_phy_write32_mask(rtwdev, R_TXPWRB + (path << 13), B_TXPWRB_VAL, dbm << 2);
2225 _dpk_one_shot(rtwdev, phy, path, D_TXAGC);
2226 _dpk_kset_query(rtwdev, path);
2229 static u8 _dpk_gainloss(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2232 _dpk_one_shot(rtwdev, phy, path, D_GAIN_LOSS);
2233 _dpk_kip_set_txagc(rtwdev, phy, path, 0xff, false);
2235 rtw89_phy_write32_mask(rtwdev, R_DPK_GL + (path << 8), B_DPK_GL_A1, 0x0);
2236 rtw89_phy_write32_mask(rtwdev, R_DPK_GL + (path << 8), B_DPK_GL_A0, 0x0);
2238 return _dpk_gainloss_read(rtwdev);
2241 static enum dpk_pas_result _dpk_pas_read(struct rtw89_dev *rtwdev, bool is_check)
2247 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, MASKBYTE2, 0x06);
2248 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG2, B_DPK_CFG2_ST, 0x0);
2249 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE2, 0x08);
2252 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE3, 0x00);
2253 val1_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKHWORD);
2255 val1_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKLWORD);
2258 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE3, 0x1f);
2259 val2_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKHWORD);
2261 val2_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKLWORD);
2264 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] PAS_delta = 0x%x\n",
2269 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE3, i);
2270 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] PAS_Read[%02d]= 0x%08x\n", i,
2271 rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKDWORD));
2286 static bool _dpk_kip_set_rxagc(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2289 _dpk_kip_control_rfc(rtwdev, path, false);
2290 rtw89_phy_write32_mask(rtwdev, R_KIP_MOD, B_KIP_MOD,
2291 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK));
2292 _dpk_kip_control_rfc(rtwdev, path, true);
2294 _dpk_one_shot(rtwdev, phy, path, D_RXAGC);
2296 return _dpk_sync_check(rtwdev, path, kidx);
2299 static void _dpk_read_rxsram(struct rtw89_dev *rtwdev)
2303 rtw89_rfk_parser(rtwdev, &rtw8852c_read_rxsram_pre_defs_tbl);
2306 rtw89_phy_write32_mask(rtwdev, R_SRAM_IQRX, MASKDWORD, 0x00010000 | addr);
2308 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] RXSRAM[%03d] = 0x%07x\n", addr,
2309 rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKDWORD));
2312 rtw89_rfk_parser(rtwdev, &rtw8852c_read_rxsram_post_defs_tbl);
2315 static void _dpk_bypass_rxiqc(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
2317 rtw89_phy_write32_mask(rtwdev, R_DPD_V1 + (path << 8), B_DPD_LBK, 0x1);
2318 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD, 0x40000002);
2320 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Bypass RXIQC\n");
2323 static u8 _dpk_agc(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2326 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2339 is_fail = _dpk_kip_set_rxagc(rtwdev, phy, path, kidx);
2342 _dpk_read_rxsram(rtwdev);
2349 dgain = _dpk_dgain_read(rtwdev);
2352 _dpk_one_shot(rtwdev, phy, path, D_SYNC);
2353 _dpk_dgain_read(rtwdev);
2358 _dpk_bypass_rxiqc(rtwdev, path);
2360 _dpk_lbk_rxiqk(rtwdev, phy, path);
2366 tmp_gl_idx = _dpk_gainloss(rtwdev, phy, path, kidx);
2367 pas = _dpk_pas_read(rtwdev, true);
2384 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Txagc@lower bound!!\n");
2387 _dpk_kip_set_txagc(rtwdev, phy, path, tmp_dbm, true);
2396 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Txagc@upper bound!!\n");
2399 _dpk_kip_set_txagc(rtwdev, phy, path, tmp_dbm, true);
2406 _dpk_kip_control_rfc(rtwdev, path, false);
2407 tmp_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB);
2413 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_M_RXBB, tmp_rxbb);
2414 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Adjust RXBB (%+d) = 0x%x\n",
2416 _dpk_kip_control_rfc(rtwdev, path, true);
2426 rtw89_warn(rtwdev, "[DPK] exceed loop limit\n");
2431 static void _dpk_set_mdpd_para(struct rtw89_dev *rtwdev, u8 order)
2441 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Wrong MDPD order!!(0x%x)\n", order);
2445 rtw89_rfk_parser(rtwdev, order_tbls[order]);
2447 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Set %s for IDL\n",
2453 static void _dpk_idl_mpa(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2456 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2461 rtw89_phy_write32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_MA, 0x1);
2463 if (rtw89_phy_read32_mask(rtwdev, R_DPK_MPA, B_DPK_MPA_T2) == 0x1)
2464 _dpk_set_mdpd_para(rtwdev, 0x2);
2465 else if (rtw89_phy_read32_mask(rtwdev, R_DPK_MPA, B_DPK_MPA_T1) == 0x1)
2466 _dpk_set_mdpd_para(rtwdev, 0x1);
2467 else if (rtw89_phy_read32_mask(rtwdev, R_DPK_MPA, B_DPK_MPA_T0) == 0x1)
2468 _dpk_set_mdpd_para(rtwdev, 0x0);
2472 _dpk_set_mdpd_para(rtwdev, 0x2);
2475 _dpk_set_mdpd_para(rtwdev, 0x1);
2477 _dpk_set_mdpd_para(rtwdev, 0x0);
2479 rtw89_phy_write32_mask(rtwdev, R_DPK_IDL, B_DPK_IDL, 0x0);
2482 _dpk_one_shot(rtwdev, phy, path, D_MDPK_IDL);
2483 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x0);
2484 dpk_sync = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKDWORD);
2485 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] dpk_sync = 0x%x\n", dpk_sync);
2487 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0xf);
2488 ov_flag = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_SYNERR);
2490 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] ReK due to MDPK ov!!!\n");
2491 _dpk_one_shot(rtwdev, phy, path, D_MDPK_IDL);
2492 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0xf);
2493 ov_flag = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_SYNERR);
2497 _dpk_set_mdpd_para(rtwdev, 0x2);
2498 _dpk_one_shot(rtwdev, phy, path, D_MDPK_IDL);
2502 static bool _dpk_reload_check(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2505 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
2506 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2518 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8),
2522 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2529 static void _dpk_kip_pwr_clk_onoff(struct rtw89_dev *rtwdev, bool turn_on)
2531 rtw89_rfk_parser(rtwdev, turn_on ? &rtw8852c_dpk_kip_pwr_clk_on_defs_tbl :
2535 static void _dpk_kip_preset_8852c(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2538 rtw89_phy_write32_mask(rtwdev, R_KIP_MOD, B_KIP_MOD,
2539 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK));
2541 if (rtwdev->hal.cv == CHIP_CAV)
2542 rtw89_phy_write32_mask(rtwdev,
2546 rtw89_phy_write32_mask(rtwdev,
2550 _dpk_kip_control_rfc(rtwdev, path, true);
2551 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), B_COEF_SEL_MDPD, kidx);
2553 _dpk_one_shot(rtwdev, phy, path, D_KIP_PRESET);
2556 static void _dpk_para_query(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
2560 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2563 para = rtw89_phy_read32_mask(rtwdev, dpk_par_regs[kidx][dpk->cur_k_set] + (path << 8),
2569 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] thermal/ txagc_RF (K%d) = 0x%x/ 0x%x\n",
2573 static void _dpk_gain_normalize_8852c(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2576 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2579 rtw89_phy_write32_mask(rtwdev, R_DPK_GN + (path << 8), B_DPK_GN_AG, 0x200);
2580 rtw89_phy_write32_mask(rtwdev, R_DPK_GN + (path << 8), B_DPK_GN_EN, 0x3);
2582 _dpk_one_shot(rtwdev, phy, path, D_GAIN_NORM);
2584 rtw89_phy_write32_mask(rtwdev, dpk_par_regs[kidx][dpk->cur_k_set] + (path << 8),
2588 rtw89_phy_read32_mask(rtwdev, dpk_par_regs[kidx][dpk->cur_k_set] + (path << 8),
2592 static u8 _dpk_order_convert(struct rtw89_dev *rtwdev)
2594 u32 val32 = rtw89_phy_read32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_OP);
2615 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] convert MDPD order to 0x%x\n", val);
2620 static void _dpk_on(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2623 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2625 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x1);
2626 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x0);
2627 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2628 B_DPD_ORDER, _dpk_order_convert(rtwdev));
2633 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] path_ok = 0x%x\n",
2636 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2639 _dpk_gain_normalize_8852c(rtwdev, phy, path, kidx, false);
2642 static bool _dpk_main(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2645 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2650 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2652 _dpk_kip_control_rfc(rtwdev, path, false);
2653 _rf_direct_cntrl(rtwdev, path, false);
2654 rtw89_write_rf(rtwdev, path, RR_BBDC, RFREG_MASK, 0x03ffd);
2655 _dpk_rf_setting(rtwdev, gain, path, kidx);
2656 _set_rx_dck(rtwdev, phy, path, false);
2657 _dpk_kip_pwr_clk_onoff(rtwdev, true);
2658 _dpk_kip_preset_8852c(rtwdev, phy, path, kidx);
2659 _dpk_txpwr_bb_force(rtwdev, path, true);
2660 _dpk_kip_set_txagc(rtwdev, phy, path, init_xdbm, true);
2661 _dpk_tpg_sel(rtwdev, path, kidx);
2663 is_fail = _dpk_agc(rtwdev, phy, path, kidx, init_xdbm, false);
2667 _dpk_idl_mpa(rtwdev, phy, path, kidx);
2668 _dpk_para_query(rtwdev, path, kidx);
2669 _dpk_on(rtwdev, phy, path, kidx);
2672 _dpk_kip_control_rfc(rtwdev, path, false);
2673 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RF_RX);
2674 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d]_K%d %s\n", path, kidx,
2680 static void _dpk_init(struct rtw89_dev *rtwdev, u8 path)
2682 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2688 static void _dpk_drf_direct_cntrl(struct rtw89_dev *rtwdev, u8 path, bool is_bybb)
2691 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x1);
2693 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0);
2696 static void _dpk_cal_select(struct rtw89_dev *rtwdev, bool force,
2700 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2718 reloaded[path] = _dpk_reload_check(rtwdev, phy, path,
2723 _dpk_onoff(rtwdev, path, false);
2731 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2734 _dpk_bkup_kip(rtwdev, kip_reg, kip_bkup, path);
2735 _rfk_backup_rf_reg(rtwdev, backup_rf_val[path], path);
2736 _dpk_information(rtwdev, phy, path, chanctx_idx);
2737 _dpk_init(rtwdev, path);
2738 if (rtwdev->is_tssi_mode[path])
2739 _dpk_tssi_pause(rtwdev, path, true);
2743 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2746 rtw8852c_disable_rxagc(rtwdev, path, 0x0);
2747 _dpk_drf_direct_cntrl(rtwdev, path, false);
2748 _dpk_bb_afe_setting(rtwdev, phy, path, kpath);
2749 is_fail = _dpk_main(rtwdev, phy, path, 1);
2750 _dpk_onoff(rtwdev, path, is_fail);
2754 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2757 _dpk_kip_restore(rtwdev, phy, path);
2758 _dpk_reload_kip(rtwdev, kip_reg, kip_bkup, path);
2759 _rfk_restore_rf_reg(rtwdev, backup_rf_val[path], path);
2760 _dpk_bb_afe_restore(rtwdev, path);
2761 rtw8852c_disable_rxagc(rtwdev, path, 0x1);
2762 if (rtwdev->is_tssi_mode[path])
2763 _dpk_tssi_pause(rtwdev, path, false);
2766 _dpk_kip_pwr_clk_onoff(rtwdev, false);
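_dpk_cal_select brackets the per-path calibration with a backup/restore pair: KIP and RF registers are saved before _dpk_main runs and written back afterwards (kip_bkup and backup_rf_val in the listing). A compact sketch of that pattern over a tiny fake register file; reg_read/reg_write are stand-ins, not driver APIs:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Fake register file so the sketch runs stand-alone. */
static uint32_t regs[0x100];
static uint32_t reg_read(uint32_t addr)               { return regs[addr & 0xff]; }
static void     reg_write(uint32_t addr, uint32_t val) { regs[addr & 0xff] = val; }

static void backup_regs(const uint32_t *addrs, uint32_t *vals, size_t n)
{
	for (size_t i = 0; i < n; i++)
		vals[i] = reg_read(addrs[i]);
}

static void restore_regs(const uint32_t *addrs, const uint32_t *vals, size_t n)
{
	for (size_t i = 0; i < n; i++)
		reg_write(addrs[i], vals[i]);
}

int main(void)
{
	const uint32_t addrs[] = { 0x10, 0x14, 0x18 };
	uint32_t saved[3];

	backup_regs(addrs, saved, 3);
	reg_write(0x14, 0xdead);	/* the "calibration" clobbers a register */
	restore_regs(addrs, saved, 3);
	printf("0x14 restored to 0x%x\n", (unsigned int)reg_read(0x14));
	return 0;
}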
2769 static bool _dpk_bypass_check(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2772 struct rtw89_fem_info *fem = &rtwdev->fem;
2773 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
2776 if (rtwdev->hal.cv == CHIP_CAV && band != RTW89_BAND_2G) {
2777 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Skip DPK due to CAV & not 2G!!\n");
2780 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Skip DPK due to 2G_ext_PA exist!!\n");
2783 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Skip DPK due to 5G_ext_PA exist!!\n");
2786 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Skip DPK due to 6G_ext_PA exist!!\n");
2793 static void _dpk_force_bypass(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
2797 kpath = _kpath(rtwdev, phy);
2801 _dpk_onoff(rtwdev, path, true);
2805 static void _dpk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, bool force,
2808 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2810 RTW8852C_DPK_VER, rtwdev->hal.cv,
2813 if (_dpk_bypass_check(rtwdev, phy, chanctx_idx))
2814 _dpk_force_bypass(rtwdev, phy);
2816 _dpk_cal_select(rtwdev, force, phy, _kpath(rtwdev, phy), chanctx_idx);
2818 if (rtw89_read_rf(rtwdev, RF_PATH_A, RR_DCKC, RR_DCKC_CHK) == 0x1)
2819 rtw8852c_rx_dck(rtwdev, phy, false);
2822 static void _dpk_onoff(struct rtw89_dev *rtwdev,
2825 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2831 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2834 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] DPK %s !!!\n", path,
2838 static void _dpk_track(struct rtw89_dev *rtwdev)
2840 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2850 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2855 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13), 0x0000003f);
2857 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13), MASKBYTE2);
2859 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BTP + (path << 13), B_TXAGC_BTP);
2862 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT + (path << 8), B_KIP_RPT_SEL, 0xf);
2864 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), B_RPT_PER_TH);
2866 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), B_RPT_PER_OF);
2868 rtw89_phy_read32_mask(rtwdev, R_RPT_PER + (path << 8), B_RPT_PER_TSSI);
2871 cur_ther = ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
2873 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2881 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2884 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2888 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2891 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2895 if (rtw89_phy_read32_mask(rtwdev, R_DPK_WR, B_DPK_WR_ST) == 0x0 &&
2896 txagc_rf != 0 && rtwdev->hal.cv == CHIP_CAV) {
2897 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2900 rtw89_phy_write32_mask(rtwdev, R_DPD_BND + (path << 8) + (kidx << 2),
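_dpk_track compares the current EWMA-averaged thermal reading against the value captured at calibration time and, when its conditions hold, rewrites the R_DPD_BND boundary field. The exact delta-to-offset math is not visible in this listing; the sketch below only shows the running-average-plus-delta bookkeeping, with an assumed 1/8 smoothing weight rather than the driver's ewma_thermal parameters:

#include <stdio.h>

/* Assumed smoothing: new_avg = old_avg + (sample - old_avg) / 8.
 * The real driver uses the kernel's DECLARE_EWMA machinery instead. */
static int ewma_update(int avg, int sample)
{
	return avg + (sample - avg) / 8;
}

int main(void)
{
	int avg = 40;			/* thermal code captured at DPK time */
	int samples[] = { 44, 46, 47, 48 };
	int i;

	for (i = 0; i < 4; i++) {
		avg = ewma_update(avg, samples[i]);
		printf("avg=%d delta=%d\n", avg, avg - 40);
	}
	return 0;
}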
2906 static void _tssi_set_sys(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2913 rtw89_rfk_parser(rtwdev, &rtw8852c_tssi_sys_defs_tbl);
2928 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ADC_CLK,
2930 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2934 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_ADC_CLK,
2936 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2942 static void _tssi_ini_txpwr_ctrl_bb(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2945 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2950 static void _tssi_ini_txpwr_ctrl_bb_he_tb(struct rtw89_dev *rtwdev,
2954 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2959 static void _tssi_set_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2965 rtw89_rfk_parser(rtwdev, &rtw8852c_tssi_dck_defs_a_tbl);
2966 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2970 rtw89_rfk_parser(rtwdev, &rtw8852c_tssi_dck_defs_b_tbl);
2971 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2977 static void _tssi_set_bbgain_split(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2980 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2985 static void _tssi_set_tmeter_tbl(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2999 struct rtw89_fw_txpwr_track_cfg *trk = rtwdev->fw.elm_info.txpwr_trk;
3000 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3103 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3106 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER_DIS, 0x0);
3107 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER_TRK, 0x1);
3110 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER, 32);
3111 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_VAL, 32);
3114 rtw89_phy_write32(rtwdev, R_P0_TSSI_BASE + i, 0x0);
3116 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3122 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER, thermal);
3123 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_VAL,
3140 rtw89_phy_write32(rtwdev, R_P0_TSSI_BASE + i, tmp);
3142 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3147 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, R_P0_RFCTM_RDY, 0x1);
3148 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, R_P0_RFCTM_RDY, 0x0);
3153 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3156 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER_DIS, 0x0);
3157 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER_TRK, 0x1);
3160 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER, 32);
3161 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, B_P1_RFCTM_VAL, 32);
3164 rtw89_phy_write32(rtwdev, R_TSSI_THOF + i, 0x0);
3166 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3172 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER, thermal);
3173 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, B_P1_RFCTM_VAL,
3190 rtw89_phy_write32(rtwdev, R_TSSI_THOF + i, tmp);
3192 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3197 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, R_P1_RFCTM_RDY, 0x1);
3198 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, R_P1_RFCTM_RDY, 0x0);
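_tssi_set_tmeter_tbl either zeroes the per-path base table (R_P0_TSSI_BASE / R_TSSI_THOF) when no tracking data is available or fills it from the firmware tracking table, then pulses the RFCTM ready bit to latch it. As an assumption about the table layout, the sketch below packs four signed byte offsets into each 32-bit table word; the real packing and the offset values come from struct rtw89_fw_txpwr_track_cfg and are not shown in this listing:

#include <stdint.h>
#include <stdio.h>

/* Assumed layout: one 32-bit table word = four consecutive s8 offsets,
 * least-significant byte first. */
static uint32_t pack_thermal_offsets(const int8_t ofs[4])
{
	return (uint32_t)(uint8_t)ofs[0] |
	       ((uint32_t)(uint8_t)ofs[1] << 8) |
	       ((uint32_t)(uint8_t)ofs[2] << 16) |
	       ((uint32_t)(uint8_t)ofs[3] << 24);
}

int main(void)
{
	const int8_t ofs[4] = { -2, -1, 0, 1 };

	printf("table word = 0x%08x\n", pack_thermal_offsets(ofs));
	return 0;
}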
3203 static void _tssi_slope_cal_org(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3209 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
3213 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
3219 static void _tssi_set_aligk_default(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3242 rtw89_rfk_parser(rtwdev, tbl);
3245 static void _tssi_set_slope(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3248 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
3253 static void _tssi_run_slope(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3256 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
3261 static void _tssi_set_track(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3264 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
3269 static void _tssi_set_txagc_offset_mv_avg(struct rtw89_dev *rtwdev,
3273 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
3278 static void _tssi_enable(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
3280 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3283 if (rtwdev->dbcc_en) {
3294 _tssi_set_track(rtwdev, phy, i);
3295 _tssi_set_txagc_offset_mv_avg(rtwdev, phy, i);
3297 rtw89_rfk_parser_by_cond(rtwdev, i == RF_PATH_A,
3302 ewma_thermal_read(&rtwdev->phystat.avg_thermal[i]);
3303 rtwdev->is_tssi_mode[i] = true;
3307 static void _tssi_disable(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
3311 if (rtwdev->dbcc_en) {
3323 rtw89_rfk_parser(rtwdev, &rtw8852c_tssi_disable_defs_a_tbl);
3324 rtwdev->is_tssi_mode[RF_PATH_A] = false;
3326 rtw89_rfk_parser(rtwdev, &rtw8852c_tssi_disable_defs_b_tbl);
3327 rtwdev->is_tssi_mode[RF_PATH_B] = false;
3332 static u32 _tssi_get_cck_group(struct rtw89_dev *rtwdev, u8 ch)
3358 static u32 _tssi_get_ofdm_group(struct rtw89_dev *rtwdev, u8 ch)
3426 static u32 _tssi_get_6g_ofdm_group(struct rtw89_dev *rtwdev, u8 ch)
3546 static u32 _tssi_get_trim_group(struct rtw89_dev *rtwdev, u8 ch)
3574 static u32 _tssi_get_6g_trim_group(struct rtw89_dev *rtwdev, u8 ch)
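The group-lookup helpers above (_tssi_get_cck_group, _tssi_get_ofdm_group and the 6 GHz/trim variants) map a channel number onto an index into the efuse DE tables; their range tables do not appear in this listing. A generic sketch of that kind of lookup, with entirely hypothetical channel boundaries:

#include <stdio.h>

/* Hypothetical ranges; the real group boundaries live in the driver. */
static unsigned int channel_to_group(unsigned int ch)
{
	static const struct { unsigned int lo, hi, grp; } map[] = {
		{  1,  2, 0 },
		{  3,  5, 1 },
		{  6,  8, 2 },
		{  9, 11, 3 },
		{ 12, 14, 4 },
	};
	unsigned int i;

	for (i = 0; i < sizeof(map) / sizeof(map[0]); i++)
		if (ch >= map[i].lo && ch <= map[i].hi)
			return map[i].grp;
	return 0;
}

int main(void)
{
	printf("ch 7 -> group %u\n", channel_to_group(7));
	return 0;
}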
3630 static s8 _tssi_get_ofdm_de(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3633 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3642 gidx = _tssi_get_ofdm_group(rtwdev, ch);
3644 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3655 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3661 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3665 gidx = _tssi_get_6g_ofdm_group(rtwdev, ch);
3667 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3678 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3684 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3692 static s8 _tssi_get_ofdm_trim_de(struct rtw89_dev *rtwdev,
3696 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3705 tgidx = _tssi_get_trim_group(rtwdev, ch);
3707 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3718 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3724 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3729 tgidx = _tssi_get_6g_trim_group(rtwdev, ch);
3731 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3742 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3748 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3757 static void _tssi_set_efuse_to_de(struct rtw89_dev *rtwdev,
3760 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3768 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "[TSSI][TRIM]: phy=%d ch=%d\n",
3771 if (rtwdev->dbcc_en) {
3782 gidx = _tssi_get_cck_group(rtwdev, ch);
3783 trim_de = _tssi_get_ofdm_trim_de(rtwdev, phy, i, chan);
3786 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3790 rtw89_phy_write32_mask(rtwdev, _tssi_de_cck_long[i], _TSSI_DE_MASK, val);
3791 rtw89_phy_write32_mask(rtwdev, _tssi_de_cck_short[i], _TSSI_DE_MASK, val);
3793 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3796 rtw89_phy_read32_mask(rtwdev, _tssi_de_cck_long[i],
3799 ofdm_de = _tssi_get_ofdm_de(rtwdev, phy, i, chan);
3800 trim_de = _tssi_get_ofdm_trim_de(rtwdev, phy, i, chan);
3803 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3807 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_20m[i], _TSSI_DE_MASK, val);
3808 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_40m[i], _TSSI_DE_MASK, val);
3809 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_80m[i], _TSSI_DE_MASK, val);
3810 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_80m_80m[i], _TSSI_DE_MASK, val);
3811 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_5m[i], _TSSI_DE_MASK, val);
3812 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_10m[i], _TSSI_DE_MASK, val);
3814 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3817 rtw89_phy_read32_mask(rtwdev, _tssi_de_mcs_20m[i],
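_tssi_set_efuse_to_de combines the efuse DE value for the channel's group with the per-channel trim offset and writes the result, limited to _TSSI_DE_MASK, into every rate-specific DE register. A small sketch of that combine-and-mask step; treating the result as a plain sum is an assumption, since the exact arithmetic is outside this listing:

#include <stdint.h>
#include <stdio.h>

#define TSSI_DE_MASK 0xff	/* stand-in for _TSSI_DE_MASK */

/* Assumed combination: DE from efuse plus trim, truncated to the field. */
static uint32_t tssi_de_value(int8_t efuse_de, int8_t trim_de)
{
	return (uint32_t)(int32_t)(efuse_de + trim_de) & TSSI_DE_MASK;
}

int main(void)
{
	printf("val = 0x%02x\n", tssi_de_value(10, -3));
	return 0;
}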
3822 static void rtw8852c_tssi_cont_en(struct rtw89_dev *rtwdev, bool en,
3829 rtw89_phy_write32_mask(rtwdev, tssi_trk[path], BIT(30), 0x0);
3830 rtw89_phy_write32_mask(rtwdev, tssi_en[path], BIT(31), 0x0);
3831 if (rtwdev->dbcc_en && path == RF_PATH_B)
3832 _tssi_set_efuse_to_de(rtwdev, RTW89_PHY_1, chan);
3834 _tssi_set_efuse_to_de(rtwdev, RTW89_PHY_0, chan);
3836 rtw89_phy_write32_mask(rtwdev, tssi_trk[path], BIT(30), 0x1);
3837 rtw89_phy_write32_mask(rtwdev, tssi_en[path], BIT(31), 0x1);
3841 void rtw8852c_tssi_cont_en_phyidx(struct rtw89_dev *rtwdev, bool en, u8 phy_idx,
3844 if (!rtwdev->dbcc_en) {
3845 rtw8852c_tssi_cont_en(rtwdev, en, RF_PATH_A, chan);
3846 rtw8852c_tssi_cont_en(rtwdev, en, RF_PATH_B, chan);
3849 rtw8852c_tssi_cont_en(rtwdev, en, RF_PATH_A, chan);
3851 rtw8852c_tssi_cont_en(rtwdev, en, RF_PATH_B, chan);
3855 static void _bw_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
3861 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]===>%s\n", __func__);
3867 rf_reg18 = rtw89_read_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK);
3875 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x3);
3876 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xf);
3880 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x3);
3881 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xf);
3885 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x2);
3886 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xd);
3890 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW0 | (path << 8), B_P0_CFCH_BW0, 0x1);
3891 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1 | (path << 8), B_P0_CFCH_BW1, 0xb);
3897 rtw89_write_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK, rf_reg18);
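_bw_setting programs the B_P0_CFCH_BW0/B_P0_CFCH_BW1 filter fields with one of three value pairs: 0x3/0xf, 0x2/0xd or 0x1/0xb. The case labels are not in this listing, but the pairs plausibly correspond to 20/40 MHz, 80 MHz and 160 MHz; a table-driven sketch under that assumption:

#include <stdio.h>

enum bw { BW_20, BW_40, BW_80, BW_160 };

struct cfch_pair { unsigned int bw0, bw1; };

/* Assumed mapping of bandwidth to the CFCH_BW0/BW1 pair seen in the listing. */
static struct cfch_pair cfch_for_bw(enum bw bw)
{
	switch (bw) {
	case BW_20:
	case BW_40:
		return (struct cfch_pair){ 0x3, 0xf };
	case BW_80:
		return (struct cfch_pair){ 0x2, 0xd };
	case BW_160:
	default:
		return (struct cfch_pair){ 0x1, 0xb };
	}
}

int main(void)
{
	struct cfch_pair p = cfch_for_bw(BW_80);

	printf("bw0=0x%x bw1=0x%x\n", p.bw0, p.bw1);
	return 0;
}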
3900 static void _ctrl_bw(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3907 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]===>%s\n", __func__);
3908 kpath = _kpath(rtwdev, phy);
3915 _bw_setting(rtwdev, path, bw, is_dav);
3917 _bw_setting(rtwdev, path, bw, is_dav);
3918 if (rtwdev->dbcc_en)
3921 if (path == RF_PATH_B && rtwdev->hal.cv == CHIP_CAV) {
3922 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV1, RR_RSV1_RST, 0x0);
3923 tmp = rtw89_read_rf(rtwdev, RF_PATH_A, RR_CFGCH, RFREG_MASK);
3924 rtw89_write_rf(rtwdev, RF_PATH_B, RR_APK, RR_APK_MOD, 0x3);
3925 rtw89_write_rf(rtwdev, RF_PATH_B, RR_CFGCH, RFREG_MASK, tmp);
3927 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV1, RR_RSV1_RST, 0x1);
3932 static void _ch_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
3938 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]===>%s\n", __func__);
3944 rf_reg18 = rtw89_read_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK);
3964 rtw89_write_rf(rtwdev, path, reg_reg18_addr, RFREG_MASK, rf_reg18);
3968 static void _ctrl_ch(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3973 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]===>%s\n", __func__);
3984 kpath = _kpath(rtwdev, phy);
3988 _ch_setting(rtwdev, path, central_ch, band, true);
3989 _ch_setting(rtwdev, path, central_ch, band, false);
3994 static void _rxbb_bw(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
4001 kpath = _kpath(rtwdev, phy);
4006 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x1);
4007 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M2, 0xa);
4023 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, val);
4024 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x0);
4028 static void _lck_keep_thermal(struct rtw89_dev *rtwdev)
4030 struct rtw89_lck_info *lck = &rtwdev->lck;
4033 for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
4035 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
4036 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
4041 static void _lck(struct rtw89_dev *rtwdev)
4044 int path = rtwdev->dbcc_en ? 2 : 1;
4047 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK, "[LCK] DO LCK\n");
4049 tmp18[0] = rtw89_read_rf(rtwdev, RF_PATH_A, RR_CFGCH, RFREG_MASK);
4050 tmp18[1] = rtw89_read_rf(rtwdev, RF_PATH_B, RR_CFGCH, RFREG_MASK);
4053 rtw89_write_rf(rtwdev, i, RR_LCK_TRG, RR_LCK_TRGSEL, 0x1);
4054 rtw89_write_rf(rtwdev, i, RR_CFGCH, RFREG_MASK, tmp18[i]);
4055 rtw89_write_rf(rtwdev, i, RR_LCK_TRG, RR_LCK_TRGSEL, 0x0);
4058 _lck_keep_thermal(rtwdev);
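_lck re-locks the synthesizer on each path by saving RR_CFGCH, asserting the RR_LCK_TRG trigger, rewriting the saved channel word (which restarts the lock), then de-asserting the trigger and recording the thermal baseline. A stand-alone sketch of that save/trigger/rewrite sequence with stub RF accessors and stand-in register offsets:

#include <stdint.h>
#include <stdio.h>

/* Stub RF register file, indexed by (path, addr), for illustration only. */
static uint32_t rf[2][0x100];
static uint32_t rf_read(int path, uint32_t addr)            { return rf[path][addr & 0xff]; }
static void     rf_write(int path, uint32_t addr, uint32_t v) { rf[path][addr & 0xff] = v; }

#define REG_CFGCH   0x18	/* stand-in for RR_CFGCH */
#define REG_LCK_TRG 0xd4	/* stand-in for RR_LCK_TRG */

static void lck_one_path(int path)
{
	uint32_t cfgch = rf_read(path, REG_CFGCH);	/* save channel word */

	rf_write(path, REG_LCK_TRG, 0x1);		/* arm the LCK trigger */
	rf_write(path, REG_CFGCH, cfgch);		/* rewrite -> re-lock */
	rf_write(path, REG_LCK_TRG, 0x0);		/* disarm */
}

int main(void)
{
	rf[0][REG_CFGCH] = 0x12345;
	lck_one_path(0);
	printf("path0 CFGCH = 0x%x\n", (unsigned int)rf_read(0, REG_CFGCH));
	return 0;
}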
4063 void rtw8852c_lck_track(struct rtw89_dev *rtwdev)
4065 struct rtw89_lck_info *lck = &rtwdev->lck;
4070 for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
4072 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
4075 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
4080 _lck(rtwdev);
4086 void rtw8852c_lck_init(struct rtw89_dev *rtwdev)
4088 _lck_keep_thermal(rtwdev);
4092 void rtw8852c_ctrl_bw_ch(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
4096 _ctrl_ch(rtwdev, phy, central_ch, band);
4097 _ctrl_bw(rtwdev, phy, bw);
4098 _rxbb_bw(rtwdev, phy, bw);
4101 void rtw8852c_set_channel_rf(struct rtw89_dev *rtwdev,
4105 rtw8852c_ctrl_bw_ch(rtwdev, phy_idx, chan->channel,
4110 void rtw8852c_mcc_get_ch_info(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
4112 struct rtw89_rfk_mcc_info_data *rfk_mcc = rtwdev->rfk_mcc.data;
4119 mode = rtw89_get_entity_mode(rtwdev);
4129 chan = rtw89_chan_get(rtwdev, chan_idx);
4140 idx = rtw89_rfk_chan_lookup(rtwdev, desc, ARRAY_SIZE(desc), chan);
4147 void rtw8852c_rck(struct rtw89_dev *rtwdev)
4152 _rck(rtwdev, path);
4155 void rtw8852c_dack(struct rtw89_dev *rtwdev, enum rtw89_chanctx_idx chanctx_idx)
4157 u8 phy_map = rtw89_btc_phymap(rtwdev, RTW89_PHY_0, 0, chanctx_idx);
4159 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_START);
4160 _dac_cal(rtwdev, false, chanctx_idx);
4161 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_STOP);
4164 void rtw8852c_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
4168 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0, chanctx_idx);
4170 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_START);
4171 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
4172 _wait_rx_mode(rtwdev, _kpath(rtwdev, phy_idx));
4174 _iqk_init(rtwdev);
4175 _iqk(rtwdev, phy_idx, false, chanctx_idx);
4177 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
4178 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_STOP);
4183 static void _rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
4186 struct rtw89_rx_dck_info *rx_dck = &rtwdev->rx_dck;
4192 kpath = _kpath(rtwdev, phy);
4193 rtw89_debug(rtwdev, RTW89_DBG_RFK,
4195 RXDCK_VER_8852C, rtwdev->hal.cv);
4198 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK);
4202 if (rtwdev->is_tssi_mode[path])
4203 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
4205 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
4206 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
4207 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_LO_SEL, rtwdev->dbcc_en);
4210 _set_rx_dck(rtwdev, phy, path, is_afe);
4216 _rx_dck_recover(rtwdev, path);
4220 is_fail = _rx_dck_rek_check(rtwdev, path);
4225 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RX_DCK] rek_cnt[%d]=%d",
4228 rx_dck->thermal[path] = ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
4229 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5);
4231 if (rtwdev->is_tssi_mode[path])
4232 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
4237 void rtw8852c_rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, bool is_afe)
4239 _rx_dck(rtwdev, phy, is_afe, 1);
4244 void rtw8852c_rx_dck_track(struct rtw89_dev *rtwdev)
4247 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
4248 struct rtw89_rx_dck_info *rx_dck = &rtwdev->rx_dck;
4250 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0, chanctx_idx);
4260 if (rtwdev->scanning)
4265 ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
4268 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
4279 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_RXDCK, BTC_WRFK_START);
4280 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
4283 dck_channel = _rx_dck_channel_calc(rtwdev, chan);
4284 _ctrl_ch(rtwdev, RTW89_PHY_0, dck_channel, chan->band_type);
4287 _rx_dck(rtwdev, RTW89_PHY_0, false, 20);
4290 _ctrl_ch(rtwdev, RTW89_PHY_0, chan->channel, chan->band_type);
4292 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
4293 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_RXDCK, BTC_WRFK_STOP);
4296 void rtw8852c_dpk_init(struct rtw89_dev *rtwdev)
4298 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
4304 void rtw8852c_dpk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
4308 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0, chanctx_idx);
4310 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DPK, BTC_WRFK_START);
4311 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
4312 _wait_rx_mode(rtwdev, _kpath(rtwdev, phy_idx));
4314 _dpk(rtwdev, phy_idx, false, chanctx_idx);
4316 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
4317 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DPK, BTC_WRFK_STOP);
4320 void rtw8852c_dpk_track(struct rtw89_dev *rtwdev)
4322 _dpk_track(rtwdev);
4325 void rtw8852c_tssi(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
4328 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
4331 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "[TSSI] %s: phy=%d\n", __func__, phy);
4333 if (rtwdev->dbcc_en) {
4343 _tssi_disable(rtwdev, phy);
4346 _tssi_set_sys(rtwdev, phy, i, chan);
4347 _tssi_ini_txpwr_ctrl_bb(rtwdev, phy, i);
4348 _tssi_ini_txpwr_ctrl_bb_he_tb(rtwdev, phy, i);
4349 _tssi_set_dck(rtwdev, phy, i, chan);
4350 _tssi_set_bbgain_split(rtwdev, phy, i);
4351 _tssi_set_tmeter_tbl(rtwdev, phy, i, chan);
4352 _tssi_slope_cal_org(rtwdev, phy, i, chan);
4353 _tssi_set_aligk_default(rtwdev, phy, i, chan);
4354 _tssi_set_slope(rtwdev, phy, i);
4355 _tssi_run_slope(rtwdev, phy, i);
4358 _tssi_enable(rtwdev, phy);
4359 _tssi_set_efuse_to_de(rtwdev, phy, chan);
4362 void rtw8852c_tssi_scan(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
4367 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "[TSSI] %s: phy=%d\n",
4370 if (!rtwdev->is_tssi_mode[RF_PATH_A])
4372 if (!rtwdev->is_tssi_mode[RF_PATH_B])
4375 if (rtwdev->dbcc_en) {
4385 _tssi_disable(rtwdev, phy);
4388 _tssi_set_sys(rtwdev, phy, i, chan);
4389 _tssi_set_dck(rtwdev, phy, i, chan);
4390 _tssi_set_tmeter_tbl(rtwdev, phy, i, chan);
4391 _tssi_slope_cal_org(rtwdev, phy, i, chan);
4392 _tssi_set_aligk_default(rtwdev, phy, i, chan);
4395 _tssi_enable(rtwdev, phy);
4396 _tssi_set_efuse_to_de(rtwdev, phy, chan);
4399 static void rtw8852c_tssi_default_txagc(struct rtw89_dev *rtwdev,
4402 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
4405 if (!rtwdev->is_tssi_mode[RF_PATH_A] && !rtwdev->is_tssi_mode[RF_PATH_B])
4410 if (rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB, B_TXAGC_BB_OFT) != 0xc000 &&
4411 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB, B_TXAGC_BB_OFT) != 0x0) {
4414 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB,
4421 if (rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB_S1, B_TXAGC_BB_S1_OFT) != 0xc000 &&
4422 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB_S1, B_TXAGC_BB_S1_OFT) != 0x0) {
4425 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB_S1,
4433 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT,
4435 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT,
4438 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT_EN, 0x0);
4439 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT_EN, 0x1);
4441 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT_EN, 0x0);
4442 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT_EN, 0x1);
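rtw8852c_tssi_default_txagc copies the latest TXAGC offset into B_P0/P1_TSSI_OFT and then toggles the corresponding OFT_EN bit 0 -> 1 so the tracking block latches the new value. A minimal sketch of that write-then-pulse-enable pattern over a stub register; the field positions are illustrative, not the real bit layout:

#include <stdint.h>
#include <stdio.h>

static uint32_t tssi_trk;	/* stand-in for the R_Px_TSSI_TRK register */

#define OFT_MASK  0xffu		/* illustrative field positions */
#define OFT_SHIFT 0
#define OFT_EN    (1u << 8)

static void set_default_txagc_offset(uint32_t offset)
{
	tssi_trk = (tssi_trk & ~(OFT_MASK << OFT_SHIFT)) |
		   ((offset & OFT_MASK) << OFT_SHIFT);	/* program offset */
	tssi_trk &= ~OFT_EN;				/* enable 0... */
	tssi_trk |= OFT_EN;				/* ...then 1 to latch */
}

int main(void)
{
	set_default_txagc_offset(0x1f);
	printf("TSSI_TRK = 0x%x\n", (unsigned int)tssi_trk);
	return 0;
}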
4446 void rtw8852c_wifi_scan_notify(struct rtw89_dev *rtwdev,
4450 rtw8852c_tssi_default_txagc(rtwdev, phy_idx, true);
4452 rtw8852c_tssi_default_txagc(rtwdev, phy_idx, false);
4455 void rtw8852c_rfk_chanctx_cb(struct rtw89_dev *rtwdev,
4458 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
4465 _dpk_onoff(rtwdev, path, false);
4470 _dpk_onoff(rtwdev, path, false);
4471 rtw8852c_dpk(rtwdev, RTW89_PHY_0, RTW89_CHANCTX_0);