Lines Matching defs:rtwdev (rtw8852b RF calibration helpers in the rtw89 driver)
176 static void _rfk_backup_bb_reg(struct rtw89_dev *rtwdev, u32 backup_bb_reg_val[])
182 rtw89_phy_read32_mask(rtwdev, rtw8852b_backup_bb_regs[i],
184 rtw89_debug(rtwdev, RTW89_DBG_RFK,
190 static void _rfk_backup_rf_reg(struct rtw89_dev *rtwdev, u32 backup_rf_reg_val[],
197 rtw89_read_rf(rtwdev, rf_path,
199 rtw89_debug(rtwdev, RTW89_DBG_RFK,
205 static void _rfk_restore_bb_reg(struct rtw89_dev *rtwdev,
211 rtw89_phy_write32_mask(rtwdev, rtw8852b_backup_bb_regs[i],
213 rtw89_debug(rtwdev, RTW89_DBG_RFK,
219 static void _rfk_restore_rf_reg(struct rtw89_dev *rtwdev,
225 rtw89_write_rf(rtwdev, rf_path, rtw8852b_backup_rf_regs[i],
228 rtw89_debug(rtwdev, RTW89_DBG_RFK,
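
The four helpers above are symmetric backup/restore pairs over the rtw8852b_backup_bb_regs and rtw8852b_backup_rf_regs tables; _doiqk (lines 1628-1636) and _dpk_cal_select (lines 2517-2541) wrap every calibration between them. A minimal sketch of the BB pair, assuming the table length is exposed as BACKUP_BB_REGS_NR (that constant name is an assumption):

static void _rfk_backup_bb_reg(struct rtw89_dev *rtwdev, u32 backup_bb_reg_val[])
{
	u32 i;

	for (i = 0; i < BACKUP_BB_REGS_NR; i++)
		/* full-dword snapshot of each register in the table */
		backup_bb_reg_val[i] =
			rtw89_phy_read32_mask(rtwdev, rtw8852b_backup_bb_regs[i],
					      MASKDWORD);
}

static void _rfk_restore_bb_reg(struct rtw89_dev *rtwdev,
				const u32 backup_bb_reg_val[])
{
	u32 i;

	for (i = 0; i < BACKUP_BB_REGS_NR; i++)
		rtw89_phy_write32_mask(rtwdev, rtw8852b_backup_bb_regs[i],
				       MASKDWORD, backup_bb_reg_val[i]);
}
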
234 static void _rfk_rf_direct_cntrl(struct rtw89_dev *rtwdev,
238 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
240 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
243 static void _rfk_drf_direct_cntrl(struct rtw89_dev *rtwdev,
247 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x1);
249 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0);
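
Both direct-control helpers flip a single RF bit depending on whether the baseband or the RF side drives the block; a plausible reconstruction of the elided branch, assuming a boolean selector parameter (the is_bybb name is an assumption):

static void _rfk_rf_direct_cntrl(struct rtw89_dev *rtwdev,
				 enum rtw89_rf_path path, bool is_bybb)
{
	if (is_bybb)
		rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
	else
		rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
}

_rfk_drf_direct_cntrl follows the same shape with RR_BBDC / RR_BBDC_SEL.
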
252 static bool _iqk_check_cal(struct rtw89_dev *rtwdev, u8 path)
259 1, 8200, false, rtwdev, 0xbff8, MASKBYTE0);
261 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]NCTL1 IQK timeout!!!\n");
266 fail = rtw89_phy_read32_mask(rtwdev, R_NCTL_RPT, B_NCTL_RPT_FLG);
267 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, MASKBYTE0, 0x0);
269 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, ret=%d\n", path, ret);
270 val = rtw89_phy_read32_mask(rtwdev, R_NCTL_RPT, MASKDWORD);
271 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, 0x8008 = 0x%x\n", path, val);
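
Line 259 carries the trailing arguments of the kernel iopoll helper: the NCTL done byte at 0xbff8 is polled through rtw89_phy_read32_mask() until it reports completion or the 8200 us budget expires. A sketch of that wait, assuming the completion value is 0x55 (only the arguments visible at line 259 are taken from the source):

	u32 val;
	int ret;

	/* poll byte 0 of 0xbff8 every 1 us, for at most 8200 us */
	ret = read_poll_timeout_atomic(rtw89_phy_read32_mask, val, val == 0x55,
				       1, 8200, false,
				       rtwdev, 0xbff8, MASKBYTE0);
	if (ret)
		rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]NCTL1 IQK timeout!!!\n");
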
276 static u8 _kpath(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
280 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]dbcc_en: %x,PHY%d\n",
281 rtwdev->dbcc_en, phy_idx);
283 if (!rtwdev->dbcc_en) {
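
The rest of _kpath depends only on dbcc_en and the PHY index; a minimal reconstruction, assuming the usual RF_A/RF_B/RF_AB masks used by _iqk and _dpk below:

	if (!rtwdev->dbcc_en)
		return RF_AB;	/* no DBCC: one PHY owns both paths */

	if (phy_idx == RTW89_PHY_0)
		return RF_A;
	else
		return RF_B;
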
294 static void _set_rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
297 rtw89_write_rf(rtwdev, path, RR_DCK1, RR_DCK1_CLR, 0x0);
298 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x0);
299 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x1);
303 static void _rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
308 rtw89_debug(rtwdev, RTW89_DBG_RFK,
310 RTW8852B_RXDCK_VER, rtwdev->hal.cv);
313 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK);
314 dck_tune = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_FINE);
316 if (rtwdev->is_tssi_mode[path])
317 rtw89_phy_write32_mask(rtwdev,
321 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
322 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, 0x0);
323 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
324 _set_rx_dck(rtwdev, phy, path);
325 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, dck_tune);
326 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5);
328 if (rtwdev->is_tssi_mode[path])
329 rtw89_phy_write32_mask(rtwdev,
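
The truncated writes at lines 317 and 329 pause and resume per-path TSSI tracking around the RX DCK, matching the is_tssi_mode checks at lines 316/328. A sketch, assuming the same R_P0_TSSI_TRK + (path << 13) / B_P0_TSSI_TRK_EN pair that _dpk_tssi_pause programs at line 2824 (the exact register is an assumption here):

	if (rtwdev->is_tssi_mode[path])
		rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
				       B_P0_TSSI_TRK_EN, 0x1);	/* pause tracking */

	/* ... RR_RSV1/RR_DCK programming and _set_rx_dck() ... */

	if (rtwdev->is_tssi_mode[path])
		rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
				       B_P0_TSSI_TRK_EN, 0x0);	/* resume tracking */
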
335 static void _rck(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
342 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] ====== S%d RCK ======\n", path);
344 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK);
346 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
347 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
349 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] RF0x00 = 0x%05x\n",
350 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK));
353 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, 0x00240);
356 false, rtwdev, path, RR_RCKS, BIT(3));
358 rck_val = rtw89_read_rf(rtwdev, path, RR_RCKC, RR_RCKC_CA);
360 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] rck_val = 0x%x, ret = %d\n",
363 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, rck_val);
364 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5);
366 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] RF 0x1b = 0x%x\n",
367 rtw89_read_rf(rtwdev, path, RR_RCKC, RFREG_MASK));
370 static void _afe_init(struct rtw89_dev *rtwdev)
372 rtw89_write32(rtwdev, R_AX_PHYREG_SET, 0xf);
374 rtw89_rfk_parser(rtwdev, &rtw8852b_afe_init_defs_tbl);
377 static void _drck(struct rtw89_dev *rtwdev)
383 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]Ddie RCK start!!!\n");
384 rtw89_phy_write32_mask(rtwdev, R_DRCK_V1, B_DRCK_V1_KICK, 0x1);
387 false, rtwdev, R_DRCK_RS, B_DRCK_RS_DONE);
389 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DRCK timeout\n");
391 rtw89_phy_write32_mask(rtwdev, R_DRCK_V1, B_DRCK_V1_KICK, 0x0);
392 rtw89_phy_write32_mask(rtwdev, R_DRCK_FH, B_DRCK_LAT, 0x1);
394 rtw89_phy_write32_mask(rtwdev, R_DRCK_FH, B_DRCK_LAT, 0x0);
395 rck_d = rtw89_phy_read32_mask(rtwdev, R_DRCK_RS, B_DRCK_RS_LPS);
396 rtw89_phy_write32_mask(rtwdev, R_DRCK_V1, B_DRCK_V1_SEL, 0x0);
397 rtw89_phy_write32_mask(rtwdev, R_DRCK_V1, B_DRCK_V1_CV, rck_d);
399 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0xc0cc = 0x%x\n",
400 rtw89_phy_read32_mask(rtwdev, R_DRCK_V1, MASKDWORD));
403 static void _addck_backup(struct rtw89_dev *rtwdev)
405 struct rtw89_dack_info *dack = &rtwdev->dack;
407 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0, 0x0);
408 dack->addck_d[0][0] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR0, B_ADDCKR0_A0);
409 dack->addck_d[0][1] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR0, B_ADDCKR0_A1);
411 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1, 0x0);
412 dack->addck_d[1][0] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR1, B_ADDCKR1_A0);
413 dack->addck_d[1][1] = rtw89_phy_read32_mask(rtwdev, R_ADDCKR1, B_ADDCKR1_A1);
416 static void _addck_reload(struct rtw89_dev *rtwdev)
418 struct rtw89_dack_info *dack = &rtwdev->dack;
421 rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK0D_VAL, dack->addck_d[0][0]);
422 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_VAL, dack->addck_d[0][1] >> 6);
423 rtw89_phy_write32_mask(rtwdev, R_ADDCK0D, B_ADDCK0D_VAL2, dack->addck_d[0][1] & 0x3f);
424 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_MAN, 0x3);
427 rtw89_phy_write32_mask(rtwdev, R_ADDCK1D, B_ADDCK1D_VAL, dack->addck_d[1][0]);
428 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK0_VAL, dack->addck_d[1][1] >> 6);
429 rtw89_phy_write32_mask(rtwdev, R_ADDCK1D, B_ADDCK1D_VAL2, dack->addck_d[1][1] & 0x3f);
430 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_MAN, 0x3);
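
On reload each stored ADDCK word is split: the bits above the low six go to the *_VAL field and the low six bits to the *D_VAL2 field. A worked example with a hypothetical 9-bit result:

	/* hypothetical: dack->addck_d[0][1] == 0x1ab
	 *   0x1ab >> 6   == 0x06  ->  B_ADDCK0_VAL   (upper bits)
	 *   0x1ab & 0x3f == 0x2b  ->  B_ADDCK0D_VAL2 (low 6 bits)
	 * the hardware reassembles (0x06 << 6) | 0x2b == 0x1ab
	 */
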
433 static void _dack_backup_s0(struct rtw89_dev *rtwdev)
435 struct rtw89_dack_info *dack = &rtwdev->dack;
438 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x1);
441 rtw89_phy_write32_mask(rtwdev, R_DCOF0, B_DCOF0_V, i);
443 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P2, B_DACK_S0M0);
444 rtw89_phy_write32_mask(rtwdev, R_DCOF8, B_DCOF8_V, i);
446 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P3, B_DACK_S0M1);
450 rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS00, B_DACK_BIAS00);
452 rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS01, B_DACK_BIAS01);
455 rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK00, B_DACK_DADCK00);
457 rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK01, B_DACK_DADCK01);
460 static void _dack_backup_s1(struct rtw89_dev *rtwdev)
462 struct rtw89_dack_info *dack = &rtwdev->dack;
465 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x1);
468 rtw89_phy_write32_mask(rtwdev, R_DACK10, B_DACK10, i);
470 rtw89_phy_read32_mask(rtwdev, R_DACK10S, B_DACK10S);
471 rtw89_phy_write32_mask(rtwdev, R_DACK11, B_DACK11, i);
473 rtw89_phy_read32_mask(rtwdev, R_DACK11S, B_DACK11S);
477 rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS10, B_DACK_BIAS10);
479 rtw89_phy_read32_mask(rtwdev, R_DACK_BIAS11, B_DACK_BIAS11);
482 rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK10, B_DACK_DADCK10);
484 rtw89_phy_read32_mask(rtwdev, R_DACK_DADCK11, B_DACK_DADCK11);
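
Lines 441-446 (and 468-473 for S1) are one iteration of a sweep that writes an index register, then latches one MSBK code per step into dack->msbk_d[][][]. A sketch of the S0 loop, assuming RTW89_DACK_MSBK_NR entries (the loop bound is an assumption):

	u8 i;

	for (i = 0; i < RTW89_DACK_MSBK_NR; i++) {
		rtw89_phy_write32_mask(rtwdev, R_DCOF0, B_DCOF0_V, i);
		dack->msbk_d[0][0][i] =
			rtw89_phy_read32_mask(rtwdev, R_DACK_S0P2, B_DACK_S0M0);
		rtw89_phy_write32_mask(rtwdev, R_DCOF8, B_DCOF8_V, i);
		dack->msbk_d[0][1][i] =
			rtw89_phy_read32_mask(rtwdev, R_DACK_S0P3, B_DACK_S0M1);
	}
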
487 static void _check_addc(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
493 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
498 tmp = rtw89_phy_read32_mask(rtwdev, R_DBG32_D, MASKDWORD);
506 rtw89_debug(rtwdev, RTW89_DBG_RFK,
510 static void _addck(struct rtw89_dev *rtwdev)
512 struct rtw89_dack_info *dack = &rtwdev->dack;
517 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_MAN, 0x0);
518 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1, 0x30, 0x0);
519 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x1);
520 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_ADCCLK, 0x0);
521 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_FLTRST, 0x0);
522 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_FLTRST, 0x1);
523 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15_H, 0xf);
524 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_EN, 0x0);
525 rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1, BIT(1), 0x1);
526 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15_H, 0x3);
528 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]before S0 ADDCK\n");
529 _check_addc(rtwdev, RF_PATH_A);
531 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_TRG, 0x1);
532 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0_TRG, 0x0);
534 rtw89_phy_write32_mask(rtwdev, R_ADDCK0, B_ADDCK0, 0x1);
537 false, rtwdev, R_ADDCKR0, BIT(0));
539 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 ADDCK timeout\n");
542 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]ADDCK ret = %d\n", ret);
543 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]after S0 ADDCK\n");
544 _check_addc(rtwdev, RF_PATH_A);
546 rtw89_phy_write32_mask(rtwdev, R_PATH0_SAMPL_DLY_T_V1, BIT(1), 0x0);
547 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_EN, 0x1);
548 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15_H, 0xc);
549 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_ADCCLK, 0x1);
550 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x0);
553 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x1);
554 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_ADCCLK, 0x0);
555 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_FLTRST, 0x0);
556 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_FLTRST, 0x1);
557 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15_H, 0xf);
558 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_EN, 0x0);
559 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1, BIT(1), 0x1);
560 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15_H, 0x3);
562 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]before S1 ADDCK\n");
563 _check_addc(rtwdev, RF_PATH_B);
565 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_TRG, 0x1);
566 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1_TRG, 0x0);
568 rtw89_phy_write32_mask(rtwdev, R_ADDCK1, B_ADDCK1, 0x1);
571 false, rtwdev, R_ADDCKR1, BIT(0));
573 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 ADDCK timeout\n");
576 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]ADDCK ret = %d\n", ret);
577 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]after S1 ADDCK\n");
578 _check_addc(rtwdev, RF_PATH_B);
580 rtw89_phy_write32_mask(rtwdev, R_PATH1_SAMPL_DLY_T_V1, BIT(1), 0x0);
581 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_EN, 0x1);
582 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15_H, 0xc);
583 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_ADCCLK, 0x1);
584 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x0);
587 static void _check_dadc(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
589 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
593 _check_addc(rtwdev, path);
595 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
600 static bool _dack_s0_check_done(struct rtw89_dev *rtwdev, bool part1)
603 if (rtw89_phy_read32_mask(rtwdev, R_DACK_S0P0, B_DACK_S0P0_OK) == 0 ||
604 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P1, B_DACK_S0P1_OK) == 0)
607 if (rtw89_phy_read32_mask(rtwdev, R_DACK_S0P2, B_DACK_S0P2_OK) == 0 ||
608 rtw89_phy_read32_mask(rtwdev, R_DACK_S0P3, B_DACK_S0P3_OK) == 0)
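
_dack_s0_check_done (and its S1 twin) checks the per-stage OK flags, which lets _dack_s0 pass it straight to read_poll_timeout_atomic as the polled op (the trailing "false, rtwdev, true" at line 624 is its argument list). A reconstruction of the S0 helper plus an illustrative poll site (the 1/10000 us timing is an assumption):

static bool _dack_s0_check_done(struct rtw89_dev *rtwdev, bool part1)
{
	if (part1) {
		if (rtw89_phy_read32_mask(rtwdev, R_DACK_S0P0, B_DACK_S0P0_OK) == 0 ||
		    rtw89_phy_read32_mask(rtwdev, R_DACK_S0P1, B_DACK_S0P1_OK) == 0)
			return false;
	} else {
		if (rtw89_phy_read32_mask(rtwdev, R_DACK_S0P2, B_DACK_S0P2_OK) == 0 ||
		    rtw89_phy_read32_mask(rtwdev, R_DACK_S0P3, B_DACK_S0P3_OK) == 0)
			return false;
	}

	return true;
}

	/* caller side in _dack_s0(): wait for MSBK to finish (part1 == true) */
	bool done;
	int ret;

	ret = read_poll_timeout_atomic(_dack_s0_check_done, done, done,
				       1, 10000, false, rtwdev, true);
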
615 static void _dack_s0(struct rtw89_dev *rtwdev)
617 struct rtw89_dack_info *dack = &rtwdev->dack;
621 rtw89_rfk_parser(rtwdev, &rtw8852b_dack_s0_1_defs_tbl);
624 false, rtwdev, true);
626 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 MSBK timeout\n");
629 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK ret = %d\n", ret);
631 rtw89_rfk_parser(rtwdev, &rtw8852b_dack_s0_2_defs_tbl);
634 false, rtwdev, false);
636 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 DADCK timeout\n");
639 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK ret = %d\n", ret);
641 rtw89_rfk_parser(rtwdev, &rtw8852b_dack_s0_3_defs_tbl);
643 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]after S0 DADCK\n");
645 _dack_backup_s0(rtwdev);
646 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x0);
649 static bool _dack_s1_check_done(struct rtw89_dev *rtwdev, bool part1)
652 if (rtw89_phy_read32_mask(rtwdev, R_DACK_S1P0, B_DACK_S1P0_OK) == 0 &&
653 rtw89_phy_read32_mask(rtwdev, R_DACK_S1P1, B_DACK_S1P1_OK) == 0)
656 if (rtw89_phy_read32_mask(rtwdev, R_DACK10S, B_DACK_S1P2_OK) == 0 &&
657 rtw89_phy_read32_mask(rtwdev, R_DACK11S, B_DACK_S1P3_OK) == 0)
664 static void _dack_s1(struct rtw89_dev *rtwdev)
666 struct rtw89_dack_info *dack = &rtwdev->dack;
670 rtw89_rfk_parser(rtwdev, &rtw8852b_dack_s1_1_defs_tbl);
673 false, rtwdev, true);
675 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 MSBK timeout\n");
678 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK ret = %d\n", ret);
680 rtw89_rfk_parser(rtwdev, &rtw8852b_dack_s1_2_defs_tbl);
683 false, rtwdev, false);
685 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 DADCK timeout\n");
688 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK ret = %d\n", ret);
690 rtw89_rfk_parser(rtwdev, &rtw8852b_dack_s1_3_defs_tbl);
692 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]after S1 DADCK\n");
694 _check_dadc(rtwdev, RF_PATH_B);
695 _dack_backup_s1(rtwdev);
696 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x0);
699 static void _dack(struct rtw89_dev *rtwdev)
701 _dack_s0(rtwdev);
702 _dack_s1(rtwdev);
705 static void _dack_dump(struct rtw89_dev *rtwdev)
707 struct rtw89_dack_info *dack = &rtwdev->dack;
711 rtw89_debug(rtwdev, RTW89_DBG_RFK,
714 rtw89_debug(rtwdev, RTW89_DBG_RFK,
717 rtw89_debug(rtwdev, RTW89_DBG_RFK,
720 rtw89_debug(rtwdev, RTW89_DBG_RFK,
723 rtw89_debug(rtwdev, RTW89_DBG_RFK,
726 rtw89_debug(rtwdev, RTW89_DBG_RFK,
730 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 MSBK ic:\n");
733 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
736 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S0 MSBK qc:\n");
739 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
742 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 MSBK ic:\n");
745 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
748 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]S1 MSBK qc:\n");
751 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]0x%x\n", t);
755 static void _dac_cal(struct rtw89_dev *rtwdev, bool force)
757 struct rtw89_dack_info *dack = &rtwdev->dack;
761 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK 0x1\n");
762 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK start!!!\n");
764 rf0_0 = rtw89_read_rf(rtwdev, RF_PATH_A, RR_MOD, RFREG_MASK);
765 rf1_0 = rtw89_read_rf(rtwdev, RF_PATH_B, RR_MOD, RFREG_MASK);
766 _afe_init(rtwdev);
767 _drck(rtwdev);
769 rtw89_write_rf(rtwdev, RF_PATH_A, RR_RSV1, RR_RSV1_RST, 0x0);
770 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV1, RR_RSV1_RST, 0x0);
771 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MOD, RFREG_MASK, 0x337e1);
772 rtw89_write_rf(rtwdev, RF_PATH_B, RR_MOD, RFREG_MASK, 0x337e1);
773 _addck(rtwdev);
774 _addck_backup(rtwdev);
775 _addck_reload(rtwdev);
777 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MODOPT, RFREG_MASK, 0x0);
778 rtw89_write_rf(rtwdev, RF_PATH_B, RR_MODOPT, RFREG_MASK, 0x0);
779 _dack(rtwdev);
780 _dack_dump(rtwdev);
783 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MOD, RFREG_MASK, rf0_0);
784 rtw89_write_rf(rtwdev, RF_PATH_B, RR_MOD, RFREG_MASK, rf1_0);
785 rtw89_write_rf(rtwdev, RF_PATH_A, RR_RSV1, RR_RSV1_RST, 0x1);
786 rtw89_write_rf(rtwdev, RF_PATH_B, RR_RSV1, RR_RSV1_RST, 0x1);
788 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DACK]DACK finish!!!\n");
791 static void _iqk_rxk_setting(struct rtw89_dev *rtwdev, u8 path)
793 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
798 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
799 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL2G, 0x1);
800 tmp = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
801 rtw89_write_rf(rtwdev, path, RR_RSV4, RFREG_MASK, tmp);
804 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
805 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL5G, 0x1);
806 tmp = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
807 rtw89_write_rf(rtwdev, path, RR_RSV4, RFREG_MASK, tmp);
814 static bool _iqk_one_shot(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
817 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
823 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
827 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
831 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
835 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);
843 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
848 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);
849 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_TXT, 0x011);
853 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
854 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_RXT, 0x011);
861 rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD, iqk_cmd + 1);
863 fail = _iqk_check_cal(rtwdev, path);
864 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);
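
Whatever per-type preamble the switch above applies, every one-shot ends the same way: kick the command word into R_NCTL_CFG, wait on _iqk_check_cal(), then drop B_P0_RFCTM_EN and return the result. The tail assembled from lines 861-864 (local names are assumptions):

	bool fail;

	rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD, iqk_cmd + 1);
	fail = _iqk_check_cal(rtwdev, path);
	rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);

	return fail;
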
869 static bool _rxk_group_sel(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
872 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
880 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
882 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C2G,
884 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C1G,
888 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
890 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_HATT,
892 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_CC2,
899 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
901 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
903 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
905 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_RXK);
906 rtw89_phy_write32_mask(rtwdev, R_IQKINF,
910 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL5G, 0x0);
914 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
919 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
927 static bool _iqk_nbrxk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
930 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
937 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
939 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C2G,
941 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C1G,
945 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
947 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_HATT,
949 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_CC2,
956 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1);
957 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x0);
958 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP_V1, gp);
959 rtw89_write_rf(rtwdev, path, RR_RXKPLL, RFREG_MASK, 0x80013);
962 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_NBRXK);
963 rtw89_phy_write32_mask(rtwdev, R_IQKINF, BIT(16 + gp + path * 4), fail);
965 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL5G, 0x0);
969 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD) | 0x2;
976 static void _iqk_rxclk_setting(struct rtw89_dev *rtwdev, u8 path)
978 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
981 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x1);
982 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x1);
984 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x0f);
986 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x03);
987 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0xa001);
989 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0xa041);
990 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK, B_P0_RXCK_VAL, 0x2);
991 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK, B_P0_RXCK_ON, 0x1);
992 rtw89_phy_write32_mask(rtwdev, R_P1_RXCK, B_P1_RXCK_VAL, 0x2);
993 rtw89_phy_write32_mask(rtwdev, R_P1_RXCK, B_P1_RXCK_ON, 0x1);
994 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_UPD_CLK_ADC_ON, 0x1);
995 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_UPD_CLK_ADC_VAL, 0x1);
997 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x1);
998 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x1);
1000 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x0f);
1002 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x03);
1003 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0xa001);
1005 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0xa041);
1006 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK, B_P0_RXCK_VAL, 0x1);
1007 rtw89_phy_write32_mask(rtwdev, R_P0_RXCK, B_P0_RXCK_ON, 0x1);
1008 rtw89_phy_write32_mask(rtwdev, R_P1_RXCK, B_P1_RXCK_VAL, 0x1);
1009 rtw89_phy_write32_mask(rtwdev, R_P1_RXCK, B_P1_RXCK_ON, 0x1);
1010 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_UPD_CLK_ADC_ON, 0x1);
1011 rtw89_phy_write32_mask(rtwdev, R_UPD_CLK_ADC, B_UPD_CLK_ADC_VAL, 0x0);
1015 static bool _txk_group_sel(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1017 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1025 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1027 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1029 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1031 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1035 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1037 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1039 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1041 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1048 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1050 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1052 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1054 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1056 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, B_NCTL_N1_CIP, 0x00);
1057 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_TXK);
1058 rtw89_phy_write32_mask(rtwdev, R_IQKINF,
1065 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
1070 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
1078 static bool _iqk_nbtxk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1080 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1086 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1088 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1090 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1092 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1096 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1098 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1100 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1102 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1109 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1);
1110 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x1);
1111 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G2, 0x0);
1112 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP, gp);
1113 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, B_NCTL_N1_CIP, 0x00);
1114 kfail = _iqk_one_shot(rtwdev, phy_idx, path, ID_NBTXK);
1118 rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8),
1126 static void _lok_res_table(struct rtw89_dev *rtwdev, u8 path, u8 ibias)
1128 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1130 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]===>%s\n", __func__);
1131 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, ibias = %x\n", path, ibias);
1133 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x2);
1135 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, 0x0);
1137 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, 0x1);
1138 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, ibias);
1139 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x0);
1140 rtw89_write_rf(rtwdev, path, RR_TXVBUF, RR_TXVBUF_DACEN, 0x1);
1142 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, 0x7c = %x\n", path,
1143 rtw89_read_rf(rtwdev, path, RR_TXVBUF, RFREG_MASK));
1146 static bool _lok_finetune_check(struct rtw89_dev *rtwdev, u8 path)
1148 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1157 tmp = rtw89_read_rf(rtwdev, path, RR_TXMO, RFREG_MASK);
1169 tmp = rtw89_read_rf(rtwdev, path, RR_LOKVB, RFREG_MASK);
1180 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1183 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1190 static bool _iqk_lok(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1192 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1195 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_TXT, 0x021);
1199 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1200 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6);
1203 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1204 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x4);
1212 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1215 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1221 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x9);
1222 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_COARSE);
1227 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1230 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1236 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x24);
1237 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER);
1241 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1244 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1250 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x9);
1251 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_TXT, 0x021);
1252 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_FINE);
1257 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1260 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1266 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x24);
1267 _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER);
1269 return _lok_finetune_check(rtwdev, path);
1272 static void _iqk_txk_setting(struct rtw89_dev *rtwdev, u8 path)
1274 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1278 rtw89_write_rf(rtwdev, path, RR_XALNA2, RR_XALNA2_SW2, 0x00);
1279 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT2, 0x0);
1280 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT1, 0x0);
1281 rtw89_write_rf(rtwdev, path, RR_TXG2, RR_TXG2_ATT0, 0x1);
1282 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1283 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1284 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M1, 0x00);
1285 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_IQK, 0x403e);
1289 rtw89_write_rf(rtwdev, path, RR_XGLNA2, RR_XGLNA2_SW, 0x00);
1290 rtw89_write_rf(rtwdev, path, RR_BIASA, RR_BIASA_A, 0x1);
1291 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1292 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1293 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M1, 0x80);
1294 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_IQK, 0x403e);
1302 static void _iqk_txclk_setting(struct rtw89_dev *rtwdev, u8 path)
1304 rtw89_phy_write32_mask(rtwdev, R_P0_NRBW, B_P0_NRBW_DBG, 0x1);
1305 rtw89_phy_write32_mask(rtwdev, R_P1_DBGMOD, B_P1_DBGMOD_ON, 0x1);
1307 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x1f);
1309 rtw89_phy_write32_mask(rtwdev, R_ANAPAR_PW15, B_ANAPAR_PW15, 0x13);
1310 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0x0001);
1312 rtw89_phy_write32_mask(rtwdev, R_ANAPAR, B_ANAPAR_15, 0x0041);
1315 static void _iqk_info_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1317 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1322 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FCOR << (path * 4), flag);
1324 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FFIN << (path * 4), flag);
1326 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FTX << (path * 4), flag);
1328 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_F_RX << (path * 4), flag);
1330 tmp = rtw89_phy_read32_mask(rtwdev, R_IQK_RES + (path << 8), MASKDWORD);
1332 tmp = rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD);
1334 tmp = rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD);
1337 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_KCNT, iqk_info->iqk_times);
1339 tmp = rtw89_phy_read32_mask(rtwdev, R_IQKINF, B_IQKINF_FAIL << (path * 4));
1342 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_FCNT << (path * 4),
1346 static void _iqk_by_path(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1348 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1354 _iqk_txclk_setting(rtwdev, path);
1358 _lok_res_table(rtwdev, path, ibias++);
1359 _iqk_txk_setting(rtwdev, path);
1360 lok_is_fail = _iqk_lok(rtwdev, phy_idx, path);
1366 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK] LOK (%d) fail\n", path);
1370 iqk_info->iqk_tx_fail[0][path] = _iqk_nbtxk(rtwdev, phy_idx, path);
1372 iqk_info->iqk_tx_fail[0][path] = _txk_group_sel(rtwdev, phy_idx, path);
1375 _iqk_rxclk_setting(rtwdev, path);
1376 _iqk_rxk_setting(rtwdev, path);
1378 iqk_info->iqk_rx_fail[0][path] = _iqk_nbrxk(rtwdev, phy_idx, path);
1380 iqk_info->iqk_rx_fail[0][path] = _rxk_group_sel(rtwdev, phy_idx, path);
1382 _iqk_info_iqk(rtwdev, phy_idx, path);
1385 static void _iqk_get_ch_info(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, u8 path)
1387 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
1388 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1400 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK] (1)idx = %x\n", idx);
1407 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK] (2)idx = %x\n", idx);
1409 reg_rf18 = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
1410 reg_35c = rtw89_phy_read32_mask(rtwdev, R_CIRST, B_CIRST_SYN);
1418 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, 0x18= 0x%x, idx = %x\n",
1420 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, 0x18= 0x%x\n",
1422 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]times = 0x%x, ch =%x\n",
1424 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]iqk_mcc_ch[%x][%x] = 0x%x\n",
1432 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1436 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_VER, RTW8852B_IQK_VER);
1438 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BAND << (path * 16),
1441 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BW << (path * 16),
1443 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_CH << (path * 16),
1447 static void _iqk_start_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1449 _iqk_by_path(rtwdev, phy_idx, path);
1452 static void _iqk_restore(struct rtw89_dev *rtwdev, u8 path)
1454 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1457 rtw89_phy_write32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD,
1459 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD,
1461 rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD,
1463 fail = _iqk_check_cal(rtwdev, path);
1465 rtw89_debug(rtwdev, RTW89_DBG_RFK, "%s result =%x\n", __func__, fail);
1467 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, B_NCTL_N1_CIP, 0x00);
1468 rtw89_phy_write32_mask(rtwdev, R_NCTL_RPT, MASKDWORD, 0x00000000);
1469 rtw89_phy_write32_mask(rtwdev, R_KIP_SYSCFG, MASKDWORD, 0x80000000);
1470 rtw89_phy_write32_mask(rtwdev, R_CFIR_SYS, B_IQK_RES_K, 0x0);
1471 rtw89_phy_write32_mask(rtwdev, R_IQRSN, B_IQRSN_K1, 0x0);
1472 rtw89_phy_write32_mask(rtwdev, R_IQRSN, B_IQRSN_K2, 0x0);
1473 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x0);
1474 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x0);
1475 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0x3);
1476 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
1477 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x1);
1480 static void _iqk_afebb_restore(struct rtw89_dev *rtwdev,
1488 rtw89_debug(rtwdev, RTW89_DBG_RFK, "===> %s\n", __func__);
1490 kpath = _kpath(rtwdev, phy_idx);
1503 rtw89_phy_write32_mask(rtwdev, def->addr, def->mask, def->data);
1506 static void _iqk_preset(struct rtw89_dev *rtwdev, u8 path)
1508 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1512 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK] (3)idx = %x\n", idx);
1514 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), B_COEF_SEL_IQC, idx);
1515 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G3, idx);
1517 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
1518 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0);
1519 rtw89_phy_write32_mask(rtwdev, R_NCTL_RPT, MASKDWORD, 0x00000080);
1520 rtw89_phy_write32_mask(rtwdev, R_KIP_SYSCFG, MASKDWORD, 0x81ff010a);
1522 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK](1)S%x, 0x8%x54 = 0x%x\n", path, 1 << path,
1523 rtw89_phy_read32_mask(rtwdev, R_CFIR_LUT + (path << 8), MASKDWORD));
1524 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK](1)S%x, 0x8%x04 = 0x%x\n", path, 1 << path,
1525 rtw89_phy_read32_mask(rtwdev, R_COEF_SEL + (path << 8), MASKDWORD));
1528 static void _iqk_macbb_setting(struct rtw89_dev *rtwdev,
1536 kpath = _kpath(rtwdev, phy_idx);
1549 rtw89_phy_write32_mask(rtwdev, def->addr, def->mask, def->data);
1552 static void _iqk_init(struct rtw89_dev *rtwdev)
1554 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1557 rtw89_phy_write32_mask(rtwdev, R_IQKINF, MASKDWORD, 0x0);
1561 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]===>%s\n", __func__);
1583 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath)
1595 rtwdev, path, RR_MOD, RR_MOD_MASK);
1596 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1601 static void _tmac_tx_pause(struct rtw89_dev *rtwdev, enum rtw89_phy_idx band_idx,
1607 _wait_rx_mode(rtwdev, _kpath(rtwdev, band_idx));
1610 static void _doiqk(struct rtw89_dev *rtwdev, bool force,
1613 struct rtw89_iqk_info *iqk_info = &rtwdev->iqk;
1616 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB);
1618 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_ONESHOT_START);
1620 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1625 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]Test Ver 0x%x\n", iqk_info->version);
1626 _iqk_get_ch_info(rtwdev, phy_idx, path);
1628 _rfk_backup_bb_reg(rtwdev, &backup_bb_val[0]);
1629 _rfk_backup_rf_reg(rtwdev, &backup_rf_val[path][0], path);
1630 _iqk_macbb_setting(rtwdev, phy_idx, path);
1631 _iqk_preset(rtwdev, path);
1632 _iqk_start_iqk(rtwdev, phy_idx, path);
1633 _iqk_restore(rtwdev, path);
1634 _iqk_afebb_restore(rtwdev, phy_idx, path);
1635 _rfk_restore_bb_reg(rtwdev, &backup_bb_val[0]);
1636 _rfk_restore_rf_reg(rtwdev, &backup_rf_val[path][0], path);
1638 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_ONESHOT_STOP);
1641 static void _iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, bool force)
1643 u8 kpath = _kpath(rtwdev, phy_idx);
1647 _doiqk(rtwdev, force, phy_idx, RF_PATH_A);
1650 _doiqk(rtwdev, force, phy_idx, RF_PATH_B);
1653 _doiqk(rtwdev, force, phy_idx, RF_PATH_A);
1654 _doiqk(rtwdev, force, phy_idx, RF_PATH_B);
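
The truncated switch at lines 1647-1654 dispatches one _doiqk() per active path; a reconstruction, assuming the conventional kpath cases:

	switch (kpath) {
	case RF_A:
		_doiqk(rtwdev, force, phy_idx, RF_PATH_A);
		break;
	case RF_B:
		_doiqk(rtwdev, force, phy_idx, RF_PATH_B);
		break;
	case RF_AB:
		_doiqk(rtwdev, force, phy_idx, RF_PATH_A);
		_doiqk(rtwdev, force, phy_idx, RF_PATH_B);
		break;
	default:
		break;
	}
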
1661 static void _dpk_bkup_kip(struct rtw89_dev *rtwdev, const u32 reg[],
1668 rtw89_phy_read32_mask(rtwdev, reg[i] + (path << 8), MASKDWORD);
1669 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Backup 0x%x = %x\n",
1674 static void _dpk_reload_kip(struct rtw89_dev *rtwdev, const u32 reg[],
1680 rtw89_phy_write32_mask(rtwdev, reg[i] + (path << 8), MASKDWORD,
1682 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Reload 0x%x = %x\n",
1687 static u8 _dpk_order_convert(struct rtw89_dev *rtwdev)
1692 order = rtw89_phy_read32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_OP);
1695 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] convert MDPD order to 0x%x\n", val);
1700 static void _dpk_onoff(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, bool off)
1702 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1707 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
1708 MASKBYTE3, _dpk_order_convert(rtwdev) << 1 | val);
1710 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] DPK %s !!!\n", path,
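
_dpk_onoff packs the on/off decision and the converted MDPD order into byte 3 of the per-path, per-kidx DPD_CH0A word. A sketch of the value it composes, assuming the enable bit also requires the stored path_ok result (that condition is an assumption):

	u8 val, kidx = dpk->cur_idx[path];

	/* bit 0: DPD enable; bits above it: converted MDPD order */
	val = dpk->is_dpk_enable && !off && dpk->bp[path][kidx].path_ok;

	rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
			       MASKBYTE3, _dpk_order_convert(rtwdev) << 1 | val);
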
1714 static void _dpk_one_shot(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
1722 rtw89_phy_write32_mask(rtwdev, R_NCTL_CFG, MASKDWORD, dpk_cmd);
1726 rtwdev, 0xbff8, MASKBYTE0);
1728 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] one-shot over 20ms!!!!\n");
1732 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, MASKDWORD, 0x00030000);
1736 rtwdev, 0x80fc, MASKLWORD);
1738 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] one-shot over 20ms!!!!\n");
1740 rtw89_phy_write32_mask(rtwdev, R_NCTL_N1, MASKBYTE0, 0x0);
1742 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1756 static void _dpk_rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
1759 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_EN_TIA_IDA, 0x3);
1760 _set_rx_dck(rtwdev, phy, path);
1763 static void _dpk_information(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
1766 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
1767 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1775 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1778 rtwdev->is_tssi_mode[path] ? "on" : "off",
1779 rtwdev->dbcc_en ? "on" : "off",
1787 static void _dpk_bb_afe_setting(struct rtw89_dev *rtwdev,
1791 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
1793 rtw89_rfk_parser(rtwdev, &rtw8852b_dpk_afe_defs_tbl);
1796 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1, B_P0_CFCH_EX, 0x1);
1797 rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1, B_PATH1_BW_SEL_EX, 0x1);
1800 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1804 static void _dpk_bb_afe_restore(struct rtw89_dev *rtwdev,
1808 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
1810 rtw89_rfk_parser(rtwdev, &rtw8852b_dpk_afe_restore_defs_tbl);
1812 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1816 rtw89_phy_write32_mask(rtwdev, R_P0_CFCH_BW1, B_P0_CFCH_EX, 0x0);
1817 rtw89_phy_write32_mask(rtwdev, R_PATH1_BW_SEL_V1, B_PATH1_BW_SEL_EX, 0x0);
1821 static void _dpk_tssi_pause(struct rtw89_dev *rtwdev,
1824 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
1827 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d TSSI %s\n", path,
1831 static void _dpk_kip_restore(struct rtw89_dev *rtwdev,
1834 rtw89_rfk_parser(rtwdev, &rtw8852b_dpk_kip_defs_tbl);
1836 if (rtwdev->hal.cv > CHIP_CAV)
1837 rtw89_phy_write32_mask(rtwdev, R_DPD_COM + (path << 8), B_DPD_COM_OF, 0x1);
1839 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d restore KIP\n", path);
1842 static void _dpk_lbk_rxiqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
1848 cur_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASKRXBB);
1850 rtw89_phy_write32_mask(rtwdev, R_MDPK_RX_DCK, B_MDPK_RX_DCK_EN, 0x1);
1851 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8), B_IQK_RES_RXCFIR, 0x0);
1853 tmp = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
1854 rtw89_write_rf(rtwdev, path, RR_RSV4, RFREG_MASK, tmp);
1855 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASKMODE, 0xd);
1856 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_PLLEN, 0x1);
1859 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT1, 0x13);
1861 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT1, 0x00);
1863 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT1, 0x05);
1865 rtw89_write_rf(rtwdev, path, RR_XGLNA2, RR_XGLNA2_SW, 0x0);
1866 rtw89_write_rf(rtwdev, path, RR_RXKPLL, RR_RXKPLL_POW, 0x0);
1867 rtw89_write_rf(rtwdev, path, RR_RXKPLL, RFREG_MASK, 0x80014);
1870 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
1871 rtw89_phy_write32_mask(rtwdev, R_IQK_DIF4, B_IQK_DIF4_RXT, 0x025);
1873 _dpk_one_shot(rtwdev, phy, path, LBK_RXIQK);
1875 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d LBK RXIQC = 0x%x\n", path,
1876 rtw89_phy_read32_mask(rtwdev, R_RXIQC, MASKDWORD));
1878 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);
1879 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_PLLEN, 0x0);
1880 rtw89_phy_write32_mask(rtwdev, R_MDPK_RX_DCK, B_MDPK_RX_DCK_EN, 0x0);
1881 rtw89_phy_write32_mask(rtwdev, R_KPATH_CFG, B_KPATH_CFG_ED, 0x0);
1882 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_DI, 0x1);
1883 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASKMODE, 0x5);
1886 static void _dpk_get_thermal(struct rtw89_dev *rtwdev, u8 kidx, enum rtw89_rf_path path)
1888 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1890 rtw89_write_rf(rtwdev, path, RR_TM, RR_TM_TRI, 0x1);
1891 rtw89_write_rf(rtwdev, path, RR_TM, RR_TM_TRI, 0x0);
1892 rtw89_write_rf(rtwdev, path, RR_TM, RR_TM_TRI, 0x1);
1896 dpk->bp[path][kidx].ther_dpk = rtw89_read_rf(rtwdev, path, RR_TM, RR_TM_VAL);
1898 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] thermal@DPK = 0x%x\n",
1902 static void _dpk_rf_setting(struct rtw89_dev *rtwdev, u8 gain,
1905 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1908 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, 0x50220);
1909 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_FATT, 0xf2);
1910 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1);
1911 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1);
1913 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, 0x50220);
1914 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RAA2_SWATT, 0x5);
1915 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1);
1916 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1);
1917 rtw89_write_rf(rtwdev, path, RR_RXA_LNA, RFREG_MASK, 0x920FC);
1918 rtw89_write_rf(rtwdev, path, RR_XALNA2, RFREG_MASK, 0x002C0);
1919 rtw89_write_rf(rtwdev, path, RR_IQGEN, RFREG_MASK, 0x38800);
1922 rtw89_write_rf(rtwdev, path, RR_RCKD, RR_RCKD_BW, 0x1);
1923 rtw89_write_rf(rtwdev, path, RR_BTC, RR_BTC_TXBB, dpk->bp[path][kidx].bw + 1);
1924 rtw89_write_rf(rtwdev, path, RR_BTC, RR_BTC_RXBB, 0x0);
1926 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1928 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK),
1929 rtw89_read_rf(rtwdev, path, RR_TXIG, RFREG_MASK),
1930 rtw89_read_rf(rtwdev, path, RR_BTC, RFREG_MASK));
1933 static void _dpk_bypass_rxcfir(struct rtw89_dev *rtwdev,
1937 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8),
1939 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8),
1941 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1943 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8),
1946 rtw89_phy_write32_clr(rtwdev, R_RXIQC + (path << 8), B_RXIQC_BYPASS2);
1947 rtw89_phy_write32_clr(rtwdev, R_RXIQC + (path << 8), B_RXIQC_BYPASS);
1948 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1950 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8),
1956 void _dpk_tpg_sel(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
1958 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1961 rtw89_phy_write32_clr(rtwdev, R_TPG_MOD, B_TPG_MOD_F);
1963 rtw89_phy_write32_mask(rtwdev, R_TPG_MOD, B_TPG_MOD_F, 0x2);
1965 rtw89_phy_write32_mask(rtwdev, R_TPG_MOD, B_TPG_MOD_F, 0x1);
1967 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] TPG_Select for %s\n",
1972 static void _dpk_table_select(struct rtw89_dev *rtwdev,
1978 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0 + (path << 8), MASKBYTE3, val);
1979 rtw89_debug(rtwdev, RTW89_DBG_RFK,
1984 static bool _dpk_sync_check(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
1989 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
1993 rtw89_phy_write32_clr(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL);
1995 corr_idx = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_CORI);
1996 corr_val = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_CORV);
1998 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2005 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x9);
2007 dc_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCI);
2008 dc_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCQ);
2013 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d DC I/Q, = %d / %d\n",
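
The DCI/DCQ fields read at lines 2007-2008 are raw unsigned register fields; before the signed print at line 2013 they are presumably converted from two's complement, e.g. with the kernel's sign_extend32() helper (the conversion step and the 12-bit field width are assumptions, the helper itself is standard):

	u16 dc_i, dc_q;

	dc_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCI);
	dc_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCQ);

	/* interpret the fields as signed (sign bit 11), then report magnitudes */
	dc_i = abs(sign_extend32(dc_i, 11));
	dc_q = abs(sign_extend32(dc_q, 11));
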
2026 static bool _dpk_sync(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2029 _dpk_one_shot(rtwdev, phy, path, SYNC);
2031 return _dpk_sync_check(rtwdev, path, kidx);
2034 static u16 _dpk_dgain_read(struct rtw89_dev *rtwdev)
2038 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x0);
2040 dgain = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_DCI);
2042 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] DGain = 0x%x\n", dgain);
2047 static s8 _dpk_dgain_mapping(struct rtw89_dev *rtwdev, u16 dgain)
2090 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] DGain offset = %d\n", offset);
2095 static u8 _dpk_gainloss_read(struct rtw89_dev *rtwdev)
2097 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL, 0x6);
2098 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG2, B_DPK_CFG2_ST, 0x1);
2100 return rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_GL);
2103 static void _dpk_gainloss(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2106 _dpk_table_select(rtwdev, path, kidx, 1);
2107 _dpk_one_shot(rtwdev, phy, path, GAIN_LOSS);
2110 static void _dpk_kip_preset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2113 _dpk_tpg_sel(rtwdev, path, kidx);
2114 _dpk_one_shot(rtwdev, phy, path, KIP_PRESET);
2117 static void _dpk_kip_pwr_clk_on(struct rtw89_dev *rtwdev,
2120 rtw89_phy_write32_mask(rtwdev, R_NCTL_RPT, MASKDWORD, 0x00000080);
2121 rtw89_phy_write32_mask(rtwdev, R_KIP_SYSCFG, MASKDWORD, 0x807f030a);
2122 rtw89_phy_write32_mask(rtwdev, R_CFIR_SYS + (path << 8), MASKDWORD, 0xce000a08);
2124 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] KIP Power/CLK on\n");
2127 static void _dpk_kip_set_txagc(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2130 rtw89_write_rf(rtwdev, path, RR_TXAGC, RFREG_MASK, txagc);
2131 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
2132 _dpk_one_shot(rtwdev, phy, path, DPK_TXAGC);
2133 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);
2135 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] set TXAGC = 0x%x\n", txagc);
2138 static void _dpk_kip_set_rxagc(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2143 tmp = rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK);
2144 rtw89_phy_write32_mask(rtwdev, R_KIP_MOD, B_KIP_MOD, tmp);
2145 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x1);
2146 _dpk_one_shot(rtwdev, phy, path, DPK_RXAGC);
2147 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_EN, 0x0);
2148 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, B_KIP_RPT1_SEL_V1, 0x8);
2150 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2152 rtw89_phy_read32_mask(rtwdev, R_RPT_COM, B_PRT_COM_RXBB_V1),
2153 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASKRXBB));
2156 static u8 _dpk_set_offset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2161 txagc = rtw89_read_rf(rtwdev, path, RR_TXAGC, RFREG_MASK);
2170 _dpk_kip_set_txagc(rtwdev, phy, path, txagc);
2172 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] tmp_txagc (GL=%d) = 0x%x\n",
2177 static bool _dpk_pas_read(struct rtw89_dev *rtwdev, bool is_check)
2182 rtw89_phy_write32_mask(rtwdev, R_KIP_RPT1, MASKBYTE2, 0x06);
2183 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG2, B_DPK_CFG2_ST, 0x0);
2184 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE2, 0x08);
2187 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE3, 0x00);
2188 val1_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKHWORD);
2190 val1_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKLWORD);
2193 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE3, 0x1f);
2194 val2_i = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKHWORD);
2196 val2_q = rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKLWORD);
2199 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] PAS_delta = 0x%x\n",
2204 rtw89_phy_write32_mask(rtwdev, R_DPK_CFG3, MASKBYTE3, i);
2205 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2207 rtw89_phy_read32_mask(rtwdev, R_RPT_COM, MASKDWORD));
2218 static u8 _dpk_agc(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2222 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2235 if (_dpk_sync(rtwdev, phy, path, kidx)) {
2241 dgain = _dpk_dgain_read(rtwdev);
2250 tmp_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD,
2252 offset = _dpk_dgain_mapping(rtwdev, dgain);
2264 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASKRXBB,
2266 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2270 _dpk_bypass_rxcfir(rtwdev, path, true);
2272 _dpk_lbk_rxiqk(rtwdev, phy, path);
2283 _dpk_gainloss(rtwdev, phy, path, kidx);
2284 tmp_gl_idx = _dpk_gainloss_read(rtwdev);
2286 if ((tmp_gl_idx == 0 && _dpk_pas_read(rtwdev, true)) ||
2298 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2301 tmp_txagc = _dpk_set_offset(rtwdev, phy, path, 0x3);
2310 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2313 tmp_txagc = _dpk_set_offset(rtwdev, phy, path, 0xfe);
2319 tmp_txagc = _dpk_set_offset(rtwdev, phy, path, tmp_gl_idx);
2330 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2337 static void _dpk_set_mdpd_para(struct rtw89_dev *rtwdev, u8 order)
2341 rtw89_phy_write32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_OP, order);
2342 rtw89_phy_write32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_PN, 0x3);
2343 rtw89_phy_write32_mask(rtwdev, R_MDPK_SYNC, B_MDPK_SYNC_MAN, 0x1);
2346 rtw89_phy_write32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_OP, order);
2347 rtw89_phy_write32_clr(rtwdev, R_LDL_NORM, B_LDL_NORM_PN);
2348 rtw89_phy_write32_clr(rtwdev, R_MDPK_SYNC, B_MDPK_SYNC_MAN);
2351 rtw89_phy_write32_mask(rtwdev, R_LDL_NORM, B_LDL_NORM_OP, order);
2352 rtw89_phy_write32_clr(rtwdev, R_LDL_NORM, B_LDL_NORM_PN);
2353 rtw89_phy_write32_clr(rtwdev, R_MDPK_SYNC, B_MDPK_SYNC_MAN);
2356 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2361 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2365 static void _dpk_idl_mpa(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2368 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2372 _dpk_set_mdpd_para(rtwdev, 0x2);
2374 _dpk_set_mdpd_para(rtwdev, 0x0);
2376 _dpk_one_shot(rtwdev, phy, path, MDPK_IDL);
2379 static void _dpk_fill_result(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2382 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2386 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8),
2389 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2394 rtw89_phy_write32_mask(rtwdev, R_TXAGC_RFK + (path << 8),
2398 rtw89_phy_write32_mask(rtwdev, R_DPD_BND + (path << 8) + (kidx << 2),
2401 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x1);
2402 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x0);
2406 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2409 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2412 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2413 B_DPD_ORDER_V1, _dpk_order_convert(rtwdev));
2414 rtw89_phy_write32_mask(rtwdev, R_DPD_V1 + (path << 8), MASKDWORD, 0x0);
2415 rtw89_phy_write32_mask(rtwdev, R_MDPK_SYNC, B_MDPK_SYNC_SEL, 0x0);
2418 static bool _dpk_reload_check(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2421 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2422 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2434 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8),
2438 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2445 static bool _dpk_main(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2448 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2452 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2455 _rfk_rf_direct_cntrl(rtwdev, path, false);
2456 _rfk_drf_direct_cntrl(rtwdev, path, false);
2458 _dpk_kip_pwr_clk_on(rtwdev, path);
2459 _dpk_kip_set_txagc(rtwdev, phy, path, txagc);
2460 _dpk_rf_setting(rtwdev, gain, path, kidx);
2461 _dpk_rx_dck(rtwdev, phy, path);
2463 _dpk_kip_preset(rtwdev, phy, path, kidx);
2464 _dpk_kip_set_rxagc(rtwdev, phy, path);
2465 _dpk_table_select(rtwdev, path, kidx, gain);
2467 txagc = _dpk_agc(rtwdev, phy, path, kidx, txagc, false);
2468 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] Adjust txagc = 0x%x\n", txagc);
2473 _dpk_get_thermal(rtwdev, kidx, path);
2475 _dpk_idl_mpa(rtwdev, phy, path, kidx, gain);
2477 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
2479 _dpk_fill_result(rtwdev, phy, path, kidx, gain, txagc);
2487 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] DPK %s\n", path, kidx,
2493 static void _dpk_cal_select(struct rtw89_dev *rtwdev, bool force,
2496 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2506 reloaded[path] = _dpk_reload_check(rtwdev, phy, path);
2510 _dpk_onoff(rtwdev, path, false);
2517 _rfk_backup_bb_reg(rtwdev, &backup_bb_val[0]);
2520 _dpk_bkup_kip(rtwdev, kip_reg, kip_bkup, path);
2521 _rfk_backup_rf_reg(rtwdev, &backup_rf_val[path][0], path);
2522 _dpk_information(rtwdev, phy, path);
2523 if (rtwdev->is_tssi_mode[path])
2524 _dpk_tssi_pause(rtwdev, path, true);
2527 _dpk_bb_afe_setting(rtwdev, phy, path, kpath);
2530 is_fail = _dpk_main(rtwdev, phy, path, 1);
2531 _dpk_onoff(rtwdev, path, is_fail);
2534 _dpk_bb_afe_restore(rtwdev, phy, path, kpath);
2535 _rfk_restore_bb_reg(rtwdev, &backup_bb_val[0]);
2538 _dpk_kip_restore(rtwdev, path);
2539 _dpk_reload_kip(rtwdev, kip_reg, kip_bkup, path);
2540 _rfk_restore_rf_reg(rtwdev, &backup_rf_val[path][0], path);
2541 if (rtwdev->is_tssi_mode[path])
2542 _dpk_tssi_pause(rtwdev, path, false);
2546 static bool _dpk_bypass_check(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
2548 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2549 struct rtw89_fem_info *fem = &rtwdev->fem;
2552 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2556 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2560 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2568 static void _dpk_force_bypass(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
2572 kpath = _kpath(rtwdev, phy);
2576 _dpk_onoff(rtwdev, path, true);
2580 static void _dpk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, bool force)
2582 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2584 RTW8852B_DPK_VER, rtwdev->hal.cv,
2587 if (_dpk_bypass_check(rtwdev, phy))
2588 _dpk_force_bypass(rtwdev, phy);
2590 _dpk_cal_select(rtwdev, force, phy, RF_AB);
2593 static void _dpk_track(struct rtw89_dev *rtwdev)
2595 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2607 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2611 cur_ther = ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
2613 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2624 txagc_rf = rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13),
2627 if (rtwdev->is_tssi_mode[path]) {
2628 trk_idx = rtw89_read_rf(rtwdev, path, RR_TXA, RR_TXA_TRK);
2630 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2635 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13),
2638 rtw89_phy_read32_mask(rtwdev, R_TXAGC_TP + (path << 13),
2641 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2646 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13),
2649 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2652 tmp = rtw89_phy_read32_mask(rtwdev, R_DPD_COM + (path << 8),
2656 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2663 tmp = rtw89_phy_read32_mask(rtwdev,
2681 tmp = rtw89_phy_read32_mask(rtwdev, R_DPK_TRK, B_DPK_TRK_DIS);
2683 rtw89_debug(rtwdev, RTW89_DBG_RFK_TRACK,
2687 rtw89_phy_write32_mask(rtwdev,
2690 rtw89_phy_write32_mask(rtwdev,
2697 static void _set_dpd_backoff(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
2699 struct rtw89_dpk_info *dpk = &rtwdev->dpk;
2702 kpath = _kpath(rtwdev, phy);
2704 ofdm_bkof = rtw89_phy_read32_mask(rtwdev, R_DPD_BF + (phy << 13), B_DPD_BF_OFDM);
2705 tx_scale = rtw89_phy_read32_mask(rtwdev, R_DPD_BF + (phy << 13), B_DPD_BF_SCA);
2714 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8),
2716 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2724 static void _tssi_rf_setting(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2727 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2731 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXG, 0x1);
2733 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXA, 0x1);
2736 static void _tssi_set_sys(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2739 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2742 rtw89_rfk_parser(rtwdev, &rtw8852b_tssi_sys_defs_tbl);
2745 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2749 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2754 static void _tssi_ini_txpwr_ctrl_bb(struct rtw89_dev *rtwdev,
2758 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2763 static void _tssi_ini_txpwr_ctrl_bb_he_tb(struct rtw89_dev *rtwdev,
2767 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2772 static void _tssi_set_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2775 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2780 static void _tssi_set_tmeter_tbl(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2794 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
2795 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2838 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
2841 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER_DIS, 0x0);
2842 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER_TRK, 0x1);
2845 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER, 32);
2846 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_VAL, 32);
2849 rtw89_phy_write32(rtwdev, R_P0_TSSI_BASE + i, 0x0);
2851 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
2857 rtw89_phy_write32_mask(rtwdev, R_P0_TMETER, B_P0_TMETER, thermal);
2858 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, B_P0_RFCTM_VAL,
2875 rtw89_phy_write32(rtwdev, R_P0_TSSI_BASE + i, tmp);
2877 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
2882 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, R_P0_RFCTM_RDY, 0x1);
2883 rtw89_phy_write32_mask(rtwdev, R_P0_RFCTM, R_P0_RFCTM_RDY, 0x0);
2888 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
2891 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER_DIS, 0x0);
2892 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER_TRK, 0x1);
2895 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER, 32);
2896 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, B_P1_RFCTM_VAL, 32);
2899 rtw89_phy_write32(rtwdev, R_TSSI_THOF + i, 0x0);
2901 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
2907 rtw89_phy_write32_mask(rtwdev, R_P1_TMETER, B_P1_TMETER, thermal);
2908 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, B_P1_RFCTM_VAL,
2925 rtw89_phy_write32(rtwdev, R_TSSI_THOF + i, tmp);
2927 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
2932 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, R_P1_RFCTM_RDY, 0x1);
2933 rtw89_phy_write32_mask(rtwdev, R_P1_RFCTM, R_P1_RFCTM_RDY, 0x0);
2938 static void _tssi_set_dac_gain_tbl(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2941 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2946 static void _tssi_slope_cal_org(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2949 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
2953 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2957 rtw89_rfk_parser_by_cond(rtwdev, band == RTW89_BAND_2G,
2962 static void _tssi_alignment_default(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
2965 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3017 rtw89_rfk_parser(rtwdev, tbl);
3020 static void _tssi_set_tssi_slope(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3023 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
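Note: the DCK, DAC-gain, slope and alignment setters above all funnel through the same conditional-table call, rtw89_rfk_parser_by_cond(), parsing one register table when the condition holds and the other otherwise. A minimal sketch of that pattern follows; the wrapper name and the rtw89_rfk_tbl struct name are assumptions taken from the driver's RFK parser infrastructure, and the two table arguments are hypothetical placeholders:

    /* Sketch only: apply one of two prebuilt RFK tables depending on the RF path,
     * mirroring the path == RF_PATH_A condition used by the setters listed above. */
    static void _tssi_apply_path_tbl(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
                                     const struct rtw89_rfk_tbl *tbl_a,
                                     const struct rtw89_rfk_tbl *tbl_b)
    {
            rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A, tbl_a, tbl_b);
    }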
3028 static void _tssi_set_tssi_track(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3032 rtw89_phy_write32_mask(rtwdev, R_P0_TSSIC, B_P0_TSSIC_BYPASS, 0x0);
3034 rtw89_phy_write32_mask(rtwdev, R_P1_TSSIC, B_P1_TSSIC_BYPASS, 0x0);
3037 static void _tssi_set_txagc_offset_mv_avg(struct rtw89_dev *rtwdev,
3041 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "======>%s path=%d\n", __func__,
3045 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_MV_AVG, B_P0_TSSI_MV_MIX, 0x010);
3047 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_MV_AVG, B_P1_RFCTM_DEL, 0x010);
3050 static void _tssi_enable(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
3055 _tssi_set_tssi_track(rtwdev, phy, i);
3056 _tssi_set_txagc_offset_mv_avg(rtwdev, phy, i);
3059 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_MV_AVG,
3061 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_AVG,
3063 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_AVG,
3065 rtw89_write_rf(rtwdev, i, RR_TXGA_V1,
3067 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK,
3070 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK,
3072 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK,
3074 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK,
3077 rtwdev->is_tssi_mode[RF_PATH_A] = true;
3079 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_MV_AVG,
3081 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_AVG,
3083 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_AVG,
3085 rtw89_write_rf(rtwdev, i, RR_TXGA_V1,
3087 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK,
3090 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK,
3092 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK,
3094 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK,
3097 rtwdev->is_tssi_mode[RF_PATH_B] = true;
3102 static void _tssi_disable(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
3104 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_AVG, B_P0_TSSI_EN, 0x0);
3105 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_RFC, 0x1);
3106 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_MV_AVG, B_P0_TSSI_MV_CLR, 0x1);
3107 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_AVG, B_P1_TSSI_EN, 0x0);
3108 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_RFC, 0x1);
3109 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_MV_AVG, B_P1_TSSI_MV_CLR, 0x1);
3111 rtwdev->is_tssi_mode[RF_PATH_A] = false;
3112 rtwdev->is_tssi_mode[RF_PATH_B] = false;
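Note: _tssi_enable() and _tssi_disable() are used as a matched pair: rtw8852b_tssi() and rtw8852b_tssi_scan() first disable TSSI, rewrite the per-path setup, then re-enable it, so is_tssi_mode[] only reads true while tracking is armed. A minimal sketch of that pattern, with the reconfiguration step left as a placeholder comment rather than a real helper from this file:

    /* Sketch only: bracket a TSSI reconfiguration with the disable/enable pair above. */
    static void _tssi_reconfigure(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
    {
            _tssi_disable(rtwdev, phy);     /* clears TSSI_EN, marks is_tssi_mode[] false */

            /* ... per-path TSSI setup would go here ... */

            _tssi_enable(rtwdev, phy);      /* re-arms tracking, marks is_tssi_mode[] true */
    }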
3115 static u32 _tssi_get_cck_group(struct rtw89_dev *rtwdev, u8 ch)
3141 static u32 _tssi_get_ofdm_group(struct rtw89_dev *rtwdev, u8 ch)
3209 static u32 _tssi_get_trim_group(struct rtw89_dev *rtwdev, u8 ch)
3233 static s8 _tssi_get_ofdm_de(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3236 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3237 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3244 gidx = _tssi_get_ofdm_group(rtwdev, ch);
3246 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3256 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3262 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3269 static s8 _tssi_get_ofdm_trim_de(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3272 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3273 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3280 tgidx = _tssi_get_trim_group(rtwdev, ch);
3282 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3293 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3299 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3307 static void _tssi_set_efuse_to_de(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
3309 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3310 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3318 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "[TSSI][TRIM]: phy=%d ch=%d\n",
3322 gidx = _tssi_get_cck_group(rtwdev, ch);
3323 trim_de = _tssi_get_ofdm_trim_de(rtwdev, phy, i);
3326 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3330 rtw89_phy_write32_mask(rtwdev, _tssi_de_cck_long[i], _TSSI_DE_MASK, val);
3331 rtw89_phy_write32_mask(rtwdev, _tssi_de_cck_short[i], _TSSI_DE_MASK, val);
3333 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3336 rtw89_phy_read32_mask(rtwdev, _tssi_de_cck_long[i],
3339 ofdm_de = _tssi_get_ofdm_de(rtwdev, phy, i);
3340 trim_de = _tssi_get_ofdm_trim_de(rtwdev, phy, i);
3343 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3347 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_20m[i], _TSSI_DE_MASK, val);
3348 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_40m[i], _TSSI_DE_MASK, val);
3349 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_80m[i], _TSSI_DE_MASK, val);
3350 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_80m_80m[i], _TSSI_DE_MASK, val);
3351 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_5m[i], _TSSI_DE_MASK, val);
3352 rtw89_phy_write32_mask(rtwdev, _tssi_de_mcs_10m[i], _TSSI_DE_MASK, val);
3354 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
3357 rtw89_phy_read32_mask(rtwdev, _tssi_de_mcs_20m[i],
3362 static void _tssi_alimentk_dump_result(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
3364 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3368 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K1 + (path << 13), MASKDWORD),
3370 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K2 + (path << 13), MASKDWORD),
3372 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD),
3374 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD),
3376 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K5 + (path << 13), MASKDWORD),
3378 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD),
3380 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD),
3382 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K8 + (path << 13), MASKDWORD));
3385 static void _tssi_alimentk_done(struct rtw89_dev *rtwdev,
3388 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3389 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3393 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3408 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD,
3410 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD,
3412 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD,
3414 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD,
3418 _tssi_alimentk_dump_result(rtwdev, path);
3421 static void _tssi_hw_tx(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3437 rtw8852bx_bb_set_plcp_tx(rtwdev);
3438 rtw8852bx_bb_cfg_tx_path(rtwdev, path);
3439 rtw8852bx_bb_ctrl_rx_path(rtwdev, rx_path);
3440 rtw8852bx_bb_set_power(rtwdev, pwr_dbm, phy);
3443 rtw8852bx_bb_set_pmac_pkt_tx(rtwdev, enable, cnt, period, 20, phy);
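Note: _tssi_hw_tx() drives PMAC packet transmission for the alignment measurement, and _tssi_get_cw_report() below gauges progress by sampling R_TX_COUNTER around the burst. A minimal sketch of that measurement pattern, with the trailing parameter types (count, period, power) inferred from the 100/5000/power[j] call sites above:

    /* Sketch only: fire a fixed burst on one path and report how many frames
     * went out, using the counter read the same way _tssi_get_cw_report() does. */
    static u32 _tssi_tx_burst_count(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
                                    enum rtw89_rf_path path, s16 pwr_dbm)
    {
            u32 before, after;

            before = rtw89_phy_read32_mask(rtwdev, R_TX_COUNTER, MASKLWORD);
            _tssi_hw_tx(rtwdev, phy, path, 100, 5000, pwr_dbm, true);
            after = rtw89_phy_read32_mask(rtwdev, R_TX_COUNTER, MASKLWORD);
            _tssi_hw_tx(rtwdev, phy, path, 100, 5000, pwr_dbm, false);

            return after - before;
    }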
3446 static void _tssi_backup_bb_registers(struct rtw89_dev *rtwdev,
3453 reg_backup[i] = rtw89_phy_read32_mask(rtwdev, reg[i], MASKDWORD);
3455 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3461 static void _tssi_reload_bb_registers(struct rtw89_dev *rtwdev,
3469 rtw89_phy_write32_mask(rtwdev, reg[i], MASKDWORD, reg_backup[i]);
3471 rtw89_debug(rtwdev, RTW89_DBG_RFK,
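Note: the backup/reload pair above saves and restores full MASKDWORD values for a caller-supplied register list; _tssi_alimentk() brackets its measurement with them (calls at 3631 and 3733). A minimal usage sketch, assuming a trimmed two-entry register list drawn from writes visible in this file rather than the real bb_reg[] array:

    /* Sketch only: temporarily override the TSSI averaging fields and restore them. */
    static void _tssi_with_avg_override(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
    {
            static const u32 regs[] = { R_P0_TSSI_AVG, R_P1_TSSI_AVG };
            u32 backup[ARRAY_SIZE(regs)];

            _tssi_backup_bb_registers(rtwdev, phy, regs, backup, ARRAY_SIZE(regs));

            rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_AVG, B_P0_TSSI_AVG, 0x8);
            rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_AVG, B_P1_TSSI_AVG, 0x8);
            /* ... measurement would run here ... */

            _tssi_reload_bb_registers(rtwdev, phy, regs, backup, ARRAY_SIZE(regs));
    }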
3477 static u8 _tssi_ch_to_idx(struct rtw89_dev *rtwdev, u8 channel)
3495 static bool _tssi_get_cw_report(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3505 rtw89_phy_write32_mask(rtwdev, _tssi_trigger[path], B_P0_TSSI_EN, 0x0);
3506 rtw89_phy_write32_mask(rtwdev, _tssi_trigger[path], B_P0_TSSI_EN, 0x1);
3508 tx_counter = rtw89_phy_read32_mask(rtwdev, R_TX_COUNTER, MASKLWORD);
3510 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_trigger[path], MASKDWORD);
3511 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3516 _tssi_hw_tx(rtwdev, phy, path, 100, 5000, power[j], true);
3518 _tssi_hw_tx(rtwdev, phy, RF_PATH_ABCD, 100, 5000, power[j], true);
3520 tx_counter_tmp = rtw89_phy_read32_mask(rtwdev, R_TX_COUNTER, MASKLWORD);
3523 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3528 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_rpt_addr[path],
3536 rtw89_phy_read32_mask(rtwdev, R_TX_COUNTER, MASKLWORD);
3539 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3545 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3549 _tssi_hw_tx(rtwdev, phy, path, 100, 5000, power[j], false);
3554 rtw89_phy_read32_mask(rtwdev, _tssi_cw_rpt_addr[path], B_TSSI_CWRPT);
3556 _tssi_hw_tx(rtwdev, phy, path, 100, 5000, power[j], false);
3558 tx_counter_tmp = rtw89_phy_read32_mask(rtwdev, R_TX_COUNTER, MASKLWORD);
3561 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3569 static void _tssi_alimentk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3576 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3577 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3581 u8 ch_idx = _tssi_ch_to_idx(rtwdev, channel);
3592 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3597 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD,
3599 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD,
3601 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD,
3603 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD,
3606 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3608 _tssi_alimentk_dump_result(rtwdev, path);
3630 rtw8852bx_bb_backup_tssi(rtwdev, phy, &tssi_bak);
3631 _tssi_backup_bb_registers(rtwdev, phy, bb_reg, bb_reg_backup, ARRAY_SIZE(bb_reg_backup));
3633 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_AVG, B_P0_TSSI_AVG, 0x8);
3634 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_AVG, B_P1_TSSI_AVG, 0x8);
3635 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_MV_AVG, B_P0_TSSI_MV_AVG, 0x2);
3636 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_MV_AVG, B_P1_TSSI_MV_AVG, 0x2);
3638 ok = _tssi_get_cw_report(rtwdev, phy, path, power, tssi_cw_rpt);
3643 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3648 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_default_addr[path][1],
3655 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_default_addr[path][2],
3660 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_default_addr[path][3],
3670 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM1, B_P0_TSSI_ALIM1, tmp);
3671 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM2, B_P0_TSSI_ALIM2, tmp);
3673 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3675 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3, B_P0_TSSI_ALIM31),
3676 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1, B_P0_TSSI_ALIM11),
3677 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1, B_P0_TSSI_ALIM12),
3678 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1, B_P0_TSSI_ALIM13));
3684 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_ALIM1, B_P1_TSSI_ALIM1, tmp);
3685 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_ALIM2, B_P1_TSSI_ALIM2, tmp);
3687 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3689 rtw89_phy_read32_mask(rtwdev, R_P1_TSSI_ALIM3, B_P1_TSSI_ALIM31),
3690 rtw89_phy_read32_mask(rtwdev, R_P1_TSSI_ALIM1, B_P1_TSSI_ALIM11),
3691 rtw89_phy_read32_mask(rtwdev, R_P1_TSSI_ALIM1, B_P1_TSSI_ALIM12),
3692 rtw89_phy_read32_mask(rtwdev, R_P1_TSSI_ALIM1, B_P1_TSSI_ALIM13));
3697 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD);
3699 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD);
3701 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD);
3703 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD);
3707 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD);
3709 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD);
3711 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD);
3713 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD);
3715 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3719 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3723 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3727 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3733 _tssi_reload_bb_registers(rtwdev, phy, bb_reg, bb_reg_backup, ARRAY_SIZE(bb_reg_backup));
3734 rtw8852bx_bb_restore_tssi(rtwdev, phy, &tssi_bak);
3735 rtw8852bx_bb_tx_mode_switch(rtwdev, phy, 0);
3740 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3745 void rtw8852b_dpk_init(struct rtw89_dev *rtwdev)
3747 _set_dpd_backoff(rtwdev, RTW89_PHY_0);
3750 void rtw8852b_rck(struct rtw89_dev *rtwdev)
3755 _rck(rtwdev, path);
3758 void rtw8852b_dack(struct rtw89_dev *rtwdev)
3760 u8 phy_map = rtw89_btc_phymap(rtwdev, RTW89_PHY_0, 0);
3762 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_START);
3763 _dac_cal(rtwdev, false);
3764 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DACK, BTC_WRFK_STOP);
3767 void rtw8852b_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
3769 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0);
3772 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_START);
3773 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
3774 _wait_rx_mode(rtwdev, _kpath(rtwdev, phy_idx));
3776 _iqk_init(rtwdev);
3777 _iqk(rtwdev, phy_idx, false);
3779 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
3780 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_STOP);
3783 void rtw8852b_rx_dck(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
3785 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0);
3788 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_RXDCK, BTC_WRFK_START);
3789 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
3790 _wait_rx_mode(rtwdev, _kpath(rtwdev, phy_idx));
3792 _rx_dck(rtwdev, phy_idx);
3794 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
3795 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_RXDCK, BTC_WRFK_STOP);
3798 void rtw8852b_dpk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
3800 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0);
3803 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DPK, BTC_WRFK_START);
3804 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
3805 _wait_rx_mode(rtwdev, _kpath(rtwdev, phy_idx));
3807 rtwdev->dpk.is_dpk_enable = true;
3808 rtwdev->dpk.is_dpk_reload_en = false;
3809 _dpk(rtwdev, phy_idx, false);
3811 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
3812 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_DPK, BTC_WRFK_STOP);
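Note: rtw8852b_iqk(), rtw8852b_rx_dck() and rtw8852b_dpk() all wrap their calibration in the same scaffold: notify BTC that an RFK starts, pause scheduled TX, wait for the paths to settle in RX mode, run the calibration, then resume TX and notify BTC again. A condensed sketch of that shared skeleton; the do_cal callback is hypothetical (each entry point inlines its own body), and each real entry point passes its own BTC_WRFKT_* tag:

    /* Sketch only: the scaffold shared by the IQK/RX-DCK/DPK entry points above. */
    static void _rfk_with_scaffold(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx,
                                   void (*do_cal)(struct rtw89_dev *rtwdev,
                                                  enum rtw89_phy_idx phy_idx))
    {
            u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, 0);
            u32 tx_en;

            rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_START);
            rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
            _wait_rx_mode(rtwdev, _kpath(rtwdev, phy_idx));

            do_cal(rtwdev, phy_idx);

            rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
            rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_STOP);
    }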
3815 void rtw8852b_dpk_track(struct rtw89_dev *rtwdev)
3817 _dpk_track(rtwdev);
3820 void rtw8852b_tssi(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, bool hwtx_en)
3822 u8 phy_map = rtw89_btc_phymap(rtwdev, phy, RF_AB);
3826 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "[TSSI] %s: phy=%d\n", __func__, phy);
3827 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_ONESHOT_START);
3829 _tssi_disable(rtwdev, phy);
3832 _tssi_rf_setting(rtwdev, phy, i);
3833 _tssi_set_sys(rtwdev, phy, i);
3834 _tssi_ini_txpwr_ctrl_bb(rtwdev, phy, i);
3835 _tssi_ini_txpwr_ctrl_bb_he_tb(rtwdev, phy, i);
3836 _tssi_set_dck(rtwdev, phy, i);
3837 _tssi_set_tmeter_tbl(rtwdev, phy, i);
3838 _tssi_set_dac_gain_tbl(rtwdev, phy, i);
3839 _tssi_slope_cal_org(rtwdev, phy, i);
3840 _tssi_alignment_default(rtwdev, phy, i, true);
3841 _tssi_set_tssi_slope(rtwdev, phy, i);
3843 rtw89_chip_stop_sch_tx(rtwdev, phy, &tx_en, RTW89_SCH_TX_SEL_ALL);
3844 _tmac_tx_pause(rtwdev, phy, true);
3846 _tssi_alimentk(rtwdev, phy, i);
3847 _tmac_tx_pause(rtwdev, phy, false);
3848 rtw89_chip_resume_sch_tx(rtwdev, phy, tx_en);
3851 _tssi_enable(rtwdev, phy);
3852 _tssi_set_efuse_to_de(rtwdev, phy);
3854 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_IQK, BTC_WRFK_ONESHOT_STOP);
3857 void rtw8852b_tssi_scan(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy)
3859 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3860 struct rtw89_tssi_info *tssi_info = &rtwdev->tssi;
3865 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3879 _tssi_disable(rtwdev, phy);
3882 _tssi_rf_setting(rtwdev, phy, i);
3883 _tssi_set_sys(rtwdev, phy, i);
3884 _tssi_set_tmeter_tbl(rtwdev, phy, i);
3887 _tssi_alimentk_done(rtwdev, phy, i);
3889 _tssi_alignment_default(rtwdev, phy, i, true);
3892 _tssi_enable(rtwdev, phy);
3893 _tssi_set_efuse_to_de(rtwdev, phy);
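Note: rtw8852b_tssi() runs the full bring-up including the hardware-TX alignment step, while rtw8852b_tssi_scan() only re-applies the channel-dependent pieces and reuses the stored alignment via _tssi_alimentk_done(). A caller choosing between them might look like the sketch below; the initial_cal flag is a hypothetical parameter, not something defined in this file:

    /* Sketch only: pick the heavy or the lightweight TSSI path. */
    static void _tssi_apply(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
                            bool initial_cal)
    {
            if (initial_cal)
                    rtw8852b_tssi(rtwdev, phy, true);   /* full setup + HW-TX alignment */
            else
                    rtw8852b_tssi_scan(rtwdev, phy);    /* reuse stored alignment */
    }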
3896 static void rtw8852b_tssi_default_txagc(struct rtw89_dev *rtwdev,
3899 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);
3902 rtw89_debug(rtwdev, RTW89_DBG_RFK, "======> %s ch=%d\n",
3906 if (!rtwdev->is_tssi_mode[RF_PATH_A] && !rtwdev->is_tssi_mode[RF_PATH_B])
3907 rtw8852b_tssi(rtwdev, phy, true);
3911 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3914 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT),
3915 rtw89_phy_read32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT));
3917 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT, 0xc0);
3918 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT, 0xc0);
3919 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT_EN, 0x0);
3920 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT_EN, 0x1);
3921 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT_EN, 0x0);
3922 rtw89_phy_write32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT_EN, 0x1);
3924 _tssi_alimentk_done(rtwdev, phy, RF_PATH_A);
3925 _tssi_alimentk_done(rtwdev, phy, RF_PATH_B);
3927 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3930 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_TRK, B_P0_TSSI_OFT),
3931 rtw89_phy_read32_mask(rtwdev, R_P1_TSSI_TRK, B_P1_TSSI_OFT));
3933 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3937 void rtw8852b_wifi_scan_notify(struct rtw89_dev *rtwdev, bool scan_start,
3941 rtw8852b_tssi_default_txagc(rtwdev, phy_idx, true);
3943 rtw8852b_tssi_default_txagc(rtwdev, phy_idx, false);
3946 static void _bw_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
3952 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]===> %s\n", __func__);
3954 rf_reg18 = rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK);
3956 rtw89_debug(rtwdev, RTW89_DBG_RFK,
3975 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]Fail to set CH\n");
3981 rtw89_write_rf(rtwdev, path, reg18_addr, RFREG_MASK, rf_reg18);
3983 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK] set %x at path%d, %x =0x%x\n",
3985 rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK));
3988 static void _ctrl_bw(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
3991 _bw_setting(rtwdev, RF_PATH_A, bw, true);
3992 _bw_setting(rtwdev, RF_PATH_B, bw, true);
3993 _bw_setting(rtwdev, RF_PATH_A, bw, false);
3994 _bw_setting(rtwdev, RF_PATH_B, bw, false);
3997 static bool _set_s0_arfc18(struct rtw89_dev *rtwdev, u32 val)
4003 bak = rtw89_read_rf(rtwdev, RF_PATH_A, RR_LDO, RFREG_MASK);
4004 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LDO, RR_LDO_SEL, 0x1);
4005 rtw89_write_rf(rtwdev, RF_PATH_A, RR_CFGCH, RFREG_MASK, val);
4008 false, rtwdev, RF_PATH_A, RR_LPF, RR_LPF_BUSY);
4010 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[LCK]LCK timeout\n");
4012 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LDO, RFREG_MASK, bak);
4017 static void _lck_check(struct rtw89_dev *rtwdev)
4021 if (rtw89_read_rf(rtwdev, RF_PATH_A, RR_SYNFB, RR_SYNFB_LK) == 0) {
4022 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[LCK]SYN MMD reset\n");
4024 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MMD, RR_MMD_RST_EN, 0x1);
4025 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MMD, RR_MMD_RST_SYN, 0x0);
4026 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MMD, RR_MMD_RST_SYN, 0x1);
4027 rtw89_write_rf(rtwdev, RF_PATH_A, RR_MMD, RR_MMD_RST_EN, 0x0);
4032 if (rtw89_read_rf(rtwdev, RF_PATH_A, RR_SYNFB, RR_SYNFB_LK) == 0) {
4033 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[LCK]re-set RF 0x18\n");
4035 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LCK_TRG, RR_LCK_TRGSEL, 0x1);
4036 tmp = rtw89_read_rf(rtwdev, RF_PATH_A, RR_CFGCH, RFREG_MASK);
4037 _set_s0_arfc18(rtwdev, tmp);
4038 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LCK_TRG, RR_LCK_TRGSEL, 0x0);
4041 if (rtw89_read_rf(rtwdev, RF_PATH_A, RR_SYNFB, RR_SYNFB_LK) == 0) {
4042 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[LCK]SYN off/on\n");
4044 tmp = rtw89_read_rf(rtwdev, RF_PATH_A, RR_POW, RFREG_MASK);
4045 rtw89_write_rf(rtwdev, RF_PATH_A, RR_POW, RFREG_MASK, tmp);
4046 tmp = rtw89_read_rf(rtwdev, RF_PATH_A, RR_SX, RFREG_MASK);
4047 rtw89_write_rf(rtwdev, RF_PATH_A, RR_SX, RFREG_MASK, tmp);
4049 rtw89_write_rf(rtwdev, RF_PATH_A, RR_SYNLUT, RR_SYNLUT_MOD, 0x1);
4050 rtw89_write_rf(rtwdev, RF_PATH_A, RR_POW, RR_POW_SYN, 0x0);
4051 rtw89_write_rf(rtwdev, RF_PATH_A, RR_POW, RR_POW_SYN, 0x3);
4052 rtw89_write_rf(rtwdev, RF_PATH_A, RR_SYNLUT, RR_SYNLUT_MOD, 0x0);
4054 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LCK_TRG, RR_LCK_TRGSEL, 0x1);
4055 tmp = rtw89_read_rf(rtwdev, RF_PATH_A, RR_CFGCH, RFREG_MASK);
4056 _set_s0_arfc18(rtwdev, tmp);
4057 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LCK_TRG, RR_LCK_TRGSEL, 0x0);
4059 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[LCK]0xb2=%x, 0xc5=%x\n",
4060 rtw89_read_rf(rtwdev, RF_PATH_A, RR_VCO, RFREG_MASK),
4061 rtw89_read_rf(rtwdev, RF_PATH_A, RR_SYNFB, RFREG_MASK));
4065 static void _set_ch(struct rtw89_dev *rtwdev, u32 val)
4069 timeout = _set_s0_arfc18(rtwdev, val);
4071 _lck_check(rtwdev);
4074 static void _ch_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
4081 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK]===> %s\n", __func__);
4083 rf_reg18 = rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK);
4097 _set_ch(rtwdev, rf_reg18);
4099 rtw89_write_rf(rtwdev, path, reg18_addr, RFREG_MASK, rf_reg18);
4101 rtw89_write_rf(rtwdev, path, RR_LCKST, RR_LCKST_BIN, 0);
4102 rtw89_write_rf(rtwdev, path, RR_LCKST, RR_LCKST_BIN, 1);
4104 rtw89_debug(rtwdev, RTW89_DBG_RFK,
4107 rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK));
4110 static void _ctrl_ch(struct rtw89_dev *rtwdev, u8 central_ch)
4112 _ch_setting(rtwdev, RF_PATH_A, central_ch, true);
4113 _ch_setting(rtwdev, RF_PATH_B, central_ch, true);
4114 _ch_setting(rtwdev, RF_PATH_A, central_ch, false);
4115 _ch_setting(rtwdev, RF_PATH_B, central_ch, false);
4118 static void _set_rxbb_bw(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw,
4121 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x1);
4122 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M2, 0x12);
4125 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0x1b);
4127 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0x13);
4129 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0xb);
4131 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0x3);
4133 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK] set S%d RXBB BW 0x3F = 0x%x\n", path,
4134 rtw89_read_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB));
4136 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x0);
4139 static void _rxbb_bw(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy,
4144 kpath = _kpath(rtwdev, phy);
4150 _set_rxbb_bw(rtwdev, bw, path);
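Note: _set_rxbb_bw() brackets each RXBB bandwidth change in a LUT write window: open the window via RR_LUTWE2_RTXBW, select the entry with RR_LUTWA_M2 = 0x12, write the low-band code into RR_LUTWD0_LB, then close the window. A minimal sketch of that bracket with the bandwidth-to-code mapping left to the caller:

    /* Sketch only: one RXBB LUT write, following the open/select/write/close
     * order visible in _set_rxbb_bw() above. */
    static void _set_rxbb_lb(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u32 lb_code)
    {
            rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x1);  /* open LUT write window */
            rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M2, 0x12);      /* entry select, as above */
            rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, lb_code); /* e.g. 0x1b/0x13/0xb/0x3 */
            rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x0);  /* close the window */
    }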
4154 static void rtw8852b_ctrl_bw_ch(struct rtw89_dev *rtwdev,
4158 _ctrl_ch(rtwdev, central_ch);
4159 _ctrl_bw(rtwdev, phy, bw);
4160 _rxbb_bw(rtwdev, phy, bw);
4163 void rtw8852b_set_channel_rf(struct rtw89_dev *rtwdev,
4167 rtw8852b_ctrl_bw_ch(rtwdev, phy_idx, chan->channel, chan->band_type,
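Note: rtw8852b_set_channel_rf() is the external entry point that feeds the current rtw89_chan into rtw8852b_ctrl_bw_ch(), which in turn runs _ctrl_ch(), _ctrl_bw() and _rxbb_bw() (4158-4160). A minimal caller sketch, assuming the (rtwdev, chan, phy_idx) parameter order implied by the body at 4167 and obtaining the channel the same way rtw89_chan_get() is used elsewhere in this listing:

    /* Sketch only: retune RF for the currently configured channel on PHY0. */
    static void _retune_rf_for_current_chan(struct rtw89_dev *rtwdev)
    {
            const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0);

            rtw8852b_set_channel_rf(rtwdev, chan, RTW89_PHY_0);
    }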