Lines matching refs: path
235 enum rtw89_rf_path path, bool is_bybb)
238 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
240 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
244 enum rtw89_rf_path path, bool is_bybb)
247 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x1);
249 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0);
252 static bool _iqk_check_cal(struct rtw89_dev *rtwdev, u8 path)
269 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, ret=%d\n", path, ret);
271 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, 0x8008 = 0x%x\n", path, val);
295 enum rtw89_rf_path path)
297 rtw89_write_rf(rtwdev, path, RR_DCK1, RR_DCK1_CLR, 0x0);
298 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x0);
299 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_LV, 0x1);
305 u8 path, dck_tune;
312 for (path = 0; path < RF_PATH_NUM_8852B; path++) {
313 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK);
314 dck_tune = rtw89_read_rf(rtwdev, path, RR_DCK, RR_DCK_FINE);
316 if (rtwdev->is_tssi_mode[path])
318 R_P0_TSSI_TRK + (path << 13),
321 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
322 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, 0x0);
323 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
324 _set_rx_dck(rtwdev, phy, path);
325 rtw89_write_rf(rtwdev, path, RR_DCK, RR_DCK_FINE, dck_tune);
326 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5);
328 if (rtwdev->is_tssi_mode[path])
330 R_P0_TSSI_TRK + (path << 13),
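
The "path << 13" offset in the TSSI-pause writes above (lines 318 and 330) reflects a fixed per-path stride in the BB register map: path B's copy of a P0 register sits 0x2000 above path A's (the KIP/IQK blocks later in the listing use a 0x100 stride, "path << 8", the same way). A minimal standalone sketch of that addressing; the base address below is illustrative only, the real R_P0_TSSI_TRK value lives in the driver's reg.h:

#include <stdint.h>
#include <stdio.h>

/* Illustrative base only; the driver defines the real R_P0_TSSI_TRK. */
#define R_P0_TSSI_TRK_BASE 0x5818u

/* Per-path BB address: path B sits 0x2000 (path << 13) above path A. */
static uint32_t tssi_trk_addr(unsigned int path)
{
	return R_P0_TSSI_TRK_BASE + (path << 13);
}

int main(void)
{
	for (unsigned int path = 0; path < 2; path++)
		printf("S%u: 0x%04x\n", path, (unsigned int)tssi_trk_addr(path));
	return 0;
}
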
335 static void _rck(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
342 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RCK] ====== S%d RCK ======\n", path);
344 rf_reg5 = rtw89_read_rf(rtwdev, path, RR_RSV1, RFREG_MASK);
346 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
347 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
350 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK));
353 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, 0x00240);
356 false, rtwdev, path, RR_RCKS, BIT(3));
358 rck_val = rtw89_read_rf(rtwdev, path, RR_RCKC, RR_RCKC_CA);
363 rtw89_write_rf(rtwdev, path, RR_RCKC, RFREG_MASK, rck_val);
364 rtw89_write_rf(rtwdev, path, RR_RSV1, RFREG_MASK, rf_reg5);
367 rtw89_read_rf(rtwdev, path, RR_RCKC, RFREG_MASK));
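
The RCK sequence above follows the calibration shape used throughout this file: back up RR_RSV1, force the path into RX mode, kick the hardware by writing RR_RCKC (0x00240), busy-wait on a done bit (RR_RCKS BIT(3); the call tail at line 356 looks like a read_poll_timeout-style macro), read back the calibrated RR_RCKC_CA code, and restore. A self-contained sketch of that poll-then-readback shape; the register accessor here is a stub, not the driver's rtw89_read_rf():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stub register read: pretend the done bit (BIT(3)) rises on read 3. */
static int rcks_reads;
static uint32_t read_rcks(void)
{
	return (++rcks_reads >= 3) ? (1u << 3) : 0;
}

/* Poll a done bit for a bounded number of tries, read_poll_timeout-like. */
static bool poll_done_bit(uint32_t (*read)(void), uint32_t bit, int tries)
{
	while (tries--)
		if (read() & bit)
			return true;
	return false;
}

int main(void)
{
	if (poll_done_bit(read_rcks, 1u << 3, 20))
		printf("RCK done; read back the calibrated code\n");
	else
		printf("RCK timed out; keep the previous code\n");
	return 0;
}
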
487 static void _check_addc(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
493 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
507 "[DACK]S%d,dc_re = 0x%x,dc_im =0x%x\n", path, dc_re, dc_im);
587 static void _check_dadc(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
589 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
593 _check_addc(rtwdev, path);
595 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
791 static void _iqk_rxk_setting(struct rtw89_dev *rtwdev, u8 path)
796 switch (iqk_info->iqk_band[path]) {
798 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
799 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL2G, 0x1);
800 tmp = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
801 rtw89_write_rf(rtwdev, path, RR_RSV4, RFREG_MASK, tmp);
804 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0xc);
805 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL5G, 0x1);
806 tmp = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
807 rtw89_write_rf(rtwdev, path, RR_RSV4, RFREG_MASK, tmp);
815 u8 path, u8 ktype)
824 iqk_cmd = 0x108 | (1 << (4 + path));
828 iqk_cmd = 0x208 | (1 << (4 + path));
832 iqk_cmd = 0x308 | (1 << (4 + path));
836 iqk_cmd = 0x008 | (1 << (path + 4)) |
837 (((0x8 + iqk_info->iqk_bw[path]) & 0xf) << 8);
840 iqk_cmd = 0x508 | (1 << (4 + path)) | (path << 1);
844 iqk_cmd = 0x008 | (1 << (path + 4)) |
845 (((0xb + iqk_info->iqk_bw[path]) & 0xf) << 8);
850 iqk_cmd = 0x408 | (1 << (4 + path));
855 iqk_cmd = 0x608 | (1 << (4 + path));
863 fail = _iqk_check_cal(rtwdev, path);
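
The one-shot command words in _iqk_one_shot share a single layout: the low byte is a fixed 0x08, a per-command nibble occupies bits 8..11 (0x1 through 0x6 for the fixed cases above; the two bandwidth-dependent cases fold 0x8 + iqk_bw or 0xb + iqk_bw into the same field), and BIT(4 + path) selects the path. One case (line 840) additionally mixes path into bit 1. A sketch of the composition, with the nibble values treated as opaque stand-ins for the driver's ktype enum:

#include <stdint.h>
#include <stdio.h>

/* Compose a one-shot word: 0x08 low byte, command nibble in bits 8..11,
 * BIT(4 + path) for path select, mirroring the constants in the listing. */
static unsigned int iqk_cmd(unsigned int nibble, unsigned int path)
{
	return 0x008u | ((nibble & 0xfu) << 8) | (1u << (4 + path));
}

int main(void)
{
	/* Fixed command 0x1 on path 0: 0x108 | BIT(4) = 0x118. */
	printf("fixed 0x1, S0: 0x%03x\n", iqk_cmd(0x1, 0));

	/* Bandwidth-dependent variant folds (0x8 + bw index) into the nibble. */
	unsigned int bw = 2; /* illustrative bandwidth index */
	printf("wideband,  S1: 0x%03x\n", iqk_cmd(0x8 + bw, 1));
	return 0;
}
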
870 u8 path)
878 switch (iqk_info->iqk_band[path]) {
880 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
882 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C2G,
884 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C1G,
888 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
890 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_HATT,
892 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_CC2,
899 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
901 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
903 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
905 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_RXK);
907 BIT(16 + gp + path * 4), fail);
910 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL5G, 0x0);
913 iqk_info->nb_rxcfir[path] = 0x40000002;
914 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
916 iqk_info->is_wb_rxiqk[path] = false;
918 iqk_info->nb_rxcfir[path] = 0x40000000;
919 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
921 iqk_info->is_wb_rxiqk[path] = true;
928 u8 path)
935 switch (iqk_info->iqk_band[path]) {
937 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
939 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C2G,
941 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_C1G,
945 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_RGM,
947 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_HATT,
949 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RXA2_CC2,
956 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1);
957 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x0);
958 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP_V1, gp);
959 rtw89_write_rf(rtwdev, path, RR_RXKPLL, RFREG_MASK, 0x80013);
962 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_NBRXK);
963 rtw89_phy_write32_mask(rtwdev, R_IQKINF, BIT(16 + gp + path * 4), fail);
965 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_SEL5G, 0x0);
968 iqk_info->nb_rxcfir[path] =
969 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD) | 0x2;
971 iqk_info->nb_rxcfir[path] = 0x40000002;
976 static void _iqk_rxclk_setting(struct rtw89_dev *rtwdev, u8 path)
980 if (iqk_info->iqk_bw[path] == RTW89_CHANNEL_WIDTH_80) {
1015 static bool _txk_group_sel(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1023 switch (iqk_info->iqk_band[path]) {
1025 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1027 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1029 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1031 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1035 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1037 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1039 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1041 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1048 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1050 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1052 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1054 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8),
1057 fail = _iqk_one_shot(rtwdev, phy_idx, path, ID_TXK);
1059 BIT(8 + gp + path * 4), fail);
1064 iqk_info->nb_txcfir[path] = 0x40000002;
1065 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
1067 iqk_info->is_wb_txiqk[path] = false;
1069 iqk_info->nb_txcfir[path] = 0x40000000;
1070 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8),
1072 iqk_info->is_wb_txiqk[path] = true;
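
Per-group pass/fail flags end up packed into one status word: the TX groups set BIT(8 + gp + path * 4) in R_IQKINF (line 1059) and the RX groups BIT(16 + gp + path * 4) (lines 907 and 963), i.e. four group bits per path with the two paths 4 bits apart and RX above TX. A short sketch of that packing:

#include <stdint.h>
#include <stdio.h>

/* Pack a per-group fail flag: 4 group bits per path; TX flags start at
 * bit 8 and RX flags at bit 16, as in the listing. */
static uint32_t pack_fail(uint32_t word, int base, unsigned int gp,
			  unsigned int path, int fail)
{
	uint32_t bit = 1u << (base + gp + path * 4);

	return fail ? (word | bit) : (word & ~bit);
}

int main(void)
{
	uint32_t iqkinf = 0;

	iqkinf = pack_fail(iqkinf, 8, 2, 1, 1);  /* TX gp2, path B: fail */
	iqkinf = pack_fail(iqkinf, 16, 0, 0, 1); /* RX gp0, path A: fail */
	printf("R_IQKINF image: 0x%08x\n", (unsigned int)iqkinf);
	return 0;
}
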
1078 static bool _iqk_nbtxk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1084 switch (iqk_info->iqk_band[path]) {
1086 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1088 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1090 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1092 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1096 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0,
1098 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1,
1100 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG,
1102 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8),
1109 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SEL, 0x1);
1110 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_SET, 0x1);
1111 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G2, 0x0);
1112 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_GP, gp);
1114 kfail = _iqk_one_shot(rtwdev, phy_idx, path, ID_NBTXK);
1117 iqk_info->nb_txcfir[path] =
1118 rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8),
1121 iqk_info->nb_txcfir[path] = 0x40000002;
1126 static void _lok_res_table(struct rtw89_dev *rtwdev, u8 path, u8 ibias)
1131 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, ibias = %x\n", path, ibias);
1133 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x2);
1134 if (iqk_info->iqk_band[path] == RTW89_BAND_2G)
1135 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, 0x0);
1137 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, 0x1);
1138 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, ibias);
1139 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0x0);
1140 rtw89_write_rf(rtwdev, path, RR_TXVBUF, RR_TXVBUF_DACEN, 0x1);
1142 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK]S%x, 0x7c = %x\n", path,
1143 rtw89_read_rf(rtwdev, path, RR_TXVBUF, RFREG_MASK));
1146 static bool _lok_finetune_check(struct rtw89_dev *rtwdev, u8 path)
1157 tmp = rtw89_read_rf(rtwdev, path, RR_TXMO, RFREG_MASK);
1167 iqk_info->lok_idac[ch][path] = tmp;
1169 tmp = rtw89_read_rf(rtwdev, path, RR_LOKVB, RFREG_MASK);
1178 iqk_info->lok_vbuf[ch][path] = tmp;
1181 "[IQK]S%x, lok_idac[%x][%x] = 0x%x\n", path, ch, path,
1182 iqk_info->lok_idac[ch][path]);
1184 "[IQK]S%x, lok_vbuf[%x][%x] = 0x%x\n", path, ch, path,
1185 iqk_info->lok_vbuf[ch][path]);
1190 static bool _iqk_lok(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1197 switch (iqk_info->iqk_band[path]) {
1199 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1200 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x6);
1203 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR0, 0x0);
1204 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_GR1, 0x4);
1210 switch (iqk_info->iqk_band[path]) {
1212 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1215 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1221 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x9);
1222 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_COARSE);
1223 iqk_info->lok_cor_fail[0][path] = tmp;
1225 switch (iqk_info->iqk_band[path]) {
1227 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1230 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1236 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x24);
1237 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER);
1239 switch (iqk_info->iqk_band[path]) {
1241 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1244 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x0);
1250 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x9);
1252 tmp = _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_FINE);
1253 iqk_info->lok_fin_fail[0][path] = tmp;
1255 switch (iqk_info->iqk_band[path]) {
1257 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1260 rtw89_write_rf(rtwdev, path, RR_TXIG, RR_TXIG_TG, 0x12);
1266 rtw89_phy_write32_mask(rtwdev, R_KIP_IQP + (path << 8), MASKDWORD, 0x24);
1267 _iqk_one_shot(rtwdev, phy_idx, path, ID_FLOK_VBUFFER);
1269 return _lok_finetune_check(rtwdev, path);
1272 static void _iqk_txk_setting(struct rtw89_dev *rtwdev, u8 path)
1276 switch (iqk_info->iqk_band[path]) {
1278 rtw89_write_rf(rtwdev, path, RR_XALNA2, RR_XALNA2_SW2, 0x00);
1279 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT2, 0x0);
1280 rtw89_write_rf(rtwdev, path, RR_TXG1, RR_TXG1_ATT1, 0x0);
1281 rtw89_write_rf(rtwdev, path, RR_TXG2, RR_TXG2_ATT0, 0x1);
1282 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1283 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1284 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M1, 0x00);
1285 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_IQK, 0x403e);
1289 rtw89_write_rf(rtwdev, path, RR_XGLNA2, RR_XGLNA2_SW, 0x00);
1290 rtw89_write_rf(rtwdev, path, RR_BIASA, RR_BIASA_A, 0x1);
1291 rtw89_write_rf(rtwdev, path, RR_TXGA, RR_TXGA_LOK_EXT, 0x0);
1292 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x1);
1293 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M1, 0x80);
1294 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_IQK, 0x403e);
1302 static void _iqk_txclk_setting(struct rtw89_dev *rtwdev, u8 path)
1315 static void _iqk_info_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1321 flag = iqk_info->lok_cor_fail[0][path];
1322 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FCOR << (path * 4), flag);
1323 flag = iqk_info->lok_fin_fail[0][path];
1324 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FFIN << (path * 4), flag);
1325 flag = iqk_info->iqk_tx_fail[0][path];
1326 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_FTX << (path * 4), flag);
1327 flag = iqk_info->iqk_rx_fail[0][path];
1328 rtw89_phy_write32_mask(rtwdev, R_IQKINF, B_IQKINF_F_RX << (path * 4), flag);
1330 tmp = rtw89_phy_read32_mask(rtwdev, R_IQK_RES + (path << 8), MASKDWORD);
1331 iqk_info->bp_iqkenable[path] = tmp;
1332 tmp = rtw89_phy_read32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD);
1333 iqk_info->bp_txkresult[path] = tmp;
1334 tmp = rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD);
1335 iqk_info->bp_rxkresult[path] = tmp;
1339 tmp = rtw89_phy_read32_mask(rtwdev, R_IQKINF, B_IQKINF_FAIL << (path * 4));
1342 rtw89_phy_write32_mask(rtwdev, R_IQKINF2, B_IQKINF2_FCNT << (path * 4),
1346 static void _iqk_by_path(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1354 _iqk_txclk_setting(rtwdev, path);
1358 _lok_res_table(rtwdev, path, ibias++);
1359 _iqk_txk_setting(rtwdev, path);
1360 lok_is_fail = _iqk_lok(rtwdev, phy_idx, path);
1366 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK] LOK (%d) fail\n", path);
1370 iqk_info->iqk_tx_fail[0][path] = _iqk_nbtxk(rtwdev, phy_idx, path);
1372 iqk_info->iqk_tx_fail[0][path] = _txk_group_sel(rtwdev, phy_idx, path);
1375 _iqk_rxclk_setting(rtwdev, path);
1376 _iqk_rxk_setting(rtwdev, path);
1378 iqk_info->iqk_rx_fail[0][path] = _iqk_nbrxk(rtwdev, phy_idx, path);
1380 iqk_info->iqk_rx_fail[0][path] = _rxk_group_sel(rtwdev, phy_idx, path);
1382 _iqk_info_iqk(rtwdev, phy_idx, path);
1385 static void _iqk_get_ch_info(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy, u8 path)
1395 if (iqk_info->iqk_mcc_ch[idx][path] == 0) {
1403 idx = iqk_info->iqk_table_idx[path] + 1;
1409 reg_rf18 = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
1412 iqk_info->iqk_band[path] = chan->band_type;
1413 iqk_info->iqk_bw[path] = chan->band_width;
1414 iqk_info->iqk_ch[path] = chan->channel;
1415 iqk_info->iqk_mcc_ch[idx][path] = chan->channel;
1416 iqk_info->iqk_table_idx[path] = idx;
1419 path, reg_rf18, idx);
1421 path, reg_rf18);
1425 idx, path, iqk_info->iqk_mcc_ch[idx][path]);
1433 "[IQK]S%x, iqk_info->syn1to2= 0x%x\n", path,
1438 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BAND << (path * 16),
1439 iqk_info->iqk_band[path]);
1441 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_BW << (path * 16),
1442 iqk_info->iqk_bw[path]);
1443 rtw89_phy_write32_mask(rtwdev, R_IQKCH, B_IQKCH_CH << (path * 16),
1444 iqk_info->iqk_ch[path]);
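
The channel snapshot is mirrored into R_IQKCH with one 16-bit lane per path: the band, bandwidth and channel fields are each written through a mask shifted by path * 16 (lines 1438-1444), so both paths share a single debug register pair. A sketch of the lane arithmetic for the channel field alone; the field position is illustrative, the real B_IQKCH_* masks are in the driver headers:

#include <stdint.h>
#include <stdio.h>

/* Write an 8-bit channel field into a per-path 16-bit lane. The field
 * offset (bit 8) is illustrative; only the path * 16 lane shift is
 * taken from the listing. */
static uint32_t set_iqkch_ch(uint32_t word, unsigned int path, uint32_t ch)
{
	uint32_t mask = 0xff00u << (path * 16);

	return (word & ~mask) | ((ch << (8 + path * 16)) & mask);
}

int main(void)
{
	uint32_t iqkch = 0;

	iqkch = set_iqkch_ch(iqkch, 1, 36); /* path B, channel 36 */
	printf("R_IQKCH image: 0x%08x\n", (unsigned int)iqkch);
	return 0;
}
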
1447 static void _iqk_start_iqk(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx, u8 path)
1449 _iqk_by_path(rtwdev, phy_idx, path);
1452 static void _iqk_restore(struct rtw89_dev *rtwdev, u8 path)
1457 rtw89_phy_write32_mask(rtwdev, R_TXIQC + (path << 8), MASKDWORD,
1458 iqk_info->nb_txcfir[path]);
1459 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8), MASKDWORD,
1460 iqk_info->nb_rxcfir[path]);
1462 0x00000e19 + (path << 4));
1463 fail = _iqk_check_cal(rtwdev, path);
1473 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x0);
1474 rtw89_write_rf(rtwdev, path, RR_LUTWE, RR_LUTWE_LOK, 0x0);
1475 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, 0x3);
1476 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x1);
1477 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x1);
1481 enum rtw89_phy_idx phy_idx, u8 path)
1506 static void _iqk_preset(struct rtw89_dev *rtwdev, u8 path)
1511 idx = iqk_info->iqk_table_idx[path];
1514 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8), B_COEF_SEL_IQC, idx);
1515 rtw89_phy_write32_mask(rtwdev, R_CFIR_LUT + (path << 8), B_CFIR_LUT_G3, idx);
1517 rtw89_write_rf(rtwdev, path, RR_RSV1, RR_RSV1_RST, 0x0);
1518 rtw89_write_rf(rtwdev, path, RR_BBDC, RR_BBDC_SEL, 0x0);
1522 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK](1)S%x, 0x8%x54 = 0x%x\n", path, 1 << path,
1523 rtw89_phy_read32_mask(rtwdev, R_CFIR_LUT + (path << 8), MASKDWORD));
1524 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[IQK](1)S%x, 0x8%x04 = 0x%x\n", path, 1 << path,
1525 rtw89_phy_read32_mask(rtwdev, R_COEF_SEL + (path << 8), MASKDWORD));
1529 enum rtw89_phy_idx phy_idx, u8 path)
1555 u8 idx, path;
1572 for (path = 0; path < RTW8852B_IQK_SS; path++) {
1573 iqk_info->lok_cor_fail[idx][path] = false;
1574 iqk_info->lok_fin_fail[idx][path] = false;
1575 iqk_info->iqk_tx_fail[idx][path] = false;
1576 iqk_info->iqk_rx_fail[idx][path] = false;
1577 iqk_info->iqk_mcc_ch[idx][path] = 0x0;
1578 iqk_info->iqk_table_idx[path] = 0x0;
1586 u8 path;
1589 for (path = 0; path < RF_PATH_MAX; path++) {
1590 if (!(kpath & BIT(path)))
1595 rtwdev, path, RR_MOD, RR_MOD_MASK);
1597 "[RFK] Wait S%d to Rx mode!! (ret = %d)\n", path, ret);
1611 enum rtw89_phy_idx phy_idx, u8 path)
1626 _iqk_get_ch_info(rtwdev, phy_idx, path);
1629 _rfk_backup_rf_reg(rtwdev, &backup_rf_val[path][0], path);
1630 _iqk_macbb_setting(rtwdev, phy_idx, path);
1631 _iqk_preset(rtwdev, path);
1632 _iqk_start_iqk(rtwdev, phy_idx, path);
1633 _iqk_restore(rtwdev, path);
1634 _iqk_afebb_restore(rtwdev, phy_idx, path);
1636 _rfk_restore_rf_reg(rtwdev, &backup_rf_val[path][0], path);
1662 u32 reg_bkup[][RTW8852B_DPK_KIP_REG_NUM], u8 path)
1667 reg_bkup[path][i] =
1668 rtw89_phy_read32_mask(rtwdev, reg[i] + (path << 8), MASKDWORD);
1670 reg[i] + (path << 8), reg_bkup[path][i]);
1675 const u32 reg_bkup[][RTW8852B_DPK_KIP_REG_NUM], u8 path)
1680 rtw89_phy_write32_mask(rtwdev, reg[i] + (path << 8), MASKDWORD,
1681 reg_bkup[path][i]);
1683 reg[i] + (path << 8), reg_bkup[path][i]);
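
_dpk_bkup_kip/_dpk_reload_kip above implement the save/patch/restore bracket wrapped around DPK: walk a fixed table of KIP register bases, save the full dword at reg[i] + (path << 8) into reg_bkup[path][i], and write the same values back afterwards. A self-contained sketch over an array-backed fake register file; the base addresses are invented except 0x813c, which matches the RXIQC debug prints later in the listing:

#include <stdint.h>
#include <stdio.h>

#define KIP_REG_NUM 3

/* Fake register file standing in for MMIO; index is byte address / 4. */
static uint32_t regfile[0x10000];

/* Invented bases apart from 0x813c (RXIQC, per the listing's prints). */
static const uint32_t kip_reg[KIP_REG_NUM] = { 0x8100, 0x8104, 0x813c };

static void bkup_kip(uint32_t bkup[][KIP_REG_NUM], unsigned int path)
{
	for (int i = 0; i < KIP_REG_NUM; i++)
		bkup[path][i] = regfile[(kip_reg[i] + (path << 8)) / 4];
}

static void reload_kip(const uint32_t bkup[][KIP_REG_NUM], unsigned int path)
{
	for (int i = 0; i < KIP_REG_NUM; i++)
		regfile[(kip_reg[i] + (path << 8)) / 4] = bkup[path][i];
}

int main(void)
{
	uint32_t bkup[2][KIP_REG_NUM];

	bkup_kip(bkup, 0);            /* save before the calibration    */
	regfile[0x8100 / 4] = 0xdead; /* calibration scribbles on a reg */
	reload_kip(bkup, 0);          /* put everything back afterwards */
	printf("0x8100 = 0x%x\n", (unsigned int)regfile[0x8100 / 4]);
	return 0;
}
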
1700 static void _dpk_onoff(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, bool off)
1703 u8 val, kidx = dpk->cur_idx[path];
1705 val = dpk->is_dpk_enable && !off && dpk->bp[path][kidx].path_ok;
1707 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
1710 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] DPK %s !!!\n", path,
1715 enum rtw89_rf_path path, enum rtw8852b_dpk_id id)
1721 dpk_cmd = (id << 8) | (0x19 + (path << 4));
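
DPK one-shots use a different command layout from the IQK ones: the action id sits in bits 8 and up, and the low byte is 0x19 + (path << 4) (line 1721), so path A commands end in 0x19 and path B commands in 0x29. Sketch:

#include <stdint.h>
#include <stdio.h>

/* DPK one-shot word: id in the high byte, 0x19 + (path << 4) below. */
static unsigned int dpk_cmd(unsigned int id, unsigned int path)
{
	return (id << 8) | (0x19 + (path << 4));
}

int main(void)
{
	/* The id values stand in for the driver's rtw8852b_dpk_id enum. */
	printf("id 4, S0: 0x%04x\n", dpk_cmd(4, 0)); /* -> 0x0419 */
	printf("id 4, S1: 0x%04x\n", dpk_cmd(4, 1)); /* -> 0x0429 */
	return 0;
}
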
1757 enum rtw89_rf_path path)
1759 rtw89_write_rf(rtwdev, path, RR_RXBB2, RR_EN_TIA_IDA, 0x3);
1760 _set_rx_dck(rtwdev, phy, path);
1764 enum rtw89_rf_path path)
1769 u8 kidx = dpk->cur_idx[path];
1771 dpk->bp[path][kidx].band = chan->band_type;
1772 dpk->bp[path][kidx].ch = chan->channel;
1773 dpk->bp[path][kidx].bw = chan->band_width;
1777 path, dpk->cur_idx[path], phy,
1778 rtwdev->is_tssi_mode[path] ? "on" : "off",
1780 dpk->bp[path][kidx].band == 0 ? "2G" :
1781 dpk->bp[path][kidx].band == 1 ? "5G" : "6G",
1782 dpk->bp[path][kidx].ch,
1783 dpk->bp[path][kidx].bw == 0 ? "20M" :
1784 dpk->bp[path][kidx].bw == 1 ? "40M" : "80M");
1789 enum rtw89_rf_path path, u8 kpath)
1806 enum rtw89_rf_path path, u8 kpath)
1822 enum rtw89_rf_path path, bool is_pause)
1824 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_TRK + (path << 13),
1827 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d TSSI %s\n", path,
1832 enum rtw89_rf_path path)
1837 rtw89_phy_write32_mask(rtwdev, R_DPD_COM + (path << 8), B_DPD_COM_OF, 0x1);
1839 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d restore KIP\n", path);
1843 enum rtw89_rf_path path)
1848 cur_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASKRXBB);
1851 rtw89_phy_write32_mask(rtwdev, R_IQK_RES + (path << 8), B_IQK_RES_RXCFIR, 0x0);
1853 tmp = rtw89_read_rf(rtwdev, path, RR_CFGCH, RFREG_MASK);
1854 rtw89_write_rf(rtwdev, path, RR_RSV4, RFREG_MASK, tmp);
1855 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASKMODE, 0xd);
1856 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_PLLEN, 0x1);
1859 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT1, 0x13);
1861 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT1, 0x00);
1863 rtw89_write_rf(rtwdev, path, RR_TXIQK, RR_TXIQK_ATT1, 0x05);
1865 rtw89_write_rf(rtwdev, path, RR_XGLNA2, RR_XGLNA2_SW, 0x0);
1866 rtw89_write_rf(rtwdev, path, RR_RXKPLL, RR_RXKPLL_POW, 0x0);
1867 rtw89_write_rf(rtwdev, path, RR_RXKPLL, RFREG_MASK, 0x80014);
1873 _dpk_one_shot(rtwdev, phy, path, LBK_RXIQK);
1875 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d LBK RXIQC = 0x%x\n", path,
1879 rtw89_write_rf(rtwdev, path, RR_RXK, RR_RXK_PLLEN, 0x0);
1882 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_DI, 0x1);
1883 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASKMODE, 0x5);
1886 static void _dpk_get_thermal(struct rtw89_dev *rtwdev, u8 kidx, enum rtw89_rf_path path)
1890 rtw89_write_rf(rtwdev, path, RR_TM, RR_TM_TRI, 0x1);
1891 rtw89_write_rf(rtwdev, path, RR_TM, RR_TM_TRI, 0x0);
1892 rtw89_write_rf(rtwdev, path, RR_TM, RR_TM_TRI, 0x1);
1896 dpk->bp[path][kidx].ther_dpk = rtw89_read_rf(rtwdev, path, RR_TM, RR_TM_VAL);
1899 dpk->bp[path][kidx].ther_dpk);
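
The thermal read above (lines 1890-1896) toggles RR_TM_TRI 1 -> 0 -> 1 before sampling RR_TM_VAL: the rising edge retriggers the RF thermometer so a fresh conversion is latched. A stub sketch of trigger-then-sample; the accessors are made up, not the driver's rtw89_write_rf()/rtw89_read_rf():

#include <stdint.h>
#include <stdio.h>

/* Stub accessors in place of the driver's RF read/write helpers. */
static void rf_write_tm_tri(int v) { (void)v; /* would hit hardware */ }
static uint8_t rf_read_tm_val(void) { return 0x2c; /* canned sample */ }

static uint8_t get_thermal(void)
{
	/* 1 -> 0 -> 1: the rising edge retriggers the thermometer. */
	rf_write_tm_tri(1);
	rf_write_tm_tri(0);
	rf_write_tm_tri(1);
	return rf_read_tm_val();
}

int main(void)
{
	printf("ther_dpk = 0x%x\n", get_thermal());
	return 0;
}
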
1903 enum rtw89_rf_path path, u8 kidx)
1907 if (dpk->bp[path][kidx].band == RTW89_BAND_2G) {
1908 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, 0x50220);
1909 rtw89_write_rf(rtwdev, path, RR_RXBB, RR_RXBB_FATT, 0xf2);
1910 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1);
1911 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1);
1913 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASK, 0x50220);
1914 rtw89_write_rf(rtwdev, path, RR_RXA2, RR_RAA2_SWATT, 0x5);
1915 rtw89_write_rf(rtwdev, path, RR_LUTDBG, RR_LUTDBG_TIA, 0x1);
1916 rtw89_write_rf(rtwdev, path, RR_TIA, RR_TIA_N6, 0x1);
1917 rtw89_write_rf(rtwdev, path, RR_RXA_LNA, RFREG_MASK, 0x920FC);
1918 rtw89_write_rf(rtwdev, path, RR_XALNA2, RFREG_MASK, 0x002C0);
1919 rtw89_write_rf(rtwdev, path, RR_IQGEN, RFREG_MASK, 0x38800);
1922 rtw89_write_rf(rtwdev, path, RR_RCKD, RR_RCKD_BW, 0x1);
1923 rtw89_write_rf(rtwdev, path, RR_BTC, RR_BTC_TXBB, dpk->bp[path][kidx].bw + 1);
1924 rtw89_write_rf(rtwdev, path, RR_BTC, RR_BTC_RXBB, 0x0);
1928 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK),
1929 rtw89_read_rf(rtwdev, path, RR_TXIG, RFREG_MASK),
1930 rtw89_read_rf(rtwdev, path, RR_BTC, RFREG_MASK));
1934 enum rtw89_rf_path path, bool is_bypass)
1937 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8),
1939 rtw89_phy_write32_mask(rtwdev, R_RXIQC + (path << 8),
1942 "[DPK] Bypass RXIQC (0x8%d3c = 0x%x)\n", 1 + path,
1943 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8),
1946 rtw89_phy_write32_clr(rtwdev, R_RXIQC + (path << 8), B_RXIQC_BYPASS2);
1947 rtw89_phy_write32_clr(rtwdev, R_RXIQC + (path << 8), B_RXIQC_BYPASS);
1949 "[DPK] restore 0x8%d3c = 0x%x\n", 1 + path,
1950 rtw89_phy_read32_mask(rtwdev, R_RXIQC + (path << 8),
1956 void _dpk_tpg_sel(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
1960 if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_80)
1962 else if (dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_40)
1968 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_80 ? "80M" :
1969 dpk->bp[path][kidx].bw == RTW89_CHANNEL_WIDTH_40 ? "40M" : "20M");
1973 enum rtw89_rf_path path, u8 kidx, u8 gain)
1978 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0 + (path << 8), MASKBYTE3, val);
1984 static bool _dpk_sync_check(struct rtw89_dev *rtwdev, enum rtw89_rf_path path, u8 kidx)
2000 path, corr_idx, corr_val);
2002 dpk->corr_idx[path][kidx] = corr_idx;
2003 dpk->corr_val[path][kidx] = corr_val;
2014 path, dc_i, dc_q);
2016 dpk->dc_i[path][kidx] = dc_i;
2017 dpk->dc_q[path][kidx] = dc_q;
2027 enum rtw89_rf_path path, u8 kidx)
2029 _dpk_one_shot(rtwdev, phy, path, SYNC);
2031 return _dpk_sync_check(rtwdev, path, kidx);
2104 enum rtw89_rf_path path, u8 kidx)
2106 _dpk_table_select(rtwdev, path, kidx, 1);
2107 _dpk_one_shot(rtwdev, phy, path, GAIN_LOSS);
2111 enum rtw89_rf_path path, u8 kidx)
2113 _dpk_tpg_sel(rtwdev, path, kidx);
2114 _dpk_one_shot(rtwdev, phy, path, KIP_PRESET);
2118 enum rtw89_rf_path path)
2122 rtw89_phy_write32_mask(rtwdev, R_CFIR_SYS + (path << 8), MASKDWORD, 0xce000a08);
2128 enum rtw89_rf_path path, u8 txagc)
2130 rtw89_write_rf(rtwdev, path, RR_TXAGC, RFREG_MASK, txagc);
2132 _dpk_one_shot(rtwdev, phy, path, DPK_TXAGC);
2139 enum rtw89_rf_path path)
2143 tmp = rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASK);
2146 _dpk_one_shot(rtwdev, phy, path, DPK_RXAGC);
2153 rtw89_read_rf(rtwdev, path, RR_MOD, RFREG_MASKRXBB));
2157 enum rtw89_rf_path path, s8 gain_offset)
2161 txagc = rtw89_read_rf(rtwdev, path, RR_TXAGC, RFREG_MASK);
2170 _dpk_kip_set_txagc(rtwdev, phy, path, txagc);
2219 enum rtw89_rf_path path, u8 kidx, u8 init_txagc,
2235 if (_dpk_sync(rtwdev, phy, path, kidx)) {
2250 tmp_rxbb = rtw89_read_rf(rtwdev, path, RR_MOD,
2264 rtw89_write_rf(rtwdev, path, RR_MOD, RFREG_MASKRXBB,
2270 _dpk_bypass_rxcfir(rtwdev, path, true);
2272 _dpk_lbk_rxiqk(rtwdev, phy, path);
2283 _dpk_gainloss(rtwdev, phy, path, kidx);
2301 tmp_txagc = _dpk_set_offset(rtwdev, phy, path, 0x3);
2313 tmp_txagc = _dpk_set_offset(rtwdev, phy, path, 0xfe);
2319 tmp_txagc = _dpk_set_offset(rtwdev, phy, path, tmp_gl_idx);
2366 enum rtw89_rf_path path, u8 kidx, u8 gain)
2370 if (dpk->bp[path][kidx].bw < RTW89_CHANNEL_WIDTH_80 &&
2371 dpk->bp[path][kidx].band == RTW89_BAND_5G)
2376 _dpk_one_shot(rtwdev, phy, path, MDPK_IDL);
2380 enum rtw89_rf_path path, u8 kidx, u8 gain, u8 txagc)
2386 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8),
2393 dpk->bp[path][kidx].txagc_dpk = txagc;
2394 rtw89_phy_write32_mask(rtwdev, R_TXAGC_RFK + (path << 8),
2397 dpk->bp[path][kidx].pwsf = pwsf;
2398 rtw89_phy_write32_mask(rtwdev, R_DPD_BND + (path << 8) + (kidx << 2),
2401 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x1);
2402 rtw89_phy_write32_mask(rtwdev, R_LOAD_COEF + (path << 8), B_LOAD_COEF_MDPD, 0x0);
2404 dpk->bp[path][kidx].gs = gs;
2406 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2409 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2412 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8) + (kidx << 2),
2414 rtw89_phy_write32_mask(rtwdev, R_DPD_V1 + (path << 8), MASKDWORD, 0x0);
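
The result writeback above shows both strides at once: KIP-side per-path registers move by path << 8, and the per-slot DPD banks add kidx << 2 on top (R_DPD_BND and R_DPD_CH0A at lines 2398 and 2406-2412), giving each path two 4-byte-spaced coefficient slots. Sketch of the combined address arithmetic; the base value is illustrative:

#include <stdint.h>
#include <stdio.h>

/* Illustrative base; the real R_DPD_BND is in the driver headers. */
#define R_DPD_BND_BASE 0x81acu

/* path stride 0x100 (path << 8), slot stride 4 (kidx << 2). */
static uint32_t dpd_bnd_addr(unsigned int path, unsigned int kidx)
{
	return R_DPD_BND_BASE + (path << 8) + (kidx << 2);
}

int main(void)
{
	for (unsigned int path = 0; path < 2; path++)
		for (unsigned int kidx = 0; kidx < 2; kidx++)
			printf("S%u[%u]: 0x%04x\n", path, kidx,
			       (unsigned int)dpd_bnd_addr(path, kidx));
	return 0;
}
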
2419 enum rtw89_rf_path path)
2430 if (cur_band != dpk->bp[path][idx].band ||
2431 cur_ch != dpk->bp[path][idx].ch)
2434 rtw89_phy_write32_mask(rtwdev, R_COEF_SEL + (path << 8),
2436 dpk->cur_idx[path] = idx;
2439 "[DPK] reload S%d[%d] success\n", path, idx);
2446 enum rtw89_rf_path path, u8 gain)
2449 u8 txagc = 0x38, kidx = dpk->cur_idx[path];
2453 "[DPK] ========= S%d[%d] DPK Start =========\n", path, kidx);
2455 _rfk_rf_direct_cntrl(rtwdev, path, false);
2456 _rfk_drf_direct_cntrl(rtwdev, path, false);
2458 _dpk_kip_pwr_clk_on(rtwdev, path);
2459 _dpk_kip_set_txagc(rtwdev, phy, path, txagc);
2460 _dpk_rf_setting(rtwdev, gain, path, kidx);
2461 _dpk_rx_dck(rtwdev, phy, path);
2463 _dpk_kip_preset(rtwdev, phy, path, kidx);
2464 _dpk_kip_set_rxagc(rtwdev, phy, path);
2465 _dpk_table_select(rtwdev, path, kidx, gain);
2467 txagc = _dpk_agc(rtwdev, phy, path, kidx, txagc, false);
2473 _dpk_get_thermal(rtwdev, kidx, path);
2475 _dpk_idl_mpa(rtwdev, phy, path, kidx, gain);
2477 rtw89_write_rf(rtwdev, path, RR_MOD, RR_MOD_MASK, RR_MOD_V_RX);
2479 _dpk_fill_result(rtwdev, phy, path, kidx, gain, txagc);
2483 dpk->bp[path][kidx].path_ok = true;
2485 dpk->bp[path][kidx].path_ok = false;
2487 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[DPK] S%d[%d] DPK %s\n", path, kidx,
2502 u8 path;
2505 for (path = 0; path < RTW8852B_DPK_RF_PATH; path++) {
2506 reloaded[path] = _dpk_reload_check(rtwdev, phy, path);
2507 if (!reloaded[path] && dpk->bp[path][0].ch)
2508 dpk->cur_idx[path] = !dpk->cur_idx[path];
2510 _dpk_onoff(rtwdev, path, false);
2513 for (path = 0; path < RTW8852B_DPK_RF_PATH; path++)
2514 dpk->cur_idx[path] = 0;
2519 for (path = 0; path < RTW8852B_DPK_RF_PATH; path++) {
2520 _dpk_bkup_kip(rtwdev, kip_reg, kip_bkup, path);
2521 _rfk_backup_rf_reg(rtwdev, &backup_rf_val[path][0], path);
2522 _dpk_information(rtwdev, phy, path);
2523 if (rtwdev->is_tssi_mode[path])
2524 _dpk_tssi_pause(rtwdev, path, true);
2527 _dpk_bb_afe_setting(rtwdev, phy, path, kpath);
2529 for (path = 0; path < RTW8852B_DPK_RF_PATH; path++) {
2530 is_fail = _dpk_main(rtwdev, phy, path, 1);
2531 _dpk_onoff(rtwdev, path, is_fail);
2534 _dpk_bb_afe_restore(rtwdev, phy, path, kpath);
2537 for (path = 0; path < RTW8852B_DPK_RF_PATH; path++) {
2538 _dpk_kip_restore(rtwdev, path);
2539 _dpk_reload_kip(rtwdev, kip_reg, kip_bkup, path);
2540 _rfk_restore_rf_reg(rtwdev, &backup_rf_val[path][0], path);
2541 if (rtwdev->is_tssi_mode[path])
2542 _dpk_tssi_pause(rtwdev, path, false);
2570 u8 path, kpath;
2574 for (path = 0; path < RTW8852B_DPK_RF_PATH; path++) {
2575 if (kpath & BIT(path))
2576 _dpk_onoff(rtwdev, path, true);
2599 u8 path, kidx;
2604 for (path = 0; path < RF_PATH_NUM_8852B; path++) {
2605 kidx = dpk->cur_idx[path];
2609 path, kidx, dpk->bp[path][kidx].ch);
2611 cur_ther = ewma_thermal_read(&rtwdev->phystat.avg_thermal[path]);
2616 if (dpk->bp[path][kidx].ch && cur_ther)
2617 delta_ther[path] = dpk->bp[path][kidx].ther_dpk - cur_ther;
2619 if (dpk->bp[path][kidx].band == RTW89_BAND_2G)
2620 delta_ther[path] = delta_ther[path] * 3 / 2;
2622 delta_ther[path] = delta_ther[path] * 5 / 2;
2624 txagc_rf = rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13),
2627 if (rtwdev->is_tssi_mode[path]) {
2628 trk_idx = rtw89_read_rf(rtwdev, path, RR_TXA, RR_TXA_TRK);
2635 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13),
2638 rtw89_phy_read32_mask(rtwdev, R_TXAGC_TP + (path << 13),
2646 rtw89_phy_read32_mask(rtwdev, R_TXAGC_BB + (path << 13),
2651 txagc_ofst, delta_ther[path]);
2652 tmp = rtw89_phy_read32_mask(rtwdev, R_DPD_COM + (path << 8),
2661 ini_diff = txagc_ofst + (delta_ther[path]);
2664 R_P0_TXDPD + (path << 13),
2667 pwsf[0] = dpk->bp[path][kidx].pwsf +
2669 pwsf[1] = dpk->bp[path][kidx].pwsf +
2672 pwsf[0] = dpk->bp[path][kidx].pwsf + ini_diff;
2673 pwsf[1] = dpk->bp[path][kidx].pwsf + ini_diff;
2677 pwsf[0] = (dpk->bp[path][kidx].pwsf + delta_ther[path]) & 0x1ff;
2678 pwsf[1] = (dpk->bp[path][kidx].pwsf + delta_ther[path]) & 0x1ff;
2688 R_DPD_BND + (path << 8) + (kidx << 2),
2691 R_DPD_BND + (path << 8) + (kidx << 2),
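
The tracking arithmetic above (lines 2611-2678) compensates the stored power scaling factor for thermal drift: delta_ther is the thermal code at DPK time minus the current reading, scaled by 3/2 on 2 GHz and 5/2 otherwise, and the result is added to the saved pwsf within its 9-bit field. A worked sketch of just that arithmetic:

#include <stdint.h>
#include <stdio.h>

/* Thermal compensation of the 9-bit power scaling factor (pwsf). */
static uint16_t track_pwsf(uint16_t pwsf_k, uint8_t ther_dpk,
			   uint8_t ther_now, int is_2g)
{
	int delta = (int)ther_dpk - (int)ther_now;

	/* Band-dependent slope from the listing: x3/2 on 2G, x5/2 else. */
	delta = is_2g ? delta * 3 / 2 : delta * 5 / 2;

	return (uint16_t)((pwsf_k + delta) & 0x1ff);
}

int main(void)
{
	/* DPK ran at thermal 0x30; the device has since warmed to 0x3a. */
	printf("pwsf now: 0x%03x\n",
	       (unsigned int)track_pwsf(0x0c8, 0x30, 0x3a, 1));
	return 0;
}
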
2700 u8 tx_scale, ofdm_bkof, path, kpath;
2710 for (path = 0; path < RF_PATH_NUM_8852B; path++) {
2711 if (!(kpath & BIT(path)))
2714 rtw89_phy_write32_mask(rtwdev, R_DPD_CH0A + (path << 8),
2717 "[RFK] Set S%d DPD backoff to 0dB\n", path);
2725 enum rtw89_rf_path path)
2731 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXG, 0x1);
2733 rtw89_write_rf(rtwdev, path, RR_TXPOW, RR_TXPOW_TXA, 0x1);
2737 enum rtw89_rf_path path)
2744 if (path == RF_PATH_A)
2756 enum rtw89_rf_path path)
2758 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2765 enum rtw89_rf_path path)
2767 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2773 enum rtw89_rf_path path)
2775 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2781 enum rtw89_rf_path path)
2835 if (path == RF_PATH_A) {
2939 enum rtw89_rf_path path)
2941 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
2947 enum rtw89_rf_path path)
2952 if (path == RF_PATH_A)
2963 enum rtw89_rf_path path, bool all)
2970 if (path == RF_PATH_A) {
3021 enum rtw89_rf_path path)
3023 rtw89_rfk_parser_by_cond(rtwdev, path == RF_PATH_A,
3029 enum rtw89_rf_path path)
3031 if (path == RF_PATH_A)
3039 enum rtw89_rf_path path)
3041 rtw89_debug(rtwdev, RTW89_DBG_TSSI, "======>%s path=%d\n", __func__,
3042 path);
3044 if (path == RF_PATH_A)
3234 enum rtw89_rf_path path)
3247 "[TSSI][TRIM]: path=%d mcs group_idx=0x%x\n", path, gidx);
3252 de_1st = tssi_info->tssi_mcs[path][gidx_1st];
3253 de_2nd = tssi_info->tssi_mcs[path][gidx_2nd];
3257 "[TSSI][TRIM]: path=%d mcs de=%d 1st=%d 2nd=%d\n",
3258 path, val, de_1st, de_2nd);
3260 val = tssi_info->tssi_mcs[path][gidx];
3263 "[TSSI][TRIM]: path=%d mcs de=%d\n", path, val);
3270 enum rtw89_rf_path path)
3283 "[TSSI][TRIM]: path=%d mcs trim_group_idx=0x%x\n",
3284 path, tgidx);
3289 tde_1st = tssi_info->tssi_trim[path][tgidx_1st];
3290 tde_2nd = tssi_info->tssi_trim[path][tgidx_2nd];
3294 "[TSSI][TRIM]: path=%d mcs trim_de=%d 1st=%d 2nd=%d\n",
3295 path, val, tde_1st, tde_2nd);
3297 val = tssi_info->tssi_trim[path][tgidx];
3300 "[TSSI][TRIM]: path=%d mcs trim_de=%d\n",
3301 path, val);
3327 "[TSSI][TRIM]: path=%d cck[%d]=0x%x trim=0x%x\n",
3344 "[TSSI][TRIM]: path=%d mcs=0x%x trim=0x%x\n",
3362 static void _tssi_alimentk_dump_result(struct rtw89_dev *rtwdev, enum rtw89_rf_path path)
3367 R_TSSI_PA_K1 + (path << 13),
3368 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K1 + (path << 13), MASKDWORD),
3369 R_TSSI_PA_K2 + (path << 13),
3370 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K2 + (path << 13), MASKDWORD),
3371 R_P0_TSSI_ALIM1 + (path << 13),
3372 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD),
3373 R_P0_TSSI_ALIM3 + (path << 13),
3374 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD),
3375 R_TSSI_PA_K5 + (path << 13),
3376 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K5 + (path << 13), MASKDWORD),
3377 R_P0_TSSI_ALIM2 + (path << 13),
3378 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD),
3379 R_P0_TSSI_ALIM4 + (path << 13),
3380 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD),
3381 R_TSSI_PA_K8 + (path << 13),
3382 rtw89_phy_read32_mask(rtwdev, R_TSSI_PA_K8 + (path << 13), MASKDWORD));
3386 enum rtw89_phy_idx phy, enum rtw89_rf_path path)
3394 "======>%s phy=%d path=%d\n", __func__, phy, path);
3407 if (tssi_info->alignment_done[path][band]) {
3408 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD,
3409 tssi_info->alignment_value[path][band][0]);
3410 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD,
3411 tssi_info->alignment_value[path][band][1]);
3412 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD,
3413 tssi_info->alignment_value[path][band][2]);
3414 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD,
3415 tssi_info->alignment_value[path][band][3]);
3418 _tssi_alimentk_dump_result(rtwdev, path);
3422 enum rtw89_rf_path path, u16 cnt, u16 period, s16 pwr_dbm,
3427 if (path == RF_PATH_A)
3429 else if (path == RF_PATH_B)
3431 else if (path == RF_PATH_AB)
3434 rx_path = RF_ABCD; /* don't change path, but still set others */
3438 rtw8852bx_bb_cfg_tx_path(rtwdev, path);
3496 enum rtw89_rf_path path, const s16 *power,
3505 rtw89_phy_write32_mask(rtwdev, _tssi_trigger[path], B_P0_TSSI_EN, 0x0);
3506 rtw89_phy_write32_mask(rtwdev, _tssi_trigger[path], B_P0_TSSI_EN, 0x1);
3510 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_trigger[path], MASKDWORD);
3512 "[TSSI PA K] 0x%x = 0x%08x path=%d\n",
3513 _tssi_trigger[path], tmp, path);
3516 _tssi_hw_tx(rtwdev, phy, path, 100, 5000, power[j], true);
3524 "[TSSI PA K] First HWTXcounter=%d path=%d\n",
3525 tx_counter_tmp, path);
3528 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_rpt_addr[path],
3540 "[TSSI PA K] Flow k = %d HWTXcounter=%d path=%d\n",
3541 k, tx_counter_tmp, path);
3546 "[TSSI PA K] TSSI finish bit k > %d mp:100ms normal:30us path=%d\n",
3547 k, path);
3549 _tssi_hw_tx(rtwdev, phy, path, 100, 5000, power[j], false);
3554 rtw89_phy_read32_mask(rtwdev, _tssi_cw_rpt_addr[path], B_TSSI_CWRPT);
3556 _tssi_hw_tx(rtwdev, phy, path, 100, 5000, power[j], false);
3562 "[TSSI PA K] Final HWTXcounter=%d path=%d\n",
3563 tx_counter_tmp, path);
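
_tssi_get_cw_report above drives one measurement per target power: pulse B_P0_TSSI_EN low then high to restart the tracker, fire a burst of hardware TX, and poll the per-path CW report register until its finish bit rises before reading B_TSSI_CWRPT. A sketch of the bounded poll over a stubbed report register; the bit layout (finish at bit 15, a 9-bit code word) is an assumption for illustration only:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stubbed report register: assumed finish bit BIT(15), code in 0..8. */
static int polls;
static uint32_t read_cw_rpt(void)
{
	return (++polls >= 4) ? (1u << 15) | 0x0e5 : 0;
}

static bool get_cw_report(uint16_t *rpt, int max_polls)
{
	while (max_polls--) {
		uint32_t v = read_cw_rpt();

		if (v & (1u << 15)) {
			*rpt = v & 0x1ff;
			return true;
		}
	}
	return false; /* caller logs a timeout and bails out */
}

int main(void)
{
	uint16_t rpt;

	if (get_cw_report(&rpt, 30))
		printf("tssi_cw_rpt = 0x%03x\n", (unsigned int)rpt);
	return 0;
}
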
3570 enum rtw89_rf_path path)
3593 "======> %s channel=%d path=%d\n", __func__, channel,
3594 path);
3596 if (tssi_info->check_backup_aligmk[path][ch_idx]) {
3597 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD,
3598 tssi_info->alignment_backup_by_ch[path][ch_idx][0]);
3599 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD,
3600 tssi_info->alignment_backup_by_ch[path][ch_idx][1]);
3601 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD,
3602 tssi_info->alignment_backup_by_ch[path][ch_idx][2]);
3603 rtw89_phy_write32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD,
3604 tssi_info->alignment_backup_by_ch[path][ch_idx][3]);
3608 _tssi_alimentk_dump_result(rtwdev, path);
3638 ok = _tssi_get_cw_report(rtwdev, phy, path, power, tssi_cw_rpt);
3648 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_default_addr[path][1],
3655 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_default_addr[path][2],
3660 tmp = rtw89_phy_read32_mask(rtwdev, _tssi_cw_default_addr[path][3],
3665 if (path == RF_PATH_A) {
3695 tssi_info->alignment_done[path][band] = true;
3696 tssi_info->alignment_value[path][band][0] =
3697 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD);
3698 tssi_info->alignment_value[path][band][1] =
3699 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD);
3700 tssi_info->alignment_value[path][band][2] =
3701 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD);
3702 tssi_info->alignment_value[path][band][3] =
3703 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD);
3705 tssi_info->check_backup_aligmk[path][ch_idx] = true;
3706 tssi_info->alignment_backup_by_ch[path][ch_idx][0] =
3707 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM1 + (path << 13), MASKDWORD);
3708 tssi_info->alignment_backup_by_ch[path][ch_idx][1] =
3709 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM3 + (path << 13), MASKDWORD);
3710 tssi_info->alignment_backup_by_ch[path][ch_idx][2] =
3711 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM2 + (path << 13), MASKDWORD);
3712 tssi_info->alignment_backup_by_ch[path][ch_idx][3] =
3713 rtw89_phy_read32_mask(rtwdev, R_P0_TSSI_ALIM4 + (path << 13), MASKDWORD);
3716 "[TSSI PA K] tssi_info->alignment_value[path=%d][band=%d][0], 0x%x = 0x%08x\n",
3717 path, band, R_P0_TSSI_ALIM1 + (path << 13),
3718 tssi_info->alignment_value[path][band][0]);
3720 "[TSSI PA K] tssi_info->alignment_value[path=%d][band=%d][1], 0x%x = 0x%08x\n",
3721 path, band, R_P0_TSSI_ALIM3 + (path << 13),
3722 tssi_info->alignment_value[path][band][1]);
3724 "[TSSI PA K] tssi_info->alignment_value[path=%d][band=%d][2], 0x%x = 0x%08x\n",
3725 path, band, R_P0_TSSI_ALIM2 + (path << 13),
3726 tssi_info->alignment_value[path][band][2]);
3728 "[TSSI PA K] tssi_info->alignment_value[path=%d][band=%d][3], 0x%x = 0x%08x\n",
3729 path, band, R_P0_TSSI_ALIM4 + (path << 13),
3730 tssi_info->alignment_value[path][band][3]);
3752 u8 path;
3754 for (path = 0; path < RF_PATH_NUM_8852B; path++)
3755 _rck(rtwdev, path);
3946 static void _bw_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
3954 rf_reg18 = rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK);
3957 "[RFK]Invalid RF_0x18 for Path-%d\n", path);
3981 rtw89_write_rf(rtwdev, path, reg18_addr, RFREG_MASK, rf_reg18);
3983 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK] set %x at path%d, %x =0x%x\n",
3984 bw, path, reg18_addr,
3985 rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK));
4074 static void _ch_setting(struct rtw89_dev *rtwdev, enum rtw89_rf_path path,
4083 rf_reg18 = rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK);
4096 if (path == RF_PATH_A && dav)
4099 rtw89_write_rf(rtwdev, path, reg18_addr, RFREG_MASK, rf_reg18);
4101 rtw89_write_rf(rtwdev, path, RR_LCKST, RR_LCKST_BIN, 0);
4102 rtw89_write_rf(rtwdev, path, RR_LCKST, RR_LCKST_BIN, 1);
4106 central_ch, path, reg18_addr,
4107 rtw89_read_rf(rtwdev, path, reg18_addr, RFREG_MASK));
4119 enum rtw89_rf_path path)
4121 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x1);
4122 rtw89_write_rf(rtwdev, path, RR_LUTWA, RR_LUTWA_M2, 0x12);
4125 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0x1b);
4127 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0x13);
4129 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0xb);
4131 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB, 0x3);
4133 rtw89_debug(rtwdev, RTW89_DBG_RFK, "[RFK] set S%d RXBB BW 0x3F = 0x%x\n", path,
4134 rtw89_read_rf(rtwdev, path, RR_LUTWD0, RR_LUTWD0_LB));
4136 rtw89_write_rf(rtwdev, path, RR_LUTWE2, RR_LUTWE2_RTXBW, 0x0);
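
_set_rxbb_bw above is one instance of the indirect RF LUT write sequence used throughout the file: open a write-enable window (RR_LUTWE2), select an entry (RR_LUTWA), write the data word (RR_LUTWD0, here one of the bandwidth codes 0x1b/0x13/0xb/0x3), and close the window again. A sketch of that open/select/write/close bracket over a fake LUT; the accessors and the bandwidth mapping are illustrative, not the driver API:

#include <stdint.h>
#include <stdio.h>

/* Fake LUT standing in for the RF-internal table behind RR_LUTW*. */
static uint32_t fake_lut[256];
static uint32_t lut_we, lut_wa;

static void lut_write(uint8_t entry, uint32_t data)
{
	lut_we = 1;              /* RR_LUTWE2: open the write window */
	lut_wa = entry;          /* RR_LUTWA: select the table entry */
	fake_lut[lut_wa] = data; /* RR_LUTWD0: write the data word   */
	lut_we = 0;              /* close the window when finished   */
}

/* Illustrative mapping of the four codes seen in the listing. */
static uint32_t rxbb_code(int bw_mhz)
{
	return bw_mhz == 20 ? 0x1b : bw_mhz == 40 ? 0x13 : 0xb;
}

int main(void)
{
	lut_write(0x12, rxbb_code(40)); /* entry 0x12 per line 4122 */
	printf("LUT[0x12] = 0x%x\n", (unsigned int)fake_lut[0x12]);
	return 0;
}
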
4142 u8 kpath, path;
4146 for (path = 0; path < RF_PATH_NUM_8852B; path++) {
4147 if (!(kpath & BIT(path)))
4150 _set_rxbb_bw(rtwdev, bw, path);