// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/* Copyright(c) 2023 Realtek Corporation
 */

#include "coex.h"
#include "debug.h"
#include "efuse.h"
#include "fw.h"
#include "mac.h"
#include "phy.h"
#include "reg.h"
#include "rtw8922a.h"
#include "rtw8922a_rfk.h"
#include "util.h"

#define RTW8922A_FW_FORMAT_MAX 0
#define RTW8922A_FW_BASENAME "rtw89/rtw8922a_fw"
#define RTW8922A_MODULE_FIRMWARE \
        RTW8922A_FW_BASENAME ".bin"

#define HE_N_USER_MAX_8922A 4

static const struct rtw89_hfc_ch_cfg rtw8922a_hfc_chcfg_pcie[] = {
        {2, 1641, grp_0}, /* ACH 0 */
        {2, 1641, grp_0}, /* ACH 1 */
        {2, 1641, grp_0}, /* ACH 2 */
        {2, 1641, grp_0}, /* ACH 3 */
        {2, 1641, grp_1}, /* ACH 4 */
        {2, 1641, grp_1}, /* ACH 5 */
        {2, 1641, grp_1}, /* ACH 6 */
        {2, 1641, grp_1}, /* ACH 7 */
        {2, 1641, grp_0}, /* B0MGQ */
        {2, 1641, grp_0}, /* B0HIQ */
        {2, 1641, grp_1}, /* B1MGQ */
        {2, 1641, grp_1}, /* B1HIQ */
        {0, 0, 0}, /* FWCMDQ */
        {0, 0, 0}, /* BMC */
        {0, 0, 0}, /* H2D */
};

static const struct rtw89_hfc_pub_cfg rtw8922a_hfc_pubcfg_pcie = {
        1651, /* Group 0 */
        1651, /* Group 1 */
        3302, /* Public Max */
        0, /* WP threshold */
};

static const struct rtw89_hfc_param_ini rtw8922a_hfc_param_ini_pcie[] = {
        [RTW89_QTA_SCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
                           &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
        [RTW89_QTA_DBCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
                            &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
        [RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_prec_cfg_c2,
                            RTW89_HCIFC_POH},
        [RTW89_QTA_INVALID] = {NULL},
};

static const struct rtw89_dle_mem rtw8922a_dle_mem_pcie[] = {
        [RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size0_v1,
                           &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
                           &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
                           &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
                           &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
        [RTW89_QTA_DBCC] = {RTW89_QTA_DBCC, &rtw89_mac_size.wde_size0_v1,
                            &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
                            &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
                            &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
                            &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
        [RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size4_v1,
                            &rtw89_mac_size.ple_size3_v1, &rtw89_mac_size.wde_qt4,
                            &rtw89_mac_size.wde_qt4, &rtw89_mac_size.ple_qt9,
                            &rtw89_mac_size.ple_qt9, &rtw89_mac_size.ple_rsvd_qt1,
                            &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
        [RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
                               NULL},
};

static const u32 rtw8922a_h2c_regs[RTW89_H2CREG_MAX] = {
        R_BE_H2CREG_DATA0, R_BE_H2CREG_DATA1, R_BE_H2CREG_DATA2,
        R_BE_H2CREG_DATA3
};

static const u32 rtw8922a_c2h_regs[RTW89_C2HREG_MAX] = {
        R_BE_C2HREG_DATA0, R_BE_C2HREG_DATA1, R_BE_C2HREG_DATA2,
        R_BE_C2HREG_DATA3
};

static const u32 rtw8922a_wow_wakeup_regs[RTW89_WOW_REASON_NUM] = {
        R_AX_C2HREG_DATA3_V1 + 3, R_BE_DBG_WOW,
};

static const struct rtw89_page_regs rtw8922a_page_regs = {
        .hci_fc_ctrl = R_BE_HCI_FC_CTRL,
        .ch_page_ctrl = R_BE_CH_PAGE_CTRL,
        .ach_page_ctrl = R_BE_CH0_PAGE_CTRL,
        .ach_page_info = R_BE_CH0_PAGE_INFO,
        .pub_page_info3 = R_BE_PUB_PAGE_INFO3,
        .pub_page_ctrl1 = R_BE_PUB_PAGE_CTRL1,
        .pub_page_ctrl2 = R_BE_PUB_PAGE_CTRL2,
        .pub_page_info1 = R_BE_PUB_PAGE_INFO1,
        .pub_page_info2 = R_BE_PUB_PAGE_INFO2,
        .wp_page_ctrl1 = R_BE_WP_PAGE_CTRL1,
        .wp_page_ctrl2 = R_BE_WP_PAGE_CTRL2,
        .wp_page_info1 = R_BE_WP_PAGE_INFO1,
};

static const struct rtw89_reg_imr rtw8922a_imr_dmac_regs[] = {
        {R_BE_DISP_HOST_IMR, B_BE_DISP_HOST_IMR_CLR, B_BE_DISP_HOST_IMR_SET},
        {R_BE_DISP_CPU_IMR, B_BE_DISP_CPU_IMR_CLR, B_BE_DISP_CPU_IMR_SET},
        {R_BE_DISP_OTHER_IMR, B_BE_DISP_OTHER_IMR_CLR, B_BE_DISP_OTHER_IMR_SET},
        {R_BE_PKTIN_ERR_IMR, B_BE_PKTIN_ERR_IMR_CLR, B_BE_PKTIN_ERR_IMR_SET},
        {R_BE_INTERRUPT_MASK_REG, B_BE_INTERRUPT_MASK_REG_CLR, B_BE_INTERRUPT_MASK_REG_SET},
        {R_BE_MLO_ERR_IDCT_IMR, B_BE_MLO_ERR_IDCT_IMR_CLR, B_BE_MLO_ERR_IDCT_IMR_SET},
        {R_BE_MPDU_TX_ERR_IMR, B_BE_MPDU_TX_ERR_IMR_CLR, B_BE_MPDU_TX_ERR_IMR_SET},
        {R_BE_MPDU_RX_ERR_IMR, B_BE_MPDU_RX_ERR_IMR_CLR, B_BE_MPDU_RX_ERR_IMR_SET},
        {R_BE_SEC_ERROR_IMR, B_BE_SEC_ERROR_IMR_CLR, B_BE_SEC_ERROR_IMR_SET},
        {R_BE_CPUIO_ERR_IMR, B_BE_CPUIO_ERR_IMR_CLR, B_BE_CPUIO_ERR_IMR_SET},
        {R_BE_WDE_ERR_IMR, B_BE_WDE_ERR_IMR_CLR, B_BE_WDE_ERR_IMR_SET},
        {R_BE_WDE_ERR1_IMR, B_BE_WDE_ERR1_IMR_CLR, B_BE_WDE_ERR1_IMR_SET},
        {R_BE_PLE_ERR_IMR, B_BE_PLE_ERR_IMR_CLR, B_BE_PLE_ERR_IMR_SET},
        {R_BE_PLE_ERRFLAG1_IMR, B_BE_PLE_ERRFLAG1_IMR_CLR, B_BE_PLE_ERRFLAG1_IMR_SET},
        {R_BE_WDRLS_ERR_IMR, B_BE_WDRLS_ERR_IMR_CLR, B_BE_WDRLS_ERR_IMR_SET},
        {R_BE_TXPKTCTL_B0_ERRFLAG_IMR, B_BE_TXPKTCTL_B0_ERRFLAG_IMR_CLR,
         B_BE_TXPKTCTL_B0_ERRFLAG_IMR_SET},
        {R_BE_TXPKTCTL_B1_ERRFLAG_IMR, B_BE_TXPKTCTL_B1_ERRFLAG_IMR_CLR,
         B_BE_TXPKTCTL_B1_ERRFLAG_IMR_SET},
        {R_BE_BBRPT_COM_ERR_IMR, B_BE_BBRPT_COM_ERR_IMR_CLR, B_BE_BBRPT_COM_ERR_IMR_SET},
        {R_BE_BBRPT_CHINFO_ERR_IMR, B_BE_BBRPT_CHINFO_ERR_IMR_CLR,
         B_BE_BBRPT_CHINFO_ERR_IMR_SET},
        {R_BE_BBRPT_DFS_ERR_IMR, B_BE_BBRPT_DFS_ERR_IMR_CLR, B_BE_BBRPT_DFS_ERR_IMR_SET},
        {R_BE_LA_ERRFLAG_IMR, B_BE_LA_ERRFLAG_IMR_CLR, B_BE_LA_ERRFLAG_IMR_SET},
        {R_BE_CH_INFO_DBGFLAG_IMR, B_BE_CH_INFO_DBGFLAG_IMR_CLR, B_BE_CH_INFO_DBGFLAG_IMR_SET},
        {R_BE_PLRLS_ERR_IMR, B_BE_PLRLS_ERR_IMR_CLR, B_BE_PLRLS_ERR_IMR_SET},
        {R_BE_HAXI_IDCT_MSK, B_BE_HAXI_IDCT_MSK_CLR, B_BE_HAXI_IDCT_MSK_SET},
};

static const struct rtw89_imr_table rtw8922a_imr_dmac_table = {
        .regs = rtw8922a_imr_dmac_regs,
        .n_regs = ARRAY_SIZE(rtw8922a_imr_dmac_regs),
};

static const struct rtw89_reg_imr rtw8922a_imr_cmac_regs[] = {
        {R_BE_RESP_IMR, B_BE_RESP_IMR_CLR, B_BE_RESP_IMR_SET},
        {R_BE_RX_ERROR_FLAG_IMR, B_BE_RX_ERROR_FLAG_IMR_CLR, B_BE_RX_ERROR_FLAG_IMR_SET},
        {R_BE_TX_ERROR_FLAG_IMR, B_BE_TX_ERROR_FLAG_IMR_CLR, B_BE_TX_ERROR_FLAG_IMR_SET},
        {R_BE_RX_ERROR_FLAG_IMR_1, B_BE_TX_ERROR_FLAG_IMR_1_CLR, B_BE_TX_ERROR_FLAG_IMR_1_SET},
        {R_BE_PTCL_IMR1, B_BE_PTCL_IMR1_CLR, B_BE_PTCL_IMR1_SET},
        {R_BE_PTCL_IMR0, B_BE_PTCL_IMR0_CLR, B_BE_PTCL_IMR0_SET},
        {R_BE_PTCL_IMR_2, B_BE_PTCL_IMR_2_CLR, B_BE_PTCL_IMR_2_SET},
        {R_BE_SCHEDULE_ERR_IMR, B_BE_SCHEDULE_ERR_IMR_CLR, B_BE_SCHEDULE_ERR_IMR_SET},
        {R_BE_C0_TXPWR_IMR, B_BE_C0_TXPWR_IMR_CLR, B_BE_C0_TXPWR_IMR_SET},
        {R_BE_TRXPTCL_ERROR_INDICA_MASK, B_BE_TRXPTCL_ERROR_INDICA_MASK_CLR,
         B_BE_TRXPTCL_ERROR_INDICA_MASK_SET},
        {R_BE_RX_ERR_IMR, B_BE_RX_ERR_IMR_CLR, B_BE_RX_ERR_IMR_SET},
        {R_BE_PHYINFO_ERR_IMR_V1, B_BE_PHYINFO_ERR_IMR_V1_CLR, B_BE_PHYINFO_ERR_IMR_V1_SET},
};

static const struct rtw89_imr_table rtw8922a_imr_cmac_table = {
        .regs = rtw8922a_imr_cmac_regs,
        .n_regs = ARRAY_SIZE(rtw8922a_imr_cmac_regs),
};

static const struct rtw89_rrsr_cfgs rtw8922a_rrsr_cfgs = {
        .ref_rate = {R_BE_TRXPTCL_RESP_1, B_BE_WMAC_RESP_REF_RATE_SEL, 0},
        .rsc = {R_BE_PTCL_RRSR1, B_BE_RSC_MASK, 2},
};

static const struct rtw89_dig_regs rtw8922a_dig_regs = {
        .seg0_pd_reg = R_SEG0R_PD_V2,
        .pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
        .pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1,
        .bmode_pd_reg = R_BMODE_PDTH_EN_V2,
        .bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
        .bmode_pd_lower_bound_reg = R_BMODE_PDTH_V2,
        .bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
        .p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
        .p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
        .p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
        .p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
        .p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
        .p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
        .p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V3,
                              B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
        .p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V3,
                              B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
        .p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V3,
                              B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
        .p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V3,
                              B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
};

static const struct rtw89_edcca_regs rtw8922a_edcca_regs = {
        .edcca_level = R_SEG0R_EDCCA_LVL_BE,
        .edcca_mask = B_EDCCA_LVL_MSK0,
        .edcca_p_mask = B_EDCCA_LVL_MSK1,
        .ppdu_level = R_SEG0R_PPDU_LVL_BE,
        .ppdu_mask = B_EDCCA_LVL_MSK1,
        .rpt_a = R_EDCCA_RPT_A_BE,
        .rpt_b = R_EDCCA_RPT_B_BE,
        .rpt_sel = R_EDCCA_RPT_SEL_BE,
        .rpt_sel_mask = B_EDCCA_RPT_SEL_MSK,
        .rpt_sel_be = R_EDCCA_RPTREG_SEL_BE,
        .rpt_sel_be_mask = B_EDCCA_RPTREG_SEL_BE_MSK,
        .tx_collision_t2r_st = R_TX_COLLISION_T2R_ST_BE,
        .tx_collision_t2r_st_mask = B_TX_COLLISION_T2R_ST_BE_M,
};

static const struct rtw89_efuse_block_cfg rtw8922a_efuse_blocks[] = {
        [RTW89_EFUSE_BLOCK_SYS] = {.offset = 0x00000, .size = 0x310},
        [RTW89_EFUSE_BLOCK_RF] = {.offset = 0x10000, .size = 0x240},
        [RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO] = {.offset = 0x20000, .size = 0x4800},
        [RTW89_EFUSE_BLOCK_HCI_DIG_USB] = {.offset = 0x30000, .size = 0x890},
        [RTW89_EFUSE_BLOCK_HCI_PHY_PCIE] = {.offset = 0x40000, .size = 0x200},
        [RTW89_EFUSE_BLOCK_HCI_PHY_USB3] = {.offset = 0x50000, .size = 0x80},
        [RTW89_EFUSE_BLOCK_HCI_PHY_USB2] = {.offset = 0x60000, .size = 0x0},
        [RTW89_EFUSE_BLOCK_ADIE] = {.offset = 0x70000, .size = 0x10},
};

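/* Toggle BT-shared (BTG) RX on both RF paths; the enable/disable value
 * pairs written below are presumably vendor-tuned for the two states.
 */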
static void rtw8922a_ctrl_btg_bt_rx(struct rtw89_dev *rtwdev, bool en,
                                    enum rtw89_phy_idx phy_idx)
{
        if (en) {
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x30, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x2, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
                                      0x1, phy_idx);
        } else {
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x1a, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x2a, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xc, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
                                      0x0, phy_idx);
        }
}

static int rtw8922a_pwr_on_func(struct rtw89_dev *rtwdev)
{
        struct rtw89_hal *hal = &rtwdev->hal;
        u32 val32;
        int ret;

        rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_AFSM_WLSUS_EN |
                                                    B_BE_AFSM_PCIE_SUS_EN);
        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_DIS_WLBT_PDNSUSEN_SOPC);
        rtw89_write32_set(rtwdev, R_BE_WLLPS_CTRL, B_BE_DIS_WLBT_LPSEN_LOPC);
        rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APDM_HPDN);
        rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);

        ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_RDY_SYSPWR,
                                1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
        if (ret)
                return ret;

        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
        rtw89_write32_set(rtwdev, R_BE_WLRESUME_CTRL, B_BE_LPSROP_CMAC0 |
                                                      B_BE_LPSROP_CMAC1);
        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFN_ONMAC);

        ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFN_ONMAC),
                                1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
        if (ret)
                return ret;

        rtw89_write32_clr(rtwdev, R_BE_AFE_ON_CTRL1, B_BE_REG_CK_MON_CK960M_EN);
        rtw89_write8_set(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
                                                      B_BE_POW_PC_LDO_PORT1);
        rtw89_write32_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
                                                       B_BE_R_SYM_ISO_ADDA_P12PP);
        rtw89_write8_set(rtwdev, R_BE_PLATFORM_ENABLE, B_BE_PLATFORM_EN);
        rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);

        ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HAXIDMA_IO_ST,
                                1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
        if (ret)
                return ret;

        ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
                                1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
        if (ret)
                return ret;

        rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);

        ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HCI_WLAN_IO_ST,
                                1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
        if (ret)
                return ret;

        rtw89_write32_clr(rtwdev, R_BE_SYS_SDIO_CTRL, B_BE_PCIE_FORCE_IBX_EN);

        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0x02);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x01, 0x01);
        if (ret)
                return ret;

        rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);

        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x40, 0x40);
        if (ret)
                return ret;

        rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);

        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x20, 0x20);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x04, 0x04);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x08, 0x08);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x10);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xEB, 0xFF);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xEB, 0xFF);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x01, 0x01);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x02, 0x02);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x80);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF1, 0, 0x40);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF2, 0, 0x40);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL_1, 0x40, 0x60);
        if (ret)
                return ret;

        if (hal->cv != CHIP_CAV) {
                rtw89_write32_set(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
                rtw89_write32_set(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_ISO_EB2CORE);
                rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_B);

                mdelay(1);

                rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_S);
                rtw89_write32_clr(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
        }

        rtw89_write32_set(rtwdev, R_BE_DMAC_FUNC_EN,
                          B_BE_MAC_FUNC_EN | B_BE_DMAC_FUNC_EN | B_BE_MPDU_PROC_EN |
                          B_BE_WD_RLS_EN | B_BE_DLE_WDE_EN | B_BE_TXPKT_CTRL_EN |
                          B_BE_STA_SCH_EN | B_BE_DLE_PLE_EN | B_BE_PKT_BUF_EN |
                          B_BE_DMAC_TBL_EN | B_BE_PKT_IN_EN | B_BE_DLE_CPUIO_EN |
                          B_BE_DISPATCHER_EN | B_BE_BBRPT_EN | B_BE_MAC_SEC_EN |
                          B_BE_H_AXIDMA_EN | B_BE_DMAC_MLO_EN | B_BE_PLRLS_EN |
                          B_BE_P_AXIDMA_EN | B_BE_DLE_DATACPUIO_EN | B_BE_LTR_CTL_EN);

        set_bit(RTW89_FLAG_DMAC_FUNC, rtwdev->flags);

        rtw89_write32_set(rtwdev, R_BE_CMAC_SHARE_FUNC_EN,
                          B_BE_CMAC_SHARE_EN | B_BE_RESPBA_EN | B_BE_ADDRSRCH_EN |
                          B_BE_BTCOEX_EN);
        rtw89_write32_set(rtwdev, R_BE_CMAC_FUNC_EN,
                          B_BE_CMAC_EN | B_BE_CMAC_TXEN | B_BE_CMAC_RXEN |
                          B_BE_SIGB_EN | B_BE_PHYINTF_EN | B_BE_CMAC_DMA_EN |
                          B_BE_PTCLTOP_EN | B_BE_SCHEDULER_EN | B_BE_TMAC_EN |
                          B_BE_RMAC_EN | B_BE_TXTIME_EN | B_BE_RESP_PKTCTL_EN);

        set_bit(RTW89_FLAG_CMAC0_FUNC, rtwdev->flags);

        rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
                                                       B_BE_FEN_BBPLAT_RSTB);

        if (!test_bit(RTW89_FLAG_PROBE_DONE, rtwdev->flags))
                rtw89_efuse_read_fw_secure_be(rtwdev);

        return 0;
}

static int rtw8922a_pwr_off_func(struct rtw89_dev *rtwdev)
{
        u32 val32;
        int ret;

        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x10, 0x10);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x08);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x04);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xC6, 0xFF);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xC6, 0xFF);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x80, 0x80);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x02);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x01);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0xFF);
        if (ret)
                return ret;
        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x00, 0xFF);
        if (ret)
                return ret;

        rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
                                                       B_BE_R_SYM_ISO_ADDA_P12PP);
        rtw89_write8_clr(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
                                                      B_BE_POW_PC_LDO_PORT1);
        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
        rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
                                                      B_BE_FEN_BBPLAT_RSTB);
        rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);

        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x20);
        if (ret)
                return ret;

        rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);

        ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x40);
        if (ret)
                return ret;

        rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);

        ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_IO_ST),
                                1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
        if (ret)
                return ret;

        ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
                                1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
        if (ret)
                return ret;

        rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);

        ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HCI_WLAN_IO_ST),
                                1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
        if (ret)
                return ret;

        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_OFFMAC);

        ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFM_OFFMAC),
                                1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
        if (ret)
                return ret;

        rtw89_write32(rtwdev, R_BE_WLLPS_CTRL, 0x0000A1B2);
        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_XTAL_OFF_A_DIE);
        rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
        rtw89_write32(rtwdev, R_BE_UDM1, 0);

        return 0;
}

static void rtw8922a_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
                                        struct rtw8922a_efuse *map)
{
        struct rtw8922a_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
        u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
        struct rtw89_tssi_info *tssi = &rtwdev->tssi;
        u8 i, j;

        tssi->thermal[RF_PATH_A] = map->path_a_therm;
        tssi->thermal[RF_PATH_B] = map->path_b_therm;

        for (i = 0; i < RF_PATH_NUM_8922A; i++) {
                memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
                       sizeof(ofst[i]->cck_tssi));

                for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
                        rtw89_debug(rtwdev, RTW89_DBG_TSSI,
                                    "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
                                    i, j, tssi->tssi_cck[i][j]);

                memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
                       sizeof(ofst[i]->bw40_tssi));
                memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
                       ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
                memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
                       sizeof(tssi->tssi_6g_mcs[i]));

                for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
                        rtw89_debug(rtwdev, RTW89_DBG_TSSI,
                                    "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
                                    i, j, tssi->tssi_mcs[i][j]);
        }
}

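/* RX gain offsets are stored per path and band group; a map that reads
 * back all 0x00 or all 0xff is treated as not programmed, so offset_valid
 * stays false in that case.
 */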
"[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n", 517 i, j, tssi->tssi_mcs[i][j]); 518 } 519 } 520 521 static void rtw8922a_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev, 522 struct rtw8922a_efuse *map) 523 { 524 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain; 525 bool all_0xff = true, all_0x00 = true; 526 int i, j; 527 u8 t; 528 529 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_a._2g_cck; 530 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_b._2g_cck; 531 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_a._2g_ofdm; 532 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_b._2g_ofdm; 533 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_a._5g_low; 534 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_b._5g_low; 535 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_a._5g_mid; 536 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_b._5g_mid; 537 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_a._5g_high; 538 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_b._5g_high; 539 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_a._6g_l0; 540 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_b._6g_l0; 541 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_a._6g_l1; 542 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_b._6g_l1; 543 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_a._6g_m0; 544 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_b._6g_m0; 545 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_a._6g_m1; 546 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_b._6g_m1; 547 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_a._6g_h0; 548 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_b._6g_h0; 549 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_a._6g_h1; 550 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_b._6g_h1; 551 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_a._6g_uh0; 552 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_b._6g_uh0; 553 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_a._6g_uh1; 554 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_b._6g_uh1; 555 556 for (i = RF_PATH_A; i <= RF_PATH_B; i++) 557 for (j = 0; j < RTW89_GAIN_OFFSET_NR; j++) { 558 t = gain->offset[i][j]; 559 if (t != 0xff) 560 all_0xff = false; 561 if (t != 0x0) 562 all_0x00 = false; 563 564 /* transform: sign-bit + U(7,2) to S(8,2) */ 565 if (t & 0x80) 566 gain->offset[i][j] = (t ^ 0x7f) + 1; 567 } 568 569 gain->offset_valid = !all_0xff && !all_0x00; 570 } 571 572 static void rtw8922a_read_efuse_mac_addr(struct rtw89_dev *rtwdev, u32 addr) 573 { 574 struct rtw89_efuse *efuse = &rtwdev->efuse; 575 u16 val; 576 int i; 577 578 for (i = 0; i < ETH_ALEN; i += 2, addr += 2) { 579 val = rtw89_read16(rtwdev, addr); 580 efuse->addr[i] = val & 0xff; 581 efuse->addr[i + 1] = val >> 8; 582 } 583 } 584 585 static int rtw8922a_read_efuse_pci_sdio(struct rtw89_dev *rtwdev, u8 *log_map) 586 { 587 struct rtw89_efuse *efuse = &rtwdev->efuse; 588 589 if (rtwdev->hci.type == RTW89_HCI_TYPE_PCIE) 590 rtw8922a_read_efuse_mac_addr(rtwdev, 0x3104); 591 else 592 ether_addr_copy(efuse->addr, log_map + 0x001A); 593 594 return 0; 595 } 596 597 static int rtw8922a_read_efuse_usb(struct 
static int rtw8922a_read_efuse_usb(struct rtw89_dev *rtwdev, u8 *log_map)
{
        rtw8922a_read_efuse_mac_addr(rtwdev, 0x4078);

        return 0;
}

static int rtw8922a_read_efuse_rf(struct rtw89_dev *rtwdev, u8 *log_map)
{
        struct rtw8922a_efuse *map = (struct rtw8922a_efuse *)log_map;
        struct rtw89_efuse *efuse = &rtwdev->efuse;

        efuse->rfe_type = map->rfe_type;
        efuse->xtal_cap = map->xtal_k;
        efuse->country_code[0] = map->country_code[0];
        efuse->country_code[1] = map->country_code[1];
        rtw8922a_efuse_parsing_tssi(rtwdev, map);
        rtw8922a_efuse_parsing_gain_offset(rtwdev, map);

        rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);

        return 0;
}

static int rtw8922a_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map,
                               enum rtw89_efuse_block block)
{
        switch (block) {
        case RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO:
                return rtw8922a_read_efuse_pci_sdio(rtwdev, log_map);
        case RTW89_EFUSE_BLOCK_HCI_DIG_USB:
                return rtw8922a_read_efuse_usb(rtwdev, log_map);
        case RTW89_EFUSE_BLOCK_RF:
                return rtw8922a_read_efuse_rf(rtwdev, log_map);
        default:
                return 0;
        }
}

#define THM_TRIM_POSITIVE_MASK BIT(6)
#define THM_TRIM_MAGNITUDE_MASK GENMASK(5, 0)

static void rtw8922a_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
                                                 u8 *phycap_map)
{
        static const u32 thm_trim_addr[RF_PATH_NUM_8922A] = {0x1706, 0x1733};
        struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
        u32 addr = rtwdev->chip->phycap_addr;
        bool pg = true;
        u8 pg_th;
        s8 val;
        u8 i;

        for (i = 0; i < RF_PATH_NUM_8922A; i++) {
                pg_th = phycap_map[thm_trim_addr[i] - addr];
                if (pg_th == 0xff) {
                        info->thermal_trim[i] = 0;
                        pg = false;
                        break;
                }

                val = u8_get_bits(pg_th, THM_TRIM_MAGNITUDE_MASK);

                if (!(pg_th & THM_TRIM_POSITIVE_MASK))
                        val *= -1;

                info->thermal_trim[i] = val;

                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[THERMAL][TRIM] path=%d thermal_trim=0x%x (%d)\n",
                            i, pg_th, val);
        }

        info->pg_thermal_trim = pg;
}

static void rtw8922a_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
                                                 u8 *phycap_map)
{
        static const u32 pabias_trim_addr[RF_PATH_NUM_8922A] = {0x1707, 0x1734};
        static const u32 check_pa_pad_trim_addr = 0x1700;
        struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
        u32 addr = rtwdev->chip->phycap_addr;
        u8 val;
        u8 i;

        val = phycap_map[check_pa_pad_trim_addr - addr];
        if (val != 0xff)
                info->pg_pa_bias_trim = true;

        for (i = 0; i < RF_PATH_NUM_8922A; i++) {
                info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];

                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
                            i, info->pa_bias_trim[i]);
        }
}

static void rtw8922a_pa_bias_trim(struct rtw89_dev *rtwdev)
{
        struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
        u8 pabias_2g, pabias_5g;
        u8 i;

        if (!info->pg_pa_bias_trim) {
                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[PA_BIAS][TRIM] no PG, do nothing\n");

                return;
        }

        for (i = 0; i < RF_PATH_NUM_8922A; i++) {
                pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
                pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);

                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
                            i, pabias_2g, pabias_5g);

                rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG_V1, pabias_2g);
                rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA_V1, pabias_5g);
        }
}

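/* PAD bias trim reuses the PA bias PG flag (pg_pa_bias_trim); there is
 * no separate "programmed" flag for it.
 */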
static void rtw8922a_phycap_parsing_pad_bias_trim(struct rtw89_dev *rtwdev,
                                                  u8 *phycap_map)
{
        static const u32 pad_bias_trim_addr[RF_PATH_NUM_8922A] = {0x1708, 0x1735};
        struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
        u32 addr = rtwdev->chip->phycap_addr;
        u8 i;

        for (i = 0; i < RF_PATH_NUM_8922A; i++) {
                info->pad_bias_trim[i] = phycap_map[pad_bias_trim_addr[i] - addr];

                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[PAD_BIAS][TRIM] path=%d pad_bias_trim=0x%x\n",
                            i, info->pad_bias_trim[i]);
        }
}

static void rtw8922a_pad_bias_trim(struct rtw89_dev *rtwdev)
{
        struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
        u8 pad_bias_2g, pad_bias_5g;
        u8 i;

        if (!info->pg_pa_bias_trim) {
                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[PAD_BIAS][TRIM] no PG, do nothing\n");
                return;
        }

        for (i = 0; i < RF_PATH_NUM_8922A; i++) {
                pad_bias_2g = u8_get_bits(info->pad_bias_trim[i], GENMASK(3, 0));
                pad_bias_5g = u8_get_bits(info->pad_bias_trim[i], GENMASK(7, 4));

                rtw89_debug(rtwdev, RTW89_DBG_RFK,
                            "[PAD_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
                            i, pad_bias_2g, pad_bias_5g);

                rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXG_V1, pad_bias_2g);
                rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXA_V1, pad_bias_5g);
        }
}

static int rtw8922a_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
{
        rtw8922a_phycap_parsing_thermal_trim(rtwdev, phycap_map);
        rtw8922a_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
        rtw8922a_phycap_parsing_pad_bias_trim(rtwdev, phycap_map);

        return 0;
}

static void rtw8922a_power_trim(struct rtw89_dev *rtwdev)
{
        rtw8922a_pa_bias_trim(rtwdev);
        rtw8922a_pad_bias_trim(rtwdev);
}

static void rtw8922a_set_channel_mac(struct rtw89_dev *rtwdev,
                                     const struct rtw89_chan *chan,
                                     u8 mac_idx)
{
        u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_BE_TX_SUB_BAND_VALUE, mac_idx);
        u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_BE_TXRATE_CHK, mac_idx);
        u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_BE_WMAC_RFMOD, mac_idx);
        u8 txsb20 = 0, txsb40 = 0, txsb80 = 0;
        u8 rf_mod_val, chk_rate_mask;
        u32 txsb;
        u32 reg;

        switch (chan->band_width) {
        case RTW89_CHANNEL_WIDTH_160:
                txsb80 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_80);
                fallthrough;
        case RTW89_CHANNEL_WIDTH_80:
                txsb40 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_40);
                fallthrough;
        case RTW89_CHANNEL_WIDTH_40:
                txsb20 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_20);
                break;
        default:
                break;
        }

        switch (chan->band_width) {
        case RTW89_CHANNEL_WIDTH_160:
                rf_mod_val = BE_WMAC_RFMOD_160M;
                txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
                       u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK) |
                       u32_encode_bits(txsb80, B_BE_TXSB_80M_MASK);
                break;
        case RTW89_CHANNEL_WIDTH_80:
                rf_mod_val = BE_WMAC_RFMOD_80M;
                txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
                       u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK);
                break;
        case RTW89_CHANNEL_WIDTH_40:
                rf_mod_val = BE_WMAC_RFMOD_40M;
                txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK);
                break;
        case RTW89_CHANNEL_WIDTH_20:
        default:
                rf_mod_val = BE_WMAC_RFMOD_20M;
                txsb = 0;
                break;
        }

        if (txsb20 <= BE_PRI20_BITMAP_MAX)
                txsb |= u32_encode_bits(BIT(txsb20), B_BE_PRI20_BITMAP_MASK);

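        /* Commit the RF bandwidth mode and TX subband map computed above */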
        rtw89_write8_mask(rtwdev, rf_mod, B_BE_WMAC_RFMOD_MASK, rf_mod_val);
        rtw89_write32(rtwdev, sub_carr, txsb);

        switch (chan->band_type) {
        case RTW89_BAND_2G:
                chk_rate_mask = B_BE_BAND_MODE;
                break;
        case RTW89_BAND_5G:
        case RTW89_BAND_6G:
                chk_rate_mask = B_BE_CHECK_CCK_EN | B_BE_RTS_LIMIT_IN_OFDM6;
                break;
        default:
                rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
                return;
        }

        rtw89_write8_clr(rtwdev, chk_rate, B_BE_BAND_MODE | B_BE_CHECK_CCK_EN |
                                           B_BE_RTS_LIMIT_IN_OFDM6);
        rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);

        switch (chan->band_width) {
        case RTW89_CHANNEL_WIDTH_320:
        case RTW89_CHANNEL_WIDTH_160:
        case RTW89_CHANNEL_WIDTH_80:
        case RTW89_CHANNEL_WIDTH_40:
                reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
                rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x41);
                reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
                rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x41);
                break;
        default:
                reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
                rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x3f);
                reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
                rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x3e);
                break;
        }
}

static const u32 rtw8922a_sco_barker_threshold[14] = {
        0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
        0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
};

static const u32 rtw8922a_sco_cck_threshold[14] = {
        0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
        0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
};

static int rtw8922a_ctrl_sco_cck(struct rtw89_dev *rtwdev,
                                 u8 primary_ch, enum rtw89_bandwidth bw,
                                 enum rtw89_phy_idx phy_idx)
{
        u8 ch_element;

        if (primary_ch >= 14)
                return -EINVAL;

        ch_element = primary_ch - 1;

        rtw89_phy_write32_idx(rtwdev, R_BK_FC0INV, B_BK_FC0INV,
                              rtw8922a_sco_barker_threshold[ch_element],
                              phy_idx);
        rtw89_phy_write32_idx(rtwdev, R_CCK_FC0INV, B_CCK_FC0INV,
                              rtw8922a_sco_cck_threshold[ch_element],
                              phy_idx);

        return 0;
}

struct rtw8922a_bb_gain {
        u32 gain_g[BB_PATH_NUM_8922A];
        u32 gain_a[BB_PATH_NUM_8922A];
        u32 gain_g_mask;
        u32 gain_a_mask;
};

static const struct rtw89_reg_def rpl_comp_bw160[RTW89_BW20_SC_160M] = {
        { .addr = 0x41E8, .mask = 0xFF00},
        { .addr = 0x41E8, .mask = 0xFF0000},
        { .addr = 0x41E8, .mask = 0xFF000000},
        { .addr = 0x41EC, .mask = 0xFF},
        { .addr = 0x41EC, .mask = 0xFF00},
        { .addr = 0x41EC, .mask = 0xFF0000},
        { .addr = 0x41EC, .mask = 0xFF000000},
        { .addr = 0x41F0, .mask = 0xFF}
};

static const struct rtw89_reg_def rpl_comp_bw80[RTW89_BW20_SC_80M] = {
        { .addr = 0x41F4, .mask = 0xFF},
        { .addr = 0x41F4, .mask = 0xFF00},
        { .addr = 0x41F4, .mask = 0xFF0000},
        { .addr = 0x41F4, .mask = 0xFF000000}
};

static const struct rtw89_reg_def rpl_comp_bw40[RTW89_BW20_SC_40M] = {
        { .addr = 0x41F0, .mask = 0xFF0000},
        { .addr = 0x41F0, .mask = 0xFF000000}
};

static const struct rtw89_reg_def rpl_comp_bw20[RTW89_BW20_SC_20M] = {
        { .addr = 0x41F0, .mask = 0xFF00}
};

static const struct rtw8922a_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
        { .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
          .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
        { .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
          .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
        { .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
          .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
        { .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
          .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
        { .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
          .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
        { .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
          .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
        { .gain_g = {0x40a8, 0x44a8}, .gain_a = {0x4078, 0x4478},
          .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
};

static const struct rtw8922a_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
        { .gain_g = {0x4054, 0x4454}, .gain_a = {0x4054, 0x4454},
          .gain_g_mask = 0x7FC0000, .gain_a_mask = 0x1FF},
        { .gain_g = {0x4058, 0x4458}, .gain_a = {0x4054, 0x4454},
          .gain_g_mask = 0x1FF, .gain_a_mask = 0x3FE00 },
};

struct rtw8922a_bb_gain_bypass {
        u32 gain_g[BB_PATH_NUM_8922A];
        u32 gain_a[BB_PATH_NUM_8922A];
        u32 gain_mask_g;
        u32 gain_mask_a;
};

static void rtw8922a_set_rpl_gain(struct rtw89_dev *rtwdev,
                                  const struct rtw89_chan *chan,
                                  enum rtw89_rf_path path,
                                  enum rtw89_phy_idx phy_idx)
{
        const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
        u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
        u32 reg_path_ofst = 0;
        u32 mask;
        s32 val;
        u32 reg;
        int i;

        if (path == RF_PATH_B)
                reg_path_ofst = 0x400;

        for (i = 0; i < RTW89_BW20_SC_160M; i++) {
                reg = rpl_comp_bw160[i].addr | reg_path_ofst;
                mask = rpl_comp_bw160[i].mask;
                val = gain->rpl_ofst_160[gain_band][path][i];
                rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
        }

        for (i = 0; i < RTW89_BW20_SC_80M; i++) {
                reg = rpl_comp_bw80[i].addr | reg_path_ofst;
                mask = rpl_comp_bw80[i].mask;
                val = gain->rpl_ofst_80[gain_band][path][i];
                rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
        }

        for (i = 0; i < RTW89_BW20_SC_40M; i++) {
                reg = rpl_comp_bw40[i].addr | reg_path_ofst;
                mask = rpl_comp_bw40[i].mask;
                val = gain->rpl_ofst_40[gain_band][path][i];
                rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
        }

        for (i = 0; i < RTW89_BW20_SC_20M; i++) {
                reg = rpl_comp_bw20[i].addr | reg_path_ofst;
                mask = rpl_comp_bw20[i].mask;
                val = gain->rpl_ofst_20[gain_band][path][i];
                rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
        }
}

static void rtw8922a_set_lna_tia_gain(struct rtw89_dev *rtwdev,
                                      const struct rtw89_chan *chan,
                                      enum rtw89_rf_path path,
                                      enum rtw89_phy_idx phy_idx)
{
        const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
        u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
        enum rtw89_phy_bb_bw_be bw_type;
        s32 val;
        u32 reg;
        u32 mask;
        int i;

        bw_type = chan->band_width <= RTW89_CHANNEL_WIDTH_40 ?
                  RTW89_BB_BW_20_40 : RTW89_BB_BW_80_160_320;

        for (i = 0; i < LNA_GAIN_NUM; i++) {
                if (chan->band_type == RTW89_BAND_2G) {
                        reg = bb_gain_lna[i].gain_g[path];
                        mask = bb_gain_lna[i].gain_g_mask;
                } else {
                        reg = bb_gain_lna[i].gain_a[path];
                        mask = bb_gain_lna[i].gain_a_mask;
                }
                val = gain->lna_gain[gain_band][bw_type][path][i];
                rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
        }

        for (i = 0; i < TIA_GAIN_NUM; i++) {
                if (chan->band_type == RTW89_BAND_2G) {
                        reg = bb_gain_tia[i].gain_g[path];
                        mask = bb_gain_tia[i].gain_g_mask;
                } else {
                        reg = bb_gain_tia[i].gain_a[path];
                        mask = bb_gain_tia[i].gain_a_mask;
                }
                val = gain->tia_gain[gain_band][bw_type][path][i];
                rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
        }
}

static void rtw8922a_set_gain(struct rtw89_dev *rtwdev,
                              const struct rtw89_chan *chan,
                              enum rtw89_rf_path path,
                              enum rtw89_phy_idx phy_idx)
{
        rtw8922a_set_lna_tia_gain(rtwdev, chan, path, phy_idx);
        rtw8922a_set_rpl_gain(rtwdev, chan, path, phy_idx);
}

static void rtw8922a_set_rx_gain_normal_cck(struct rtw89_dev *rtwdev,
                                            const struct rtw89_chan *chan,
                                            enum rtw89_rf_path path)
{
        struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
        s8 value = -gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK]; /* S(8,2) */
        u8 fraction = value & 0x3;

        if (fraction) {
                rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20,
                                       (0x4 - fraction) << 1);
                rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40,
                                       (0x4 - fraction) << 1);

                value >>= 2;
                rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
                                       value + 1 + 0xdc);
        } else {
                rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, 0);
                rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, 0);

                value >>= 2;
                rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
                                       value + 0xdc);
        }
}

static void rtw8922a_set_rx_gain_normal_ofdm(struct rtw89_dev *rtwdev,
                                             const struct rtw89_chan *chan,
                                             enum rtw89_rf_path path)
{
        static const u32 rssi_tb_bias_comp[2] = {0x41f8, 0x45f8};
        static const u32 rssi_tb_ext_comp[2] = {0x4208, 0x4608};
        static const u32 rssi_ofst_addr[2] = {0x40c8, 0x44c8};
        static const u32 rpl_bias_comp[2] = {0x41e8, 0x45e8};
        static const u32 rpl_ext_comp[2] = {0x41f8, 0x45f8};
        struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
        enum rtw89_gain_offset gain_band;
        s8 v1, v2, v3;
        s32 value;

        gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(chan->subband_type);
        value = gain->offset[path][gain_band];
        rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], 0xff000000, value + 0xF8);

        value *= -4;
        v1 = clamp_t(s32, value, S8_MIN, S8_MAX);
        value -= v1;
        v2 = clamp_t(s32, value, S8_MIN, S8_MAX);
        value -= v2;
        v3 = clamp_t(s32, value, S8_MIN, S8_MAX);

        rtw89_phy_write32_mask(rtwdev, rpl_bias_comp[path], 0xff, v1);
        rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff, v2);
        rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff00, v3);

        rtw89_phy_write32_mask(rtwdev, rssi_tb_bias_comp[path], 0xff0000, v1);
        rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff0000, v2);
        rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff000000, v3);
}

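/* Apply the efuse RX gain offsets; CCK compensation only matters on the
 * 2 GHz band, while the OFDM compensation is applied on every band.
 */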
static void rtw8922a_set_rx_gain_normal(struct rtw89_dev *rtwdev,
                                        const struct rtw89_chan *chan,
                                        enum rtw89_rf_path path)
{
        struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;

        if (!gain->offset_valid)
                return;

        if (chan->band_type == RTW89_BAND_2G)
                rtw8922a_set_rx_gain_normal_cck(rtwdev, chan, path);

        rtw8922a_set_rx_gain_normal_ofdm(rtwdev, chan, path);
}

static void rtw8922a_set_cck_parameters(struct rtw89_dev *rtwdev, u8 central_ch,
                                        enum rtw89_phy_idx phy_idx)
{
        if (central_ch == 14) {
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3b13ff, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x1c42de, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0xfdb0ad, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0xf60f6e, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfd8f92, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0x02d011, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0x01c02c, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xfff00a, phy_idx);
        } else {
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3a63ca, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x2a833f, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0x1491f8, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0x03c0b0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfccff1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0xfccfc3, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0xfebfdc, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xffdff7, phy_idx);
        }
}

static void rtw8922a_ctrl_ch(struct rtw89_dev *rtwdev,
                             const struct rtw89_chan *chan,
                             enum rtw89_phy_idx phy_idx)
{
        static const u32 band_sel[2] = {0x4160, 0x4560};
        u16 central_freq = chan->freq;
        u8 central_ch = chan->channel;
        u8 band = chan->band_type;
        bool is_2g = band == RTW89_BAND_2G;
        u8 chan_idx;
        u8 path;
        u8 sco;

        if (!central_freq) {
                rtw89_warn(rtwdev, "Invalid central_freq\n");
                return;
        }

        rtw8922a_set_gain(rtwdev, chan, RF_PATH_A, phy_idx);
        rtw8922a_set_gain(rtwdev, chan, RF_PATH_B, phy_idx);

        for (path = RF_PATH_A; path < BB_PATH_NUM_8922A; path++)
                rtw89_phy_write32_idx(rtwdev, band_sel[path], BIT((26)), is_2g, phy_idx);

        rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_A);
        rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_B);

        rtw89_phy_write32_idx(rtwdev, R_FC0, B_FC0, central_freq, phy_idx);
        sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
        rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_FC0_INV, sco, phy_idx);

        if (band == RTW89_BAND_2G)
                rtw8922a_set_cck_parameters(rtwdev, central_ch, phy_idx);

        chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band);
        rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
}

static void
rtw8922a_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_sb, u8 bw,
                 enum rtw89_phy_idx phy_idx)
{
        switch (bw) {
        case RTW89_CHANNEL_WIDTH_5:
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
                break;
        case RTW89_CHANNEL_WIDTH_10:
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x2, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
                break;
        case RTW89_CHANNEL_WIDTH_20:
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
                break;
        case RTW89_CHANNEL_WIDTH_40:
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
                break;
        case RTW89_CHANNEL_WIDTH_80:
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x2, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
                break;
        case RTW89_CHANNEL_WIDTH_160:
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x3, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
                break;
        default:
                rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri_sb:%d)\n", bw,
                           pri_sb);
                break;
        }

        if (bw == RTW89_CHANNEL_WIDTH_40)
                rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 1, phy_idx);
        else
                rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 0, phy_idx);
}

static u32 rtw8922a_spur_freq(struct rtw89_dev *rtwdev,
                              const struct rtw89_chan *chan)
{
        return 0;
}

#define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
#define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
#define MAX_TONE_NUM 2048

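/* rtw8922a_spur_freq() currently reports no spur, so the CSI weighting
 * and NBI notch filters configured below stay disabled.
 */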
static void rtw8922a_set_csi_tone_idx(struct rtw89_dev *rtwdev,
                                      const struct rtw89_chan *chan,
                                      enum rtw89_phy_idx phy_idx)
{
        s32 freq_diff, csi_idx, csi_tone_idx;
        u32 spur_freq;

        spur_freq = rtw8922a_spur_freq(rtwdev, chan);
        if (spur_freq == 0) {
                rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN,
                                      0, phy_idx);
                return;
        }

        freq_diff = (spur_freq - chan->freq) * 1000000;
        csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
        s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);

        rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_TONE_IDX,
                              csi_tone_idx, phy_idx);
        rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, 1, phy_idx);
}

static const struct rtw89_nbi_reg_def rtw8922a_nbi_reg_def[] = {
        [RF_PATH_A] = {
                .notch1_idx = {0x41a0, 0xFF},
                .notch1_frac_idx = {0x41a0, 0xC00},
                .notch1_en = {0x41a0, 0x1000},
                .notch2_idx = {0x41ac, 0xFF},
                .notch2_frac_idx = {0x41ac, 0xC00},
                .notch2_en = {0x41ac, 0x1000},
        },
        [RF_PATH_B] = {
                .notch1_idx = {0x45a0, 0xFF},
                .notch1_frac_idx = {0x45a0, 0xC00},
                .notch1_en = {0x45a0, 0x1000},
                .notch2_idx = {0x45ac, 0xFF},
                .notch2_frac_idx = {0x45ac, 0xC00},
                .notch2_en = {0x45ac, 0x1000},
        },
};

static void rtw8922a_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
                                      const struct rtw89_chan *chan,
                                      enum rtw89_rf_path path,
                                      enum rtw89_phy_idx phy_idx)
{
        const struct rtw89_nbi_reg_def *nbi = &rtw8922a_nbi_reg_def[path];
        s32 nbi_frac_idx, nbi_frac_tone_idx;
        s32 nbi_idx, nbi_tone_idx;
        bool notch2_chk = false;
        u32 spur_freq, fc;
        s32 freq_diff;

        spur_freq = rtw8922a_spur_freq(rtwdev, chan);
        if (spur_freq == 0) {
                rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
                                      nbi->notch1_en.mask, 0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
                                      nbi->notch2_en.mask, 0, phy_idx);
                return;
        }

        fc = chan->freq;
        if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
                fc = (spur_freq > fc) ? fc + 40 : fc - 40;
                if ((fc > spur_freq &&
                     chan->channel < chan->primary_channel) ||
                    (fc < spur_freq &&
                     chan->channel > chan->primary_channel))
                        notch2_chk = true;
        }

        freq_diff = (spur_freq - fc) * 1000000;
        nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5,
                                         &nbi_frac_idx);

        if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
                s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
        } else {
                u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
1355 128 : 256; 1356 1357 s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx); 1358 } 1359 nbi_frac_tone_idx = 1360 s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125); 1361 1362 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) { 1363 rtw89_phy_write32_idx(rtwdev, nbi->notch2_idx.addr, 1364 nbi->notch2_idx.mask, nbi_tone_idx, phy_idx); 1365 rtw89_phy_write32_idx(rtwdev, nbi->notch2_frac_idx.addr, 1366 nbi->notch2_frac_idx.mask, nbi_frac_tone_idx, 1367 phy_idx); 1368 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1369 nbi->notch2_en.mask, 0, phy_idx); 1370 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1371 nbi->notch2_en.mask, 1, phy_idx); 1372 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1373 nbi->notch1_en.mask, 0, phy_idx); 1374 } else { 1375 rtw89_phy_write32_idx(rtwdev, nbi->notch1_idx.addr, 1376 nbi->notch1_idx.mask, nbi_tone_idx, phy_idx); 1377 rtw89_phy_write32_idx(rtwdev, nbi->notch1_frac_idx.addr, 1378 nbi->notch1_frac_idx.mask, nbi_frac_tone_idx, 1379 phy_idx); 1380 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1381 nbi->notch1_en.mask, 0, phy_idx); 1382 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1383 nbi->notch1_en.mask, 1, phy_idx); 1384 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1385 nbi->notch2_en.mask, 0, phy_idx); 1386 } 1387 } 1388 1389 static void rtw8922a_spur_elimination(struct rtw89_dev *rtwdev, 1390 const struct rtw89_chan *chan, 1391 enum rtw89_phy_idx phy_idx) 1392 { 1393 rtw8922a_set_csi_tone_idx(rtwdev, chan, phy_idx); 1394 rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A, phy_idx); 1395 rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B, phy_idx); 1396 } 1397 1398 static void rtw8922a_ctrl_afe_dac(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw, 1399 enum rtw89_rf_path path) 1400 { 1401 u32 cr_ofst = 0x0; 1402 1403 if (path == RF_PATH_B) 1404 cr_ofst = 0x100; 1405 1406 switch (bw) { 1407 case RTW89_CHANNEL_WIDTH_5: 1408 case RTW89_CHANNEL_WIDTH_10: 1409 case RTW89_CHANNEL_WIDTH_20: 1410 case RTW89_CHANNEL_WIDTH_40: 1411 case RTW89_CHANNEL_WIDTH_80: 1412 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xE); 1413 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x7); 1414 break; 1415 case RTW89_CHANNEL_WIDTH_160: 1416 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xD); 1417 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x6); 1418 break; 1419 default: 1420 break; 1421 } 1422 } 1423 1424 static const struct rtw89_reg2_def bb_mcu0_init_reg[] = { 1425 {0x6990, 0x00000000}, 1426 {0x6994, 0x00000000}, 1427 {0x6998, 0x00000000}, 1428 {0x6820, 0xFFFFFFFE}, 1429 {0x6800, 0xC0000FFE}, 1430 {0x6808, 0x76543210}, 1431 {0x6814, 0xBFBFB000}, 1432 {0x6818, 0x0478C009}, 1433 {0x6800, 0xC0000FFF}, 1434 {0x6820, 0xFFFFFFFF}, 1435 }; 1436 1437 static const struct rtw89_reg2_def bb_mcu1_init_reg[] = { 1438 {0x6990, 0x00000000}, 1439 {0x6994, 0x00000000}, 1440 {0x6998, 0x00000000}, 1441 {0x6820, 0xFFFFFFFE}, 1442 {0x6800, 0xC0000FFE}, 1443 {0x6808, 0x76543210}, 1444 {0x6814, 0xBFBFB000}, 1445 {0x6818, 0x0478C009}, 1446 {0x6800, 0xC0000FFF}, 1447 {0x6820, 0xFFFFFFFF}, 1448 }; 1449 1450 static void rtw8922a_bbmcu_cr_init(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx) 1451 { 1452 const struct rtw89_reg2_def *reg; 1453 int size; 1454 int i; 1455 1456 if (phy_idx == RTW89_PHY_0) { 1457 reg = bb_mcu0_init_reg; 1458 size = ARRAY_SIZE(bb_mcu0_init_reg); 1459 } else { 1460 reg = bb_mcu1_init_reg; 1461 size = ARRAY_SIZE(bb_mcu1_init_reg); 1462 } 1463 1464 
        for (i = 0; i < size; i++, reg++)
                rtw89_bbmcu_write32(rtwdev, reg->addr, reg->data, phy_idx);
}

static const u32 dmac_sys_mask[2] = {B_BE_DMAC_BB_PHY0_MASK, B_BE_DMAC_BB_PHY1_MASK};
static const u32 bbrst_mask[2] = {B_BE_FEN_BBPLAT_RSTB, B_BE_FEN_BB1PLAT_RSTB};
static const u32 glbrst_mask[2] = {B_BE_FEN_BB_IP_RSTN, B_BE_FEN_BB1_IP_RSTN};
static const u32 mcu_bootrdy_mask[2] = {B_BE_BOOT_RDY0, B_BE_BOOT_RDY1};

static void rtw8922a_bb_preinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
        u32 rdy = 0;

        if (phy_idx == RTW89_PHY_1)
                rdy = 1;

        rtw89_write32_mask(rtwdev, R_BE_DMAC_SYS_CR32B, dmac_sys_mask[phy_idx], 0x7FF9);
        rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x0);
        rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx], 0x0);
        rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x1);
        rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx], rdy);
        rtw89_write32_mask(rtwdev, R_BE_MEM_PWR_CTRL, B_BE_MEM_BBMCU0_DS_V1, 0);

        fsleep(1);
        rtw8922a_bbmcu_cr_init(rtwdev, phy_idx);
}

static void rtw8922a_bb_postinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
{
        if (phy_idx == RTW89_PHY_0)
                rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx]);
        rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx]);

        rtw89_phy_write32_set(rtwdev, R_BBCLK, B_CLK_640M);
        rtw89_phy_write32_clr(rtwdev, R_TXSCALE, B_TXFCTR_EN);
        rtw89_phy_set_phy_regs(rtwdev, R_TXFCTR, B_TXFCTR_THD, 0x200);
        rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_EHT_RATE_TH, 0xA);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_HE_RATE_TH, 0xA);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_HT_VHT_TH, 0xAAA);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_EHT_MCS14, 0x1);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_EHT_MCS15, 0x1);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_EHTTB_EN, 0x0);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEERSU_EN, 0x0);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEMU_EN, 0x0);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_TB_EN, 0x0);
        rtw89_phy_set_phy_regs(rtwdev, R_SU_PUNC, B_SU_PUNC_EN, 0x1);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_HWGEN_EN, 0x1);
        rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_PWROFST_COMP, 0x1);
        rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_BY_SLOPE, 0x1);
        rtw89_phy_set_phy_regs(rtwdev, R_MAG_A, B_MGA_AEND, 0xe0);
        rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_MAG_AB, 0xe0c000);
        rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_A, 0x3FE0);
        rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_B, 0x3FE0);
        rtw89_phy_set_phy_regs(rtwdev, R_SC_CORNER, B_SC_CORNER, 0x200);
        rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x0, phy_idx);
        rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x1, phy_idx);
}

static void rtw8922a_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
                                 bool en, enum rtw89_phy_idx phy_idx)
{
        if (en) {
                rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
                if (band == RTW89_BAND_2G)
                        rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1,
                                              B_RXCCA_BE1_DIS, 0x0, phy_idx);
                rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0, phy_idx);
        } else {
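                /* quiesce CCA and PD hit detection before pulling the async BB reset */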
rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1, phy_idx); 1534 fsleep(1); 1535 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx); 1536 } 1537 } 1538 1539 static int rtw8922a_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, 1540 enum rtw89_rf_path tx_path, 1541 enum rtw89_phy_idx phy_idx) 1542 { 1543 struct rtw89_reg2_def path_com_cr[] = { 1544 {0x11A00, 0x21C86900}, 1545 {0x11A04, 0x00E4E433}, 1546 {0x11A08, 0x39390CC9}, 1547 {0x11A0C, 0x4E433240}, 1548 {0x11A10, 0x90CC900E}, 1549 {0x11A14, 0x00240393}, 1550 {0x11A18, 0x201C8600}, 1551 }; 1552 int ret = 0; 1553 u32 reg; 1554 int i; 1555 1556 rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL, 0x0, phy_idx); 1557 1558 if (phy_idx == RTW89_PHY_1 && !rtwdev->dbcc_en) 1559 return 0; 1560 1561 if (tx_path == RF_PATH_A) { 1562 path_com_cr[0].data = 0x21C82900; 1563 path_com_cr[1].data = 0x00E4E431; 1564 path_com_cr[2].data = 0x39390C49; 1565 path_com_cr[3].data = 0x4E431240; 1566 path_com_cr[4].data = 0x90C4900E; 1567 path_com_cr[6].data = 0x201C8200; 1568 } else if (tx_path == RF_PATH_B) { 1569 path_com_cr[0].data = 0x21C04900; 1570 path_com_cr[1].data = 0x00E4E032; 1571 path_com_cr[2].data = 0x39380C89; 1572 path_com_cr[3].data = 0x4E032240; 1573 path_com_cr[4].data = 0x80C8900E; 1574 path_com_cr[6].data = 0x201C0400; 1575 } else if (tx_path == RF_PATH_AB) { 1576 path_com_cr[0].data = 0x21C86900; 1577 path_com_cr[1].data = 0x00E4E433; 1578 path_com_cr[2].data = 0x39390CC9; 1579 path_com_cr[3].data = 0x4E433240; 1580 path_com_cr[4].data = 0x90CC900E; 1581 path_com_cr[6].data = 0x201C8600; 1582 } else { 1583 ret = -EINVAL; 1584 } 1585 1586 for (i = 0; i < ARRAY_SIZE(path_com_cr); i++) { 1587 reg = rtw89_mac_reg_by_idx(rtwdev, path_com_cr[i].addr, phy_idx); 1588 rtw89_write32(rtwdev, reg, path_com_cr[i].data); 1589 } 1590 1591 return ret; 1592 } 1593 1594 static void rtw8922a_bb_reset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx) 1595 { 1596 } 1597 1598 static int rtw8922a_cfg_rx_nss_limit(struct rtw89_dev *rtwdev, u8 rx_nss, 1599 enum rtw89_phy_idx phy_idx) 1600 { 1601 if (rx_nss == 1) { 1602 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 0, phy_idx); 1603 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 0, phy_idx); 1604 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX, 1605 HE_N_USER_MAX_8922A, phy_idx); 1606 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 0, phy_idx); 1607 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 0, phy_idx); 1608 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 0, phy_idx); 1609 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 0, 1610 phy_idx); 1611 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX, 1612 HE_N_USER_MAX_8922A, phy_idx); 1613 } else if (rx_nss == 2) { 1614 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 1, phy_idx); 1615 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 1, phy_idx); 1616 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX, 1617 HE_N_USER_MAX_8922A, phy_idx); 1618 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 1, phy_idx); 1619 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 1, phy_idx); 1620 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 1, phy_idx); 1621 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 1, 1622 phy_idx); 1623 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX, 1624 HE_N_USER_MAX_8922A, phy_idx); 1625 } else { 1626 return -EINVAL; 1627 } 1628 1629 return 0; 1630 } 1631 1632 static void rtw8922a_tssi_reset(struct rtw89_dev 
*rtwdev, 1633 enum rtw89_rf_path path, 1634 enum rtw89_phy_idx phy_idx) 1635 { 1636 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) { 1637 if (phy_idx == RTW89_PHY_0) { 1638 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0); 1639 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1); 1640 } else { 1641 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0); 1642 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1); 1643 } 1644 } else { 1645 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0); 1646 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1); 1647 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0); 1648 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1); 1649 } 1650 } 1651 1652 static int rtw8922a_ctrl_rx_path_tmac(struct rtw89_dev *rtwdev, 1653 enum rtw89_rf_path rx_path, 1654 enum rtw89_phy_idx phy_idx) 1655 { 1656 u8 rx_nss = (rx_path == RF_PATH_AB) ? 2 : 1; 1657 1658 /* Set to 0 first to avoid abnormal EDCCA report */ 1659 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x0, phy_idx); 1660 1661 if (rx_path == RF_PATH_A) { 1662 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x1, phy_idx); 1663 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 1, phy_idx); 1664 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 1665 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx); 1666 } else if (rx_path == RF_PATH_B) { 1667 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x2, phy_idx); 1668 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 2, phy_idx); 1669 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 1670 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx); 1671 } else if (rx_path == RF_PATH_AB) { 1672 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x3, phy_idx); 1673 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 3, phy_idx); 1674 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 1675 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx); 1676 } else { 1677 return -EINVAL; 1678 } 1679 1680 return 0; 1681 } 1682 1683 static int rtw8922a_ctrl_mlo(struct rtw89_dev *rtwdev, enum rtw89_mlo_dbcc_mode mode) 1684 { 1685 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0); 1686 1687 if (mode == MLO_1_PLUS_1_1RF || mode == DBCC_LEGACY) { 1688 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x1); 1689 rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x0); 1690 } else if (mode == MLO_2_PLUS_0_1RF || mode == MLO_0_PLUS_2_1RF || 1691 mode == MLO_DBCC_NOT_SUPPORT) { 1692 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0); 1693 rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x1); 1694 } else { 1695 return -EOPNOTSUPP; 1696 } 1697 1698 if (mode == MLO_2_PLUS_0_1RF) { 1699 rtw8922a_ctrl_afe_dac(rtwdev, chan->band_width, RF_PATH_A); 1700 rtw8922a_ctrl_afe_dac(rtwdev, chan->band_width, RF_PATH_B); 1701 } else { 1702 rtw89_warn(rtwdev, "unsupported MLO mode %d\n", mode); 1703 } 1704 1705 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180); 1706 1707 if (mode == MLO_2_PLUS_0_1RF) { 1708 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1709 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9); 1710 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9); 1711 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9); 1712 } else if (mode == MLO_0_PLUS_2_1RF) { 1713 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1714 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF); 
1715 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF); 1716 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF); 1717 } else if ((mode == MLO_1_PLUS_1_1RF) || (mode == DBCC_LEGACY)) { 1718 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x7BAB); 1719 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3BAB); 1720 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3AAB); 1721 } else { 1722 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x180); 1723 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x0); 1724 } 1725 1726 return 0; 1727 } 1728 1729 static void rtw8922a_bb_sethw(struct rtw89_dev *rtwdev) 1730 { 1731 u32 reg; 1732 1733 rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP, B_EN_SND_WO_NDP); 1734 rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP_C1, B_EN_SND_WO_NDP); 1735 1736 rtw89_write32_mask(rtwdev, R_BE_PWR_BOOST, B_BE_PWR_CTRL_SEL, 0); 1737 if (rtwdev->dbcc_en) { 1738 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, RTW89_MAC_1); 1739 rtw89_write32_mask(rtwdev, reg, B_BE_PWR_CTRL_SEL, 0); 1740 } 1741 1742 rtw8922a_ctrl_mlo(rtwdev, rtwdev->mlo_dbcc_mode); 1743 } 1744 1745 static void rtw8922a_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en, 1746 enum rtw89_phy_idx phy_idx) 1747 { 1748 if (cck_en) { 1749 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0, phy_idx); 1750 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1, phy_idx); 1751 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF, 1752 0, phy_idx); 1753 } else { 1754 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 1, phy_idx); 1755 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0, phy_idx); 1756 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF, 1757 1, phy_idx); 1758 } 1759 } 1760 1761 static void rtw8922a_set_channel_bb(struct rtw89_dev *rtwdev, 1762 const struct rtw89_chan *chan, 1763 enum rtw89_phy_idx phy_idx) 1764 { 1765 bool cck_en = chan->band_type == RTW89_BAND_2G; 1766 u8 pri_sb = chan->pri_sb_idx; 1767 1768 if (cck_en) 1769 rtw8922a_ctrl_sco_cck(rtwdev, chan->primary_channel, 1770 chan->band_width, phy_idx); 1771 1772 rtw8922a_ctrl_ch(rtwdev, chan, phy_idx); 1773 rtw8922a_ctrl_bw(rtwdev, pri_sb, chan->band_width, phy_idx); 1774 rtw8922a_ctrl_cck_en(rtwdev, cck_en, phy_idx); 1775 rtw8922a_spur_elimination(rtwdev, chan, phy_idx); 1776 1777 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx); 1778 rtw8922a_tssi_reset(rtwdev, RF_PATH_AB, phy_idx); 1779 } 1780 1781 static void rtw8922a_pre_set_channel_bb(struct rtw89_dev *rtwdev, 1782 enum rtw89_phy_idx phy_idx) 1783 { 1784 if (!rtwdev->dbcc_en) 1785 return; 1786 1787 if (phy_idx == RTW89_PHY_0) { 1788 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0); 1789 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180); 1790 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1791 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9); 1792 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9); 1793 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9); 1794 } else { 1795 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0); 1796 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1797 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF); 1798 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF); 1799 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF); 1800 } 1801 } 1802 1803 static void rtw8922a_post_set_channel_bb(struct rtw89_dev *rtwdev, 1804 
enum rtw89_mlo_dbcc_mode mode) 1805 { 1806 if (!rtwdev->dbcc_en) 1807 return; 1808 1809 rtw8922a_ctrl_mlo(rtwdev, mode); 1810 } 1811 1812 static void rtw8922a_set_channel(struct rtw89_dev *rtwdev, 1813 const struct rtw89_chan *chan, 1814 enum rtw89_mac_idx mac_idx, 1815 enum rtw89_phy_idx phy_idx) 1816 { 1817 rtw8922a_set_channel_mac(rtwdev, chan, mac_idx); 1818 rtw8922a_set_channel_bb(rtwdev, chan, phy_idx); 1819 rtw8922a_set_channel_rf(rtwdev, chan, phy_idx); 1820 } 1821 1822 static void rtw8922a_dfs_en_idx(struct rtw89_dev *rtwdev, 1823 enum rtw89_phy_idx phy_idx, enum rtw89_rf_path path, 1824 bool en) 1825 { 1826 u32 path_ofst = (path == RF_PATH_B) ? 0x100 : 0x0; 1827 1828 if (en) 1829 rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 1, 1830 phy_idx); 1831 else 1832 rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 0, 1833 phy_idx); 1834 } 1835 1836 static void rtw8922a_dfs_en(struct rtw89_dev *rtwdev, bool en, 1837 enum rtw89_phy_idx phy_idx) 1838 { 1839 rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_A, en); 1840 rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_B, en); 1841 } 1842 1843 static void rtw8922a_adc_en_path(struct rtw89_dev *rtwdev, 1844 enum rtw89_rf_path path, bool en) 1845 { 1846 u32 val; 1847 1848 val = rtw89_phy_read32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1); 1849 1850 if (en) { 1851 if (path == RF_PATH_A) 1852 val &= ~0x1; 1853 else 1854 val &= ~0x2; 1855 } else { 1856 if (path == RF_PATH_A) 1857 val |= 0x1; 1858 else 1859 val |= 0x2; 1860 } 1861 1862 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1, val); 1863 } 1864 1865 static void rtw8922a_adc_en(struct rtw89_dev *rtwdev, bool en, u8 phy_idx) 1866 { 1867 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) { 1868 if (phy_idx == RTW89_PHY_0) 1869 rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en); 1870 else 1871 rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en); 1872 } else { 1873 rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en); 1874 rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en); 1875 } 1876 } 1877 1878 static 1879 void rtw8922a_hal_reset(struct rtw89_dev *rtwdev, 1880 enum rtw89_phy_idx phy_idx, enum rtw89_mac_idx mac_idx, 1881 enum rtw89_band band, u32 *tx_en, bool enter) 1882 { 1883 if (enter) { 1884 rtw89_chip_stop_sch_tx(rtwdev, mac_idx, tx_en, RTW89_SCH_TX_SEL_ALL); 1885 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false); 1886 rtw8922a_dfs_en(rtwdev, false, phy_idx); 1887 rtw8922a_tssi_cont_en_phyidx(rtwdev, false, phy_idx); 1888 rtw8922a_adc_en(rtwdev, false, phy_idx); 1889 fsleep(40); 1890 rtw8922a_bb_reset_en(rtwdev, band, false, phy_idx); 1891 } else { 1892 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true); 1893 rtw8922a_adc_en(rtwdev, true, phy_idx); 1894 rtw8922a_dfs_en(rtwdev, true, phy_idx); 1895 rtw8922a_tssi_cont_en_phyidx(rtwdev, true, phy_idx); 1896 rtw8922a_bb_reset_en(rtwdev, band, true, phy_idx); 1897 rtw89_chip_resume_sch_tx(rtwdev, mac_idx, *tx_en); 1898 } 1899 } 1900 1901 static void rtw8922a_set_channel_help(struct rtw89_dev *rtwdev, bool enter, 1902 struct rtw89_channel_help_params *p, 1903 const struct rtw89_chan *chan, 1904 enum rtw89_mac_idx mac_idx, 1905 enum rtw89_phy_idx phy_idx) 1906 { 1907 if (enter) { 1908 rtw8922a_pre_set_channel_bb(rtwdev, phy_idx); 1909 rtw8922a_pre_set_channel_rf(rtwdev, phy_idx); 1910 } 1911 1912 rtw8922a_hal_reset(rtwdev, phy_idx, mac_idx, chan->band_type, &p->tx_en, enter); 1913 1914 if (!enter) { 1915 rtw8922a_post_set_channel_bb(rtwdev, rtwdev->mlo_dbcc_mode); 1916 rtw8922a_post_set_channel_rf(rtwdev, phy_idx); 1917 } 1918 } 1919 1920 static void 
rtw8922a_rfk_init(struct rtw89_dev *rtwdev) 1921 { 1922 struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc; 1923 1924 rtwdev->is_tssi_mode[RF_PATH_A] = false; 1925 rtwdev->is_tssi_mode[RF_PATH_B] = false; 1926 memset(rfk_mcc, 0, sizeof(*rfk_mcc)); 1927 } 1928 1929 static void rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev) 1930 { 1931 rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, RTW89_PHY_0, 5); 1932 1933 rtw89_phy_rfk_dack_and_wait(rtwdev, RTW89_PHY_0, 58); 1934 rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, 32); 1935 } 1936 1937 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath) 1938 { 1939 u32 rf_mode; 1940 u8 path; 1941 int ret; 1942 1943 for (path = 0; path < RF_PATH_NUM_8922A; path++) { 1944 if (!(kpath & BIT(path))) 1945 continue; 1946 1947 ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2, 1948 2, 5000, false, rtwdev, path, 0x00, 1949 RR_MOD_MASK); 1950 rtw89_debug(rtwdev, RTW89_DBG_RFK, 1951 "[RFK] Wait S%d to Rx mode!! (ret = %d)\n", 1952 path, ret); 1953 } 1954 } 1955 1956 static void rtw8922a_rfk_channel(struct rtw89_dev *rtwdev) 1957 { 1958 enum rtw89_phy_idx phy_idx = RTW89_PHY_0; 1959 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB); 1960 u32 tx_en; 1961 1962 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_START); 1963 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL); 1964 _wait_rx_mode(rtwdev, RF_AB); 1965 1966 rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5); 1967 rtw89_phy_rfk_txgapk_and_wait(rtwdev, phy_idx, 54); 1968 rtw89_phy_rfk_iqk_and_wait(rtwdev, phy_idx, 84); 1969 rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, RTW89_TSSI_NORMAL, 6); 1970 rtw89_phy_rfk_dpk_and_wait(rtwdev, phy_idx, 34); 1971 rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, 32); 1972 1973 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en); 1974 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_STOP); 1975 } 1976 1977 static void rtw8922a_rfk_band_changed(struct rtw89_dev *rtwdev, 1978 enum rtw89_phy_idx phy_idx) 1979 { 1980 rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, RTW89_TSSI_SCAN, 6); 1981 } 1982 1983 static void rtw8922a_rfk_scan(struct rtw89_dev *rtwdev, bool start) 1984 { 1985 } 1986 1987 static void rtw8922a_rfk_track(struct rtw89_dev *rtwdev) 1988 { 1989 } 1990 1991 static void rtw8922a_set_txpwr_ref(struct rtw89_dev *rtwdev, 1992 enum rtw89_phy_idx phy_idx) 1993 { 1994 s16 ref_ofdm = 0; 1995 s16 ref_cck = 0; 1996 1997 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n"); 1998 1999 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL, 2000 B_BE_PWR_REF_CTRL_OFDM, ref_ofdm); 2001 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL, 2002 B_BE_PWR_REF_CTRL_CCK, ref_cck); 2003 } 2004 2005 static void rtw8922a_bb_tx_triangular(struct rtw89_dev *rtwdev, bool en, 2006 enum rtw89_phy_idx phy_idx) 2007 { 2008 u8 ctrl = en ? 
0x1 : 0x0; 2009 2010 rtw89_phy_write32_idx(rtwdev, R_BEDGE3, B_BEDGE_CFG, ctrl, phy_idx); 2011 } 2012 2013 static void rtw8922a_set_tx_shape(struct rtw89_dev *rtwdev, 2014 const struct rtw89_chan *chan, 2015 enum rtw89_phy_idx phy_idx) 2016 { 2017 const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms; 2018 const struct rtw89_tx_shape *tx_shape = &rfe_parms->tx_shape; 2019 u8 tx_shape_idx; 2020 u8 band, regd; 2021 2022 band = chan->band_type; 2023 regd = rtw89_regd_get(rtwdev, band); 2024 tx_shape_idx = (*tx_shape->lmt)[band][RTW89_RS_OFDM][regd]; 2025 2026 if (tx_shape_idx == 0) 2027 rtw8922a_bb_tx_triangular(rtwdev, false, phy_idx); 2028 else 2029 rtw8922a_bb_tx_triangular(rtwdev, true, phy_idx); 2030 } 2031 2032 static void rtw8922a_set_txpwr(struct rtw89_dev *rtwdev, 2033 const struct rtw89_chan *chan, 2034 enum rtw89_phy_idx phy_idx) 2035 { 2036 rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx); 2037 rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx); 2038 rtw8922a_set_tx_shape(rtwdev, chan, phy_idx); 2039 rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx); 2040 rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx); 2041 } 2042 2043 static void rtw8922a_set_txpwr_ctrl(struct rtw89_dev *rtwdev, 2044 enum rtw89_phy_idx phy_idx) 2045 { 2046 rtw8922a_set_txpwr_ref(rtwdev, phy_idx); 2047 } 2048 2049 static void rtw8922a_ctrl_trx_path(struct rtw89_dev *rtwdev, 2050 enum rtw89_rf_path tx_path, u8 tx_nss, 2051 enum rtw89_rf_path rx_path, u8 rx_nss) 2052 { 2053 enum rtw89_phy_idx phy_idx; 2054 2055 for (phy_idx = RTW89_PHY_0; phy_idx <= RTW89_PHY_1; phy_idx++) { 2056 rtw8922a_ctrl_tx_path_tmac(rtwdev, tx_path, phy_idx); 2057 rtw8922a_ctrl_rx_path_tmac(rtwdev, rx_path, phy_idx); 2058 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 2059 } 2060 } 2061 2062 static void rtw8922a_ctrl_nbtg_bt_tx(struct rtw89_dev *rtwdev, bool en, 2063 enum rtw89_phy_idx phy_idx) 2064 { 2065 if (en) { 2066 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x3, phy_idx); 2067 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A, 2068 0xf, phy_idx); 2069 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A, 2070 0x0, phy_idx); 2071 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x0, phy_idx); 2072 rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x80, phy_idx); 2073 rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x8080, phy_idx); 2074 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x34, phy_idx); 2075 rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x34, phy_idx); 2076 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x3, phy_idx); 2077 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B, 2078 0xf, phy_idx); 2079 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B, 2080 0x0, phy_idx); 2081 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x0, phy_idx); 2082 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x80, phy_idx); 2083 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x8080, phy_idx); 2084 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x34, phy_idx); 2085 rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x34, phy_idx); 2086 } else { 2087 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x0, phy_idx); 2088 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A, 2089 0x0, phy_idx); 2090 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A, 2091 0x1, phy_idx); 2092 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x1, phy_idx); 
2093 rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x1a, phy_idx); 2094 rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x2a2a, phy_idx); 2095 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x7a6, phy_idx); 2096 rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x26, phy_idx); 2097 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x0, phy_idx); 2098 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B, 2099 0x0, phy_idx); 2100 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B, 2101 0x1, phy_idx); 2102 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x1, phy_idx); 2103 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx); 2104 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x2a30, phy_idx); 2105 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x7a6, phy_idx); 2106 rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x26, phy_idx); 2107 } 2108 } 2109 2110 static void rtw8922a_bb_cfg_txrx_path(struct rtw89_dev *rtwdev) 2111 { 2112 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_SUB_ENTITY_0); 2113 enum rtw89_band band = chan->band_type; 2114 struct rtw89_hal *hal = &rtwdev->hal; 2115 u8 ntx_path = RF_PATH_AB; 2116 u32 tx_en0, tx_en1; 2117 2118 if (hal->antenna_tx == RF_A) 2119 ntx_path = RF_PATH_A; 2120 else if (hal->antenna_tx == RF_B) 2121 ntx_path = RF_PATH_B; 2122 2123 rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, true); 2124 if (rtwdev->dbcc_en) 2125 rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band, 2126 &tx_en1, true); 2127 2128 rtw8922a_ctrl_trx_path(rtwdev, ntx_path, 2, RF_PATH_AB, 2); 2129 2130 rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, false); 2131 if (rtwdev->dbcc_en) 2132 rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band, 2133 &tx_en1, false); 2134 } 2135 2136 static u8 rtw8922a_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path) 2137 { 2138 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 2139 int th; 2140 2141 /* read thermal only if debugging */ 2142 if (!rtw89_debug_is_enabled(rtwdev, RTW89_DBG_CFO | RTW89_DBG_RFK_TRACK)) 2143 return 80; 2144 2145 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1); 2146 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0); 2147 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1); 2148 2149 fsleep(200); 2150 2151 th = rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL_V1); 2152 th += (s8)info->thermal_trim[rf_path]; 2153 2154 return clamp_t(int, th, 0, U8_MAX); 2155 } 2156 2157 static void rtw8922a_btc_set_rfe(struct rtw89_dev *rtwdev) 2158 { 2159 union rtw89_btc_module_info *md = &rtwdev->btc.mdinfo; 2160 struct rtw89_btc_module_v7 *module = &md->md_v7; 2161 2162 module->rfe_type = rtwdev->efuse.rfe_type; 2163 module->kt_ver = rtwdev->hal.cv; 2164 module->bt_solo = 0; 2165 module->switch_type = BTC_SWITCH_INTERNAL; 2166 module->wa_type = 0; 2167 2168 module->ant.type = BTC_ANT_SHARED; 2169 module->ant.num = 2; 2170 module->ant.isolation = 10; 2171 module->ant.diversity = 0; 2172 module->ant.single_pos = RF_PATH_A; 2173 module->ant.btg_pos = RF_PATH_B; 2174 2175 if (module->kt_ver <= 1) 2176 module->wa_type |= BTC_WA_HFP_ZB; 2177 2178 rtwdev->btc.cx.other.type = BTC_3CX_NONE; 2179 2180 if (module->rfe_type == 0) { 2181 rtwdev->btc.dm.error.map.rfe_type0 = true; 2182 return; 2183 } 2184 2185 module->ant.num = (module->rfe_type % 2) ? 
2 : 3; 2186 2187 if (module->kt_ver == 0) 2188 module->ant.num = 2; 2189 2190 if (module->ant.num == 3) { 2191 module->ant.type = BTC_ANT_DEDICATED; 2192 module->bt_pos = BTC_BT_ALONE; 2193 } else { 2194 module->ant.type = BTC_ANT_SHARED; 2195 module->bt_pos = BTC_BT_BTG; 2196 } 2197 rtwdev->btc.btg_pos = module->ant.btg_pos; 2198 rtwdev->btc.ant_type = module->ant.type; 2199 } 2200 2201 static 2202 void rtw8922a_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val) 2203 { 2204 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group); 2205 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val); 2206 } 2207 2208 static void rtw8922a_btc_init_cfg(struct rtw89_dev *rtwdev) 2209 { 2210 struct rtw89_btc *btc = &rtwdev->btc; 2211 struct rtw89_btc_ant_info_v7 *ant = &btc->mdinfo.md_v7.ant; 2212 u32 wl_pri, path_min, path_max; 2213 u8 path; 2214 2215 /* for 1-Ant && 1-ss case: only 1-path */ 2216 if (ant->num == 1) { 2217 path_min = ant->single_pos; 2218 path_max = path_min; 2219 } else { 2220 path_min = RF_PATH_A; 2221 path_max = RF_PATH_B; 2222 } 2223 2224 path = path_min; 2225 2226 for (path = path_min; path <= path_max; path++) { 2227 /* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */ 2228 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, BIT(17)); 2229 2230 /* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU */ 2231 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_SS_GROUP, 0x5ff); 2232 2233 /* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */ 2234 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_RX_GROUP, 0x5df); 2235 2236 /* if GNT_WL = 0 && BT = Tx_group --> 2237 * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff) 2238 */ 2239 if (btc->ant_type == BTC_ANT_SHARED && btc->btg_pos == path) 2240 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x55f); 2241 else 2242 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x5ff); 2243 2244 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0); 2245 } 2246 2247 /* set WL PTA Hi-Pri: Ack-Tx, beacon-tx, Trig-frame-Tx, Null-Tx*/ 2248 wl_pri = B_BTC_RSP_ACK_HI | B_BTC_TX_BCN_HI | B_BTC_TX_TRI_HI | 2249 B_BTC_TX_NULL_HI; 2250 rtw89_write32(rtwdev, R_BTC_COEX_WL_REQ_BE, wl_pri); 2251 2252 /* set PTA break table */ 2253 rtw89_write32(rtwdev, R_BE_BT_BREAK_TABLE, BTC_BREAK_PARAM); 2254 2255 /* ZB coex table init for HFP PTA req-cmd bit-4 define issue COEX-900*/ 2256 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_0, 0xda5a5a5a); 2257 2258 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_1, 0xda5a5a5a); 2259 2260 rtw89_write32(rtwdev, R_BTC_ZB_BREAK_TBL, 0xf0ffffff); 2261 btc->cx.wl.status.map.init_ok = true; 2262 } 2263 2264 static void 2265 rtw8922a_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val) 2266 { 2267 u16 ctrl_all_time = u32_get_bits(txpwr_val, GENMASK(15, 0)); 2268 u16 ctrl_gnt_bt = u32_get_bits(txpwr_val, GENMASK(31, 16)); 2269 2270 switch (ctrl_all_time) { 2271 case 0xffff: 2272 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2273 B_BE_FORCE_PWR_BY_RATE_EN, 0x0); 2274 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2275 B_BE_FORCE_PWR_BY_RATE_VAL, 0x0); 2276 break; 2277 default: 2278 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2279 B_BE_FORCE_PWR_BY_RATE_VAL, ctrl_all_time); 2280 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2281 B_BE_FORCE_PWR_BY_RATE_EN, 0x1); 2282 break; 2283 } 2284 2285 switch (ctrl_gnt_bt) { 2286 case 0xffff: 2287 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL, 2288 
B_BE_PWR_BT_EN, 0x0); 2289 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL, 2290 B_BE_PWR_BT_VAL, 0x0); 2291 break; 2292 default: 2293 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL, 2294 B_BE_PWR_BT_VAL, ctrl_gnt_bt); 2295 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL, 2296 B_BE_PWR_BT_EN, 0x1); 2297 break; 2298 } 2299 } 2300 2301 static 2302 s8 rtw8922a_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val) 2303 { 2304 return clamp_t(s8, val, -100, 0) + 100; 2305 } 2306 2307 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_ul[] = { 2308 {255, 0, 0, 7}, /* 0 -> original */ 2309 {255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */ 2310 {255, 0, 0, 7}, /* 2 ->reserved for shared-antenna */ 2311 {255, 0, 0, 7}, /* 3- >reserved for shared-antenna */ 2312 {255, 0, 0, 7}, /* 4 ->reserved for shared-antenna */ 2313 {255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */ 2314 {6, 1, 0, 7}, 2315 {13, 1, 0, 7}, 2316 {13, 1, 0, 7} 2317 }; 2318 2319 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_dl[] = { 2320 {255, 0, 0, 7}, /* 0 -> original */ 2321 {255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */ 2322 {255, 0, 0, 7}, /* 2 ->reserved for shared-antenna */ 2323 {255, 0, 0, 7}, /* 3- >reserved for shared-antenna */ 2324 {255, 0, 0, 7}, /* 4 ->reserved for shared-antenna */ 2325 {255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */ 2326 {255, 1, 0, 7}, 2327 {255, 1, 0, 7}, 2328 {255, 1, 0, 7} 2329 }; 2330 2331 static const u8 rtw89_btc_8922a_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30}; 2332 static const u8 rtw89_btc_8922a_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {50, 40, 30, 20}; 2333 2334 static const struct rtw89_btc_fbtc_mreg rtw89_btc_8922a_mon_reg[] = { 2335 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe300), 2336 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe320), 2337 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe324), 2338 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe328), 2339 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe32c), 2340 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe330), 2341 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe334), 2342 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe338), 2343 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe344), 2344 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe348), 2345 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe34c), 2346 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe350), 2347 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a2c), 2348 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a50), 2349 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980), 2350 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x660), 2351 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x1660), 2352 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x418c), 2353 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x518c), 2354 }; 2355 2356 static 2357 void rtw8922a_btc_update_bt_cnt(struct rtw89_dev *rtwdev) 2358 { 2359 /* Feature move to firmware */ 2360 } 2361 2362 static 2363 void rtw8922a_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state) 2364 { 2365 if (!state) { 2366 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000); 2367 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1); 2368 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110); 2369 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x01018); 2370 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000); 2371 2372 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000); 2373 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1); 2374 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110); 2375 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, 
RFREG_MASK, 0x01018); 2376 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000); 2377 } else { 2378 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000); 2379 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1); 2380 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110); 2381 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x09018); 2382 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000); 2383 2384 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000); 2385 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1); 2386 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110); 2387 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x09018); 2388 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000); 2389 } 2390 } 2391 2392 static void rtw8922a_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level) 2393 { 2394 } 2395 2396 static void rtw8922a_fill_freq_with_ppdu(struct rtw89_dev *rtwdev, 2397 struct rtw89_rx_phy_ppdu *phy_ppdu, 2398 struct ieee80211_rx_status *status) 2399 { 2400 u8 chan_idx = phy_ppdu->chan_idx; 2401 enum nl80211_band band; 2402 u8 ch; 2403 2404 if (chan_idx == 0) 2405 return; 2406 2407 rtw89_decode_chan_idx(rtwdev, chan_idx, &ch, &band); 2408 status->freq = ieee80211_channel_to_frequency(ch, band); 2409 status->band = band; 2410 } 2411 2412 static void rtw8922a_query_ppdu(struct rtw89_dev *rtwdev, 2413 struct rtw89_rx_phy_ppdu *phy_ppdu, 2414 struct ieee80211_rx_status *status) 2415 { 2416 u8 path; 2417 u8 *rx_power = phy_ppdu->rssi; 2418 2419 status->signal = 2420 RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B])); 2421 for (path = 0; path < rtwdev->chip->rf_path_num; path++) { 2422 status->chains |= BIT(path); 2423 status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]); 2424 } 2425 if (phy_ppdu->valid) 2426 rtw8922a_fill_freq_with_ppdu(rtwdev, phy_ppdu, status); 2427 } 2428 2429 static int rtw8922a_mac_enable_bb_rf(struct rtw89_dev *rtwdev) 2430 { 2431 rtw89_write8_set(rtwdev, R_BE_FEN_RST_ENABLE, 2432 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN); 2433 rtw89_write32(rtwdev, R_BE_DMAC_SYS_CR32B, 0x7FF97FF9); 2434 2435 return 0; 2436 } 2437 2438 static int rtw8922a_mac_disable_bb_rf(struct rtw89_dev *rtwdev) 2439 { 2440 rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, 2441 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN); 2442 2443 return 0; 2444 } 2445 2446 #ifdef CONFIG_PM 2447 static const struct wiphy_wowlan_support rtw_wowlan_stub_8922a = { 2448 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT, 2449 .n_patterns = RTW89_MAX_PATTERN_NUM, 2450 .pattern_max_len = RTW89_MAX_PATTERN_SIZE, 2451 .pattern_min_len = 1, 2452 }; 2453 #endif 2454 2455 static const struct rtw89_chip_ops rtw8922a_chip_ops = { 2456 .enable_bb_rf = rtw8922a_mac_enable_bb_rf, 2457 .disable_bb_rf = rtw8922a_mac_disable_bb_rf, 2458 .bb_preinit = rtw8922a_bb_preinit, 2459 .bb_postinit = rtw8922a_bb_postinit, 2460 .bb_reset = rtw8922a_bb_reset, 2461 .bb_sethw = rtw8922a_bb_sethw, 2462 .read_rf = rtw89_phy_read_rf_v2, 2463 .write_rf = rtw89_phy_write_rf_v2, 2464 .set_channel = rtw8922a_set_channel, 2465 .set_channel_help = rtw8922a_set_channel_help, 2466 .read_efuse = rtw8922a_read_efuse, 2467 .read_phycap = rtw8922a_read_phycap, 2468 .fem_setup = NULL, 2469 .rfe_gpio = NULL, 2470 .rfk_hw_init = rtw8922a_rfk_hw_init, 2471 .rfk_init = rtw8922a_rfk_init, 2472 .rfk_init_late = rtw8922a_rfk_init_late, 2473 .rfk_channel = rtw8922a_rfk_channel, 2474 .rfk_band_changed = 
rtw8922a_rfk_band_changed, 2475 .rfk_scan = rtw8922a_rfk_scan, 2476 .rfk_track = rtw8922a_rfk_track, 2477 .power_trim = rtw8922a_power_trim, 2478 .set_txpwr = rtw8922a_set_txpwr, 2479 .set_txpwr_ctrl = rtw8922a_set_txpwr_ctrl, 2480 .init_txpwr_unit = NULL, 2481 .get_thermal = rtw8922a_get_thermal, 2482 .ctrl_btg_bt_rx = rtw8922a_ctrl_btg_bt_rx, 2483 .query_ppdu = rtw8922a_query_ppdu, 2484 .ctrl_nbtg_bt_tx = rtw8922a_ctrl_nbtg_bt_tx, 2485 .cfg_txrx_path = rtw8922a_bb_cfg_txrx_path, 2486 .set_txpwr_ul_tb_offset = NULL, 2487 .pwr_on_func = rtw8922a_pwr_on_func, 2488 .pwr_off_func = rtw8922a_pwr_off_func, 2489 .query_rxdesc = rtw89_core_query_rxdesc_v2, 2490 .fill_txdesc = rtw89_core_fill_txdesc_v2, 2491 .fill_txdesc_fwcmd = rtw89_core_fill_txdesc_fwcmd_v2, 2492 .cfg_ctrl_path = rtw89_mac_cfg_ctrl_path_v2, 2493 .mac_cfg_gnt = rtw89_mac_cfg_gnt_v2, 2494 .stop_sch_tx = rtw89_mac_stop_sch_tx_v2, 2495 .resume_sch_tx = rtw89_mac_resume_sch_tx_v2, 2496 .h2c_dctl_sec_cam = rtw89_fw_h2c_dctl_sec_cam_v2, 2497 .h2c_default_cmac_tbl = rtw89_fw_h2c_default_cmac_tbl_g7, 2498 .h2c_assoc_cmac_tbl = rtw89_fw_h2c_assoc_cmac_tbl_g7, 2499 .h2c_ampdu_cmac_tbl = rtw89_fw_h2c_ampdu_cmac_tbl_g7, 2500 .h2c_default_dmac_tbl = rtw89_fw_h2c_default_dmac_tbl_v2, 2501 .h2c_update_beacon = rtw89_fw_h2c_update_beacon_be, 2502 .h2c_ba_cam = rtw89_fw_h2c_ba_cam_v1, 2503 2504 .btc_set_rfe = rtw8922a_btc_set_rfe, 2505 .btc_init_cfg = rtw8922a_btc_init_cfg, 2506 .btc_set_wl_pri = NULL, 2507 .btc_set_wl_txpwr_ctrl = rtw8922a_btc_set_wl_txpwr_ctrl, 2508 .btc_get_bt_rssi = rtw8922a_btc_get_bt_rssi, 2509 .btc_update_bt_cnt = rtw8922a_btc_update_bt_cnt, 2510 .btc_wl_s1_standby = rtw8922a_btc_wl_s1_standby, 2511 .btc_set_wl_rx_gain = rtw8922a_btc_set_wl_rx_gain, 2512 .btc_set_policy = rtw89_btc_set_policy_v1, 2513 }; 2514 2515 const struct rtw89_chip_info rtw8922a_chip_info = { 2516 .chip_id = RTL8922A, 2517 .chip_gen = RTW89_CHIP_BE, 2518 .ops = &rtw8922a_chip_ops, 2519 .mac_def = &rtw89_mac_gen_be, 2520 .phy_def = &rtw89_phy_gen_be, 2521 .fw_basename = RTW8922A_FW_BASENAME, 2522 .fw_format_max = RTW8922A_FW_FORMAT_MAX, 2523 .try_ce_fw = false, 2524 .bbmcu_nr = 1, 2525 .needed_fw_elms = RTW89_BE_GEN_DEF_NEEDED_FW_ELEMENTS, 2526 .fifo_size = 589824, 2527 .small_fifo_size = false, 2528 .dle_scc_rsvd_size = 0, 2529 .max_amsdu_limit = 8000, 2530 .dis_2g_40m_ul_ofdma = false, 2531 .rsvd_ple_ofst = 0x8f800, 2532 .hfc_param_ini = rtw8922a_hfc_param_ini_pcie, 2533 .dle_mem = rtw8922a_dle_mem_pcie, 2534 .wde_qempty_acq_grpnum = 4, 2535 .wde_qempty_mgq_grpsel = 4, 2536 .rf_base_addr = {0xe000, 0xf000}, 2537 .pwr_on_seq = NULL, 2538 .pwr_off_seq = NULL, 2539 .bb_table = NULL, 2540 .bb_gain_table = NULL, 2541 .rf_table = {}, 2542 .nctl_table = NULL, 2543 .nctl_post_table = NULL, 2544 .dflt_parms = NULL, /* load parm from fw */ 2545 .rfe_parms_conf = NULL, /* load parm from fw */ 2546 .txpwr_factor_rf = 2, 2547 .txpwr_factor_mac = 1, 2548 .dig_table = NULL, 2549 .dig_regs = &rtw8922a_dig_regs, 2550 .tssi_dbw_table = NULL, 2551 .support_macid_num = 32, 2552 .support_chanctx_num = 2, 2553 .support_rnr = true, 2554 .support_bands = BIT(NL80211_BAND_2GHZ) | 2555 BIT(NL80211_BAND_5GHZ) | 2556 BIT(NL80211_BAND_6GHZ), 2557 .support_bandwidths = BIT(NL80211_CHAN_WIDTH_20) | 2558 BIT(NL80211_CHAN_WIDTH_40) | 2559 BIT(NL80211_CHAN_WIDTH_80) | 2560 BIT(NL80211_CHAN_WIDTH_160), 2561 .support_unii4 = true, 2562 .ul_tb_waveform_ctrl = false, 2563 .ul_tb_pwr_diff = false, 2564 .hw_sec_hdr = true, 2565 .rf_path_num = 2, 2566 .tx_nss = 2, 2567 .rx_nss = 2, 2568 
.acam_num = 128, 2569 .bcam_num = 20, 2570 .scam_num = 32, 2571 .bacam_num = 24, 2572 .bacam_dynamic_num = 8, 2573 .bacam_ver = RTW89_BACAM_V1, 2574 .ppdu_max_usr = 16, 2575 .sec_ctrl_efuse_size = 4, 2576 .physical_efuse_size = 0x1300, 2577 .logical_efuse_size = 0x70000, 2578 .limit_efuse_size = 0x40000, 2579 .dav_phy_efuse_size = 0, 2580 .dav_log_efuse_size = 0, 2581 .efuse_blocks = rtw8922a_efuse_blocks, 2582 .phycap_addr = 0x1700, 2583 .phycap_size = 0x38, 2584 .para_ver = 0xf, 2585 .wlcx_desired = 0x07110000, 2586 .btcx_desired = 0x7, 2587 .scbd = 0x1, 2588 .mailbox = 0x1, 2589 2590 .afh_guard_ch = 6, 2591 .wl_rssi_thres = rtw89_btc_8922a_wl_rssi_thres, 2592 .bt_rssi_thres = rtw89_btc_8922a_bt_rssi_thres, 2593 .rssi_tol = 2, 2594 .mon_reg_num = ARRAY_SIZE(rtw89_btc_8922a_mon_reg), 2595 .mon_reg = rtw89_btc_8922a_mon_reg, 2596 .rf_para_ulink_num = ARRAY_SIZE(rtw89_btc_8922a_rf_ul), 2597 .rf_para_ulink = rtw89_btc_8922a_rf_ul, 2598 .rf_para_dlink_num = ARRAY_SIZE(rtw89_btc_8922a_rf_dl), 2599 .rf_para_dlink = rtw89_btc_8922a_rf_dl, 2600 .ps_mode_supported = BIT(RTW89_PS_MODE_RFOFF) | 2601 BIT(RTW89_PS_MODE_CLK_GATED) | 2602 BIT(RTW89_PS_MODE_PWR_GATED), 2603 .low_power_hci_modes = 0, 2604 .h2c_cctl_func_id = H2C_FUNC_MAC_CCTLINFO_UD_G7, 2605 .hci_func_en_addr = R_BE_HCI_FUNC_EN, 2606 .h2c_desc_size = sizeof(struct rtw89_rxdesc_short_v2), 2607 .txwd_body_size = sizeof(struct rtw89_txwd_body_v2), 2608 .txwd_info_size = sizeof(struct rtw89_txwd_info_v2), 2609 .h2c_ctrl_reg = R_BE_H2CREG_CTRL, 2610 .h2c_counter_reg = {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8}, 2611 .h2c_regs = rtw8922a_h2c_regs, 2612 .c2h_ctrl_reg = R_BE_C2HREG_CTRL, 2613 .c2h_counter_reg = {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8}, 2614 .c2h_regs = rtw8922a_c2h_regs, 2615 .page_regs = &rtw8922a_page_regs, 2616 .wow_reason_reg = rtw8922a_wow_wakeup_regs, 2617 .cfo_src_fd = true, 2618 .cfo_hw_comp = true, 2619 .dcfo_comp = NULL, 2620 .dcfo_comp_sft = 0, 2621 .imr_info = NULL, 2622 .imr_dmac_table = &rtw8922a_imr_dmac_table, 2623 .imr_cmac_table = &rtw8922a_imr_cmac_table, 2624 .rrsr_cfgs = &rtw8922a_rrsr_cfgs, 2625 .bss_clr_vld = {R_BSS_CLR_VLD_V2, B_BSS_CLR_VLD0_V2}, 2626 .bss_clr_map_reg = R_BSS_CLR_MAP_V2, 2627 .dma_ch_mask = 0, 2628 .edcca_regs = &rtw8922a_edcca_regs, 2629 #ifdef CONFIG_PM 2630 .wowlan_stub = &rtw_wowlan_stub_8922a, 2631 #endif 2632 .xtal_info = NULL, 2633 }; 2634 EXPORT_SYMBOL(rtw8922a_chip_info); 2635 2636 MODULE_FIRMWARE(RTW8922A_MODULE_FIRMWARE); 2637 MODULE_AUTHOR("Realtek Corporation"); 2638 MODULE_DESCRIPTION("Realtek 802.11be wireless 8922A driver"); 2639 MODULE_LICENSE("Dual BSD/GPL"); 2640