1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause 2 /* Copyright(c) 2023 Realtek Corporation 3 */ 4 5 #include "coex.h" 6 #include "debug.h" 7 #include "efuse.h" 8 #include "fw.h" 9 #include "mac.h" 10 #include "phy.h" 11 #include "reg.h" 12 #include "rtw8922a.h" 13 #include "rtw8922a_rfk.h" 14 #include "util.h" 15 16 #define RTW8922A_FW_FORMAT_MAX 1 17 #define RTW8922A_FW_BASENAME "rtw89/rtw8922a_fw" 18 #define RTW8922A_MODULE_FIRMWARE \ 19 RTW8922A_FW_BASENAME "-" __stringify(RTW8922A_FW_FORMAT_MAX) ".bin" 20 21 #define HE_N_USER_MAX_8922A 4 22 23 static const struct rtw89_hfc_ch_cfg rtw8922a_hfc_chcfg_pcie[] = { 24 {2, 1641, grp_0}, /* ACH 0 */ 25 {2, 1641, grp_0}, /* ACH 1 */ 26 {2, 1641, grp_0}, /* ACH 2 */ 27 {2, 1641, grp_0}, /* ACH 3 */ 28 {2, 1641, grp_1}, /* ACH 4 */ 29 {2, 1641, grp_1}, /* ACH 5 */ 30 {2, 1641, grp_1}, /* ACH 6 */ 31 {2, 1641, grp_1}, /* ACH 7 */ 32 {2, 1641, grp_0}, /* B0MGQ */ 33 {2, 1641, grp_0}, /* B0HIQ */ 34 {2, 1641, grp_1}, /* B1MGQ */ 35 {2, 1641, grp_1}, /* B1HIQ */ 36 {0, 0, 0}, /* FWCMDQ */ 37 {0, 0, 0}, /* BMC */ 38 {0, 0, 0}, /* H2D */ 39 }; 40 41 static const struct rtw89_hfc_pub_cfg rtw8922a_hfc_pubcfg_pcie = { 42 1651, /* Group 0 */ 43 1651, /* Group 1 */ 44 3302, /* Public Max */ 45 0, /* WP threshold */ 46 }; 47 48 static const struct rtw89_hfc_param_ini rtw8922a_hfc_param_ini_pcie[] = { 49 [RTW89_QTA_SCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie, 50 &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH}, 51 [RTW89_QTA_DBCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie, 52 &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH}, 53 [RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_prec_cfg_c2, 54 RTW89_HCIFC_POH}, 55 [RTW89_QTA_INVALID] = {NULL}, 56 }; 57 58 static const struct rtw89_dle_mem rtw8922a_dle_mem_pcie[] = { 59 [RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size0_v1, 60 &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1, 61 &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0, 62 &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0, 63 &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0}, 64 [RTW89_QTA_DBCC] = {RTW89_QTA_DBCC, &rtw89_mac_size.wde_size0_v1, 65 &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1, 66 &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0, 67 &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0, 68 &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0}, 69 [RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size4_v1, 70 &rtw89_mac_size.ple_size3_v1, &rtw89_mac_size.wde_qt4, 71 &rtw89_mac_size.wde_qt4, &rtw89_mac_size.ple_qt9, 72 &rtw89_mac_size.ple_qt9, &rtw89_mac_size.ple_rsvd_qt1, 73 &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0}, 74 [RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL, 75 NULL}, 76 }; 77 78 static const u32 rtw8922a_h2c_regs[RTW89_H2CREG_MAX] = { 79 R_BE_H2CREG_DATA0, R_BE_H2CREG_DATA1, R_BE_H2CREG_DATA2, 80 R_BE_H2CREG_DATA3 81 }; 82 83 static const u32 rtw8922a_c2h_regs[RTW89_H2CREG_MAX] = { 84 R_BE_C2HREG_DATA0, R_BE_C2HREG_DATA1, R_BE_C2HREG_DATA2, 85 R_BE_C2HREG_DATA3 86 }; 87 88 static const u32 rtw8922a_wow_wakeup_regs[RTW89_WOW_REASON_NUM] = { 89 R_AX_C2HREG_DATA3_V1 + 3, R_BE_DBG_WOW, 90 }; 91 92 static const struct rtw89_page_regs rtw8922a_page_regs = { 93 .hci_fc_ctrl = R_BE_HCI_FC_CTRL, 94 .ch_page_ctrl = R_BE_CH_PAGE_CTRL, 95 .ach_page_ctrl = R_BE_CH0_PAGE_CTRL, 96 .ach_page_info = R_BE_CH0_PAGE_INFO, 97 .pub_page_info3 = R_BE_PUB_PAGE_INFO3, 98 .pub_page_ctrl1 = R_BE_PUB_PAGE_CTRL1, 99 .pub_page_ctrl2 = R_BE_PUB_PAGE_CTRL2, 100 .pub_page_info1 = R_BE_PUB_PAGE_INFO1, 101 .pub_page_info2 = R_BE_PUB_PAGE_INFO2, 102 .wp_page_ctrl1 = R_BE_WP_PAGE_CTRL1, 103 .wp_page_ctrl2 = R_BE_WP_PAGE_CTRL2, 104 .wp_page_info1 = R_BE_WP_PAGE_INFO1, 105 }; 106 107 static const struct rtw89_reg_imr rtw8922a_imr_dmac_regs[] = { 108 {R_BE_DISP_HOST_IMR, B_BE_DISP_HOST_IMR_CLR, B_BE_DISP_HOST_IMR_SET}, 109 {R_BE_DISP_CPU_IMR, B_BE_DISP_CPU_IMR_CLR, B_BE_DISP_CPU_IMR_SET}, 110 {R_BE_DISP_OTHER_IMR, B_BE_DISP_OTHER_IMR_CLR, B_BE_DISP_OTHER_IMR_SET}, 111 {R_BE_PKTIN_ERR_IMR, B_BE_PKTIN_ERR_IMR_CLR, B_BE_PKTIN_ERR_IMR_SET}, 112 {R_BE_INTERRUPT_MASK_REG, B_BE_INTERRUPT_MASK_REG_CLR, B_BE_INTERRUPT_MASK_REG_SET}, 113 {R_BE_MLO_ERR_IDCT_IMR, B_BE_MLO_ERR_IDCT_IMR_CLR, B_BE_MLO_ERR_IDCT_IMR_SET}, 114 {R_BE_MPDU_TX_ERR_IMR, B_BE_MPDU_TX_ERR_IMR_CLR, B_BE_MPDU_TX_ERR_IMR_SET}, 115 {R_BE_MPDU_RX_ERR_IMR, B_BE_MPDU_RX_ERR_IMR_CLR, B_BE_MPDU_RX_ERR_IMR_SET}, 116 {R_BE_SEC_ERROR_IMR, B_BE_SEC_ERROR_IMR_CLR, B_BE_SEC_ERROR_IMR_SET}, 117 {R_BE_CPUIO_ERR_IMR, B_BE_CPUIO_ERR_IMR_CLR, B_BE_CPUIO_ERR_IMR_SET}, 118 {R_BE_WDE_ERR_IMR, B_BE_WDE_ERR_IMR_CLR, B_BE_WDE_ERR_IMR_SET}, 119 {R_BE_WDE_ERR1_IMR, B_BE_WDE_ERR1_IMR_CLR, B_BE_WDE_ERR1_IMR_SET}, 120 {R_BE_PLE_ERR_IMR, B_BE_PLE_ERR_IMR_CLR, B_BE_PLE_ERR_IMR_SET}, 121 {R_BE_PLE_ERRFLAG1_IMR, B_BE_PLE_ERRFLAG1_IMR_CLR, B_BE_PLE_ERRFLAG1_IMR_SET}, 122 {R_BE_WDRLS_ERR_IMR, B_BE_WDRLS_ERR_IMR_CLR, B_BE_WDRLS_ERR_IMR_SET}, 123 {R_BE_TXPKTCTL_B0_ERRFLAG_IMR, B_BE_TXPKTCTL_B0_ERRFLAG_IMR_CLR, 124 B_BE_TXPKTCTL_B0_ERRFLAG_IMR_SET}, 125 {R_BE_TXPKTCTL_B1_ERRFLAG_IMR, B_BE_TXPKTCTL_B1_ERRFLAG_IMR_CLR, 126 B_BE_TXPKTCTL_B1_ERRFLAG_IMR_SET}, 127 {R_BE_BBRPT_COM_ERR_IMR, B_BE_BBRPT_COM_ERR_IMR_CLR, B_BE_BBRPT_COM_ERR_IMR_SET}, 128 {R_BE_BBRPT_CHINFO_ERR_IMR, B_BE_BBRPT_CHINFO_ERR_IMR_CLR, 129 B_BE_BBRPT_CHINFO_ERR_IMR_SET}, 130 {R_BE_BBRPT_DFS_ERR_IMR, B_BE_BBRPT_DFS_ERR_IMR_CLR, B_BE_BBRPT_DFS_ERR_IMR_SET}, 131 {R_BE_LA_ERRFLAG_IMR, B_BE_LA_ERRFLAG_IMR_CLR, B_BE_LA_ERRFLAG_IMR_SET}, 132 {R_BE_CH_INFO_DBGFLAG_IMR, B_BE_CH_INFO_DBGFLAG_IMR_CLR, B_BE_CH_INFO_DBGFLAG_IMR_SET}, 133 {R_BE_PLRLS_ERR_IMR, B_BE_PLRLS_ERR_IMR_CLR, B_BE_PLRLS_ERR_IMR_SET}, 134 {R_BE_HAXI_IDCT_MSK, B_BE_HAXI_IDCT_MSK_CLR, B_BE_HAXI_IDCT_MSK_SET}, 135 }; 136 137 static const struct rtw89_imr_table rtw8922a_imr_dmac_table = { 138 .regs = rtw8922a_imr_dmac_regs, 139 .n_regs = ARRAY_SIZE(rtw8922a_imr_dmac_regs), 140 }; 141 142 static const struct rtw89_reg_imr rtw8922a_imr_cmac_regs[] = { 143 {R_BE_RESP_IMR, B_BE_RESP_IMR_CLR, B_BE_RESP_IMR_SET}, 144 {R_BE_RX_ERROR_FLAG_IMR, B_BE_RX_ERROR_FLAG_IMR_CLR, B_BE_RX_ERROR_FLAG_IMR_SET}, 145 {R_BE_TX_ERROR_FLAG_IMR, B_BE_TX_ERROR_FLAG_IMR_CLR, B_BE_TX_ERROR_FLAG_IMR_SET}, 146 {R_BE_RX_ERROR_FLAG_IMR_1, B_BE_TX_ERROR_FLAG_IMR_1_CLR, B_BE_TX_ERROR_FLAG_IMR_1_SET}, 147 {R_BE_PTCL_IMR1, B_BE_PTCL_IMR1_CLR, B_BE_PTCL_IMR1_SET}, 148 {R_BE_PTCL_IMR0, B_BE_PTCL_IMR0_CLR, B_BE_PTCL_IMR0_SET}, 149 {R_BE_PTCL_IMR_2, B_BE_PTCL_IMR_2_CLR, B_BE_PTCL_IMR_2_SET}, 150 {R_BE_SCHEDULE_ERR_IMR, B_BE_SCHEDULE_ERR_IMR_CLR, B_BE_SCHEDULE_ERR_IMR_SET}, 151 {R_BE_C0_TXPWR_IMR, B_BE_C0_TXPWR_IMR_CLR, B_BE_C0_TXPWR_IMR_SET}, 152 {R_BE_TRXPTCL_ERROR_INDICA_MASK, B_BE_TRXPTCL_ERROR_INDICA_MASK_CLR, 153 B_BE_TRXPTCL_ERROR_INDICA_MASK_SET}, 154 {R_BE_RX_ERR_IMR, B_BE_RX_ERR_IMR_CLR, B_BE_RX_ERR_IMR_SET}, 155 {R_BE_PHYINFO_ERR_IMR_V1, B_BE_PHYINFO_ERR_IMR_V1_CLR, B_BE_PHYINFO_ERR_IMR_V1_SET}, 156 }; 157 158 static const struct rtw89_imr_table rtw8922a_imr_cmac_table = { 159 .regs = rtw8922a_imr_cmac_regs, 160 .n_regs = ARRAY_SIZE(rtw8922a_imr_cmac_regs), 161 }; 162 163 static const struct rtw89_rrsr_cfgs rtw8922a_rrsr_cfgs = { 164 .ref_rate = {R_BE_TRXPTCL_RESP_1, B_BE_WMAC_RESP_REF_RATE_SEL, 0}, 165 .rsc = {R_BE_PTCL_RRSR1, B_BE_RSC_MASK, 2}, 166 }; 167 168 static const struct rtw89_rfkill_regs rtw8922a_rfkill_regs = { 169 .pinmux = {R_BE_GPIO8_15_FUNC_SEL, 170 B_BE_PINMUX_GPIO9_FUNC_SEL_MASK, 171 0xf}, 172 .mode = {R_BE_GPIO_EXT_CTRL + 2, 173 (B_BE_GPIO_MOD_9 | B_BE_GPIO_IO_SEL_9) >> 16, 174 0x0}, 175 }; 176 177 static const struct rtw89_dig_regs rtw8922a_dig_regs = { 178 .seg0_pd_reg = R_SEG0R_PD_V2, 179 .pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK, 180 .pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1, 181 .bmode_pd_reg = R_BMODE_PDTH_EN_V2, 182 .bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1, 183 .bmode_pd_lower_bound_reg = R_BMODE_PDTH_V2, 184 .bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1, 185 .p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK}, 186 .p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK}, 187 .p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1}, 188 .p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1}, 189 .p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1}, 190 .p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1}, 191 .p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V3, 192 B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK}, 193 .p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V3, 194 B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK}, 195 .p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V3, 196 B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK}, 197 .p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V3, 198 B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK}, 199 }; 200 201 static const struct rtw89_edcca_regs rtw8922a_edcca_regs = { 202 .edcca_level = R_SEG0R_EDCCA_LVL_BE, 203 .edcca_mask = B_EDCCA_LVL_MSK0, 204 .edcca_p_mask = B_EDCCA_LVL_MSK1, 205 .ppdu_level = R_SEG0R_PPDU_LVL_BE, 206 .ppdu_mask = B_EDCCA_LVL_MSK1, 207 .rpt_a = R_EDCCA_RPT_A_BE, 208 .rpt_b = R_EDCCA_RPT_B_BE, 209 .rpt_sel = R_EDCCA_RPT_SEL_BE, 210 .rpt_sel_mask = B_EDCCA_RPT_SEL_MSK, 211 .rpt_sel_be = R_EDCCA_RPTREG_SEL_BE, 212 .rpt_sel_be_mask = B_EDCCA_RPTREG_SEL_BE_MSK, 213 .tx_collision_t2r_st = R_TX_COLLISION_T2R_ST_BE, 214 .tx_collision_t2r_st_mask = B_TX_COLLISION_T2R_ST_BE_M, 215 }; 216 217 static const struct rtw89_efuse_block_cfg rtw8922a_efuse_blocks[] = { 218 [RTW89_EFUSE_BLOCK_SYS] = {.offset = 0x00000, .size = 0x310}, 219 [RTW89_EFUSE_BLOCK_RF] = {.offset = 0x10000, .size = 0x240}, 220 [RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO] = {.offset = 0x20000, .size = 0x4800}, 221 [RTW89_EFUSE_BLOCK_HCI_DIG_USB] = {.offset = 0x30000, .size = 0x890}, 222 [RTW89_EFUSE_BLOCK_HCI_PHY_PCIE] = {.offset = 0x40000, .size = 0x200}, 223 [RTW89_EFUSE_BLOCK_HCI_PHY_USB3] = {.offset = 0x50000, .size = 0x80}, 224 [RTW89_EFUSE_BLOCK_HCI_PHY_USB2] = {.offset = 0x60000, .size = 0x0}, 225 [RTW89_EFUSE_BLOCK_ADIE] = {.offset = 0x70000, .size = 0x10}, 226 }; 227 228 static void rtw8922a_ctrl_btg_bt_rx(struct rtw89_dev *rtwdev, bool en, 229 enum rtw89_phy_idx phy_idx) 230 { 231 if (en) { 232 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x1, phy_idx); 233 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx); 234 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x1, phy_idx); 235 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x1, phy_idx); 236 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx); 237 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x30, phy_idx); 238 rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0, phy_idx); 239 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x1, phy_idx); 240 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x2, phy_idx); 241 rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN, 242 0x1, phy_idx); 243 } else { 244 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x0, phy_idx); 245 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx); 246 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x0, phy_idx); 247 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x0, phy_idx); 248 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x1a, phy_idx); 249 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x2a, phy_idx); 250 rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xc, phy_idx); 251 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x0, phy_idx); 252 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x0, phy_idx); 253 rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN, 254 0x0, phy_idx); 255 } 256 } 257 258 static int rtw8922a_pwr_on_func(struct rtw89_dev *rtwdev) 259 { 260 struct rtw89_hal *hal = &rtwdev->hal; 261 u32 val32; 262 int ret; 263 264 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_AFSM_WLSUS_EN | 265 B_BE_AFSM_PCIE_SUS_EN); 266 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_DIS_WLBT_PDNSUSEN_SOPC); 267 rtw89_write32_set(rtwdev, R_BE_WLLPS_CTRL, B_BE_DIS_WLBT_LPSEN_LOPC); 268 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APDM_HPDN); 269 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS); 270 271 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_RDY_SYSPWR, 272 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL); 273 if (ret) 274 return ret; 275 276 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON); 277 rtw89_write32_set(rtwdev, R_BE_WLRESUME_CTRL, B_BE_LPSROP_CMAC0 | 278 B_BE_LPSROP_CMAC1); 279 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFN_ONMAC); 280 281 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFN_ONMAC), 282 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL); 283 if (ret) 284 return ret; 285 286 rtw89_write32_clr(rtwdev, R_BE_AFE_ON_CTRL1, B_BE_REG_CK_MON_CK960M_EN); 287 rtw89_write8_set(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 | 288 B_BE_POW_PC_LDO_PORT1); 289 rtw89_write32_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP | 290 B_BE_R_SYM_ISO_ADDA_P12PP); 291 rtw89_write8_set(rtwdev, R_BE_PLATFORM_ENABLE, B_BE_PLATFORM_EN); 292 rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN); 293 294 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HAXIDMA_IO_ST, 295 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL); 296 if (ret) 297 return ret; 298 299 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST), 300 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL); 301 if (ret) 302 return ret; 303 304 rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN); 305 306 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HCI_WLAN_IO_ST, 307 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL); 308 if (ret) 309 return ret; 310 311 rtw89_write32_clr(rtwdev, R_BE_SYS_SDIO_CTRL, B_BE_PCIE_FORCE_IBX_EN); 312 313 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0x02); 314 if (ret) 315 return ret; 316 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x01, 0x01); 317 if (ret) 318 return ret; 319 320 rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3); 321 322 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x40, 0x40); 323 if (ret) 324 return ret; 325 326 rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3); 327 328 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x20, 0x20); 329 if (ret) 330 return ret; 331 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x04, 0x04); 332 if (ret) 333 return ret; 334 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x08, 0x08); 335 if (ret) 336 return ret; 337 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x10); 338 if (ret) 339 return ret; 340 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xEB, 0xFF); 341 if (ret) 342 return ret; 343 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xEB, 0xFF); 344 if (ret) 345 return ret; 346 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x01, 0x01); 347 if (ret) 348 return ret; 349 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x02, 0x02); 350 if (ret) 351 return ret; 352 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x80); 353 if (ret) 354 return ret; 355 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF1, 0, 0x40); 356 if (ret) 357 return ret; 358 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF2, 0, 0x40); 359 if (ret) 360 return ret; 361 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL_1, 0x40, 0x60); 362 if (ret) 363 return ret; 364 365 if (hal->cv != CHIP_CAV) { 366 rtw89_write32_set(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK); 367 rtw89_write32_set(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_ISO_EB2CORE); 368 rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_B); 369 370 mdelay(1); 371 372 rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_S); 373 rtw89_write32_clr(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK); 374 } 375 376 rtw89_write32_set(rtwdev, R_BE_DMAC_FUNC_EN, 377 B_BE_MAC_FUNC_EN | B_BE_DMAC_FUNC_EN | B_BE_MPDU_PROC_EN | 378 B_BE_WD_RLS_EN | B_BE_DLE_WDE_EN | B_BE_TXPKT_CTRL_EN | 379 B_BE_STA_SCH_EN | B_BE_DLE_PLE_EN | B_BE_PKT_BUF_EN | 380 B_BE_DMAC_TBL_EN | B_BE_PKT_IN_EN | B_BE_DLE_CPUIO_EN | 381 B_BE_DISPATCHER_EN | B_BE_BBRPT_EN | B_BE_MAC_SEC_EN | 382 B_BE_H_AXIDMA_EN | B_BE_DMAC_MLO_EN | B_BE_PLRLS_EN | 383 B_BE_P_AXIDMA_EN | B_BE_DLE_DATACPUIO_EN | B_BE_LTR_CTL_EN); 384 385 set_bit(RTW89_FLAG_DMAC_FUNC, rtwdev->flags); 386 387 rtw89_write32_set(rtwdev, R_BE_CMAC_SHARE_FUNC_EN, 388 B_BE_CMAC_SHARE_EN | B_BE_RESPBA_EN | B_BE_ADDRSRCH_EN | 389 B_BE_BTCOEX_EN); 390 rtw89_write32_set(rtwdev, R_BE_CMAC_FUNC_EN, 391 B_BE_CMAC_EN | B_BE_CMAC_TXEN | B_BE_CMAC_RXEN | 392 B_BE_SIGB_EN | B_BE_PHYINTF_EN | B_BE_CMAC_DMA_EN | 393 B_BE_PTCLTOP_EN | B_BE_SCHEDULER_EN | B_BE_TMAC_EN | 394 B_BE_RMAC_EN | B_BE_TXTIME_EN | B_BE_RESP_PKTCTL_EN); 395 396 set_bit(RTW89_FLAG_CMAC0_FUNC, rtwdev->flags); 397 398 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN | 399 B_BE_FEN_BBPLAT_RSTB); 400 401 if (!test_bit(RTW89_FLAG_PROBE_DONE, rtwdev->flags)) 402 rtw89_efuse_read_fw_secure_be(rtwdev); 403 404 return 0; 405 } 406 407 static int rtw8922a_pwr_off_func(struct rtw89_dev *rtwdev) 408 { 409 u32 val32; 410 int ret; 411 412 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x10, 0x10); 413 if (ret) 414 return ret; 415 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x08); 416 if (ret) 417 return ret; 418 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x04); 419 if (ret) 420 return ret; 421 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xC6, 0xFF); 422 if (ret) 423 return ret; 424 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xC6, 0xFF); 425 if (ret) 426 return ret; 427 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x80, 0x80); 428 if (ret) 429 return ret; 430 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x02); 431 if (ret) 432 return ret; 433 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x01); 434 if (ret) 435 return ret; 436 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0xFF); 437 if (ret) 438 return ret; 439 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x00, 0xFF); 440 if (ret) 441 return ret; 442 443 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP | 444 B_BE_R_SYM_ISO_ADDA_P12PP); 445 rtw89_write8_clr(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 | 446 B_BE_POW_PC_LDO_PORT1); 447 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON); 448 rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN | 449 B_BE_FEN_BBPLAT_RSTB); 450 rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3); 451 452 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x20); 453 if (ret) 454 return ret; 455 456 rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3); 457 458 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x40); 459 if (ret) 460 return ret; 461 462 rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN); 463 464 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_IO_ST), 465 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL); 466 if (ret) 467 return ret; 468 469 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST), 470 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL); 471 if (ret) 472 return ret; 473 474 rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN); 475 476 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HCI_WLAN_IO_ST), 477 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL); 478 if (ret) 479 return ret; 480 481 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_OFFMAC); 482 483 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFM_OFFMAC), 484 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL); 485 if (ret) 486 return ret; 487 488 rtw89_write32(rtwdev, R_BE_WLLPS_CTRL, 0x0000A1B2); 489 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_XTAL_OFF_A_DIE); 490 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS); 491 rtw89_write32(rtwdev, R_BE_UDM1, 0); 492 493 return 0; 494 } 495 496 static void rtw8922a_efuse_parsing_tssi(struct rtw89_dev *rtwdev, 497 struct rtw8922a_efuse *map) 498 { 499 struct rtw8922a_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi}; 500 u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b}; 501 struct rtw89_tssi_info *tssi = &rtwdev->tssi; 502 u8 i, j; 503 504 tssi->thermal[RF_PATH_A] = map->path_a_therm; 505 tssi->thermal[RF_PATH_B] = map->path_b_therm; 506 507 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 508 memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi, 509 sizeof(ofst[i]->cck_tssi)); 510 511 for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++) 512 rtw89_debug(rtwdev, RTW89_DBG_TSSI, 513 "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n", 514 i, j, tssi->tssi_cck[i][j]); 515 516 memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi, 517 sizeof(ofst[i]->bw40_tssi)); 518 memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM, 519 ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g)); 520 memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i], 521 sizeof(tssi->tssi_6g_mcs[i])); 522 523 for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++) 524 rtw89_debug(rtwdev, RTW89_DBG_TSSI, 525 "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n", 526 i, j, tssi->tssi_mcs[i][j]); 527 } 528 } 529 530 static void rtw8922a_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev, 531 struct rtw8922a_efuse *map) 532 { 533 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain; 534 bool all_0xff = true, all_0x00 = true; 535 int i, j; 536 u8 t; 537 538 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_a._2g_cck; 539 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_b._2g_cck; 540 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_a._2g_ofdm; 541 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_b._2g_ofdm; 542 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_a._5g_low; 543 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_b._5g_low; 544 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_a._5g_mid; 545 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_b._5g_mid; 546 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_a._5g_high; 547 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_b._5g_high; 548 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_a._6g_l0; 549 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_b._6g_l0; 550 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_a._6g_l1; 551 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_b._6g_l1; 552 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_a._6g_m0; 553 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_b._6g_m0; 554 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_a._6g_m1; 555 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_b._6g_m1; 556 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_a._6g_h0; 557 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_b._6g_h0; 558 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_a._6g_h1; 559 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_b._6g_h1; 560 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_a._6g_uh0; 561 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_b._6g_uh0; 562 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_a._6g_uh1; 563 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_b._6g_uh1; 564 565 for (i = RF_PATH_A; i <= RF_PATH_B; i++) 566 for (j = 0; j < RTW89_GAIN_OFFSET_NR; j++) { 567 t = gain->offset[i][j]; 568 if (t != 0xff) 569 all_0xff = false; 570 if (t != 0x0) 571 all_0x00 = false; 572 573 /* transform: sign-bit + U(7,2) to S(8,2) */ 574 if (t & 0x80) 575 gain->offset[i][j] = (t ^ 0x7f) + 1; 576 } 577 578 gain->offset_valid = !all_0xff && !all_0x00; 579 } 580 581 static void rtw8922a_read_efuse_mac_addr(struct rtw89_dev *rtwdev, u32 addr) 582 { 583 struct rtw89_efuse *efuse = &rtwdev->efuse; 584 u16 val; 585 int i; 586 587 for (i = 0; i < ETH_ALEN; i += 2, addr += 2) { 588 val = rtw89_read16(rtwdev, addr); 589 efuse->addr[i] = val & 0xff; 590 efuse->addr[i + 1] = val >> 8; 591 } 592 } 593 594 static int rtw8922a_read_efuse_pci_sdio(struct rtw89_dev *rtwdev, u8 *log_map) 595 { 596 struct rtw89_efuse *efuse = &rtwdev->efuse; 597 598 if (rtwdev->hci.type == RTW89_HCI_TYPE_PCIE) 599 rtw8922a_read_efuse_mac_addr(rtwdev, 0x3104); 600 else 601 ether_addr_copy(efuse->addr, log_map + 0x001A); 602 603 return 0; 604 } 605 606 static int rtw8922a_read_efuse_usb(struct rtw89_dev *rtwdev, u8 *log_map) 607 { 608 rtw8922a_read_efuse_mac_addr(rtwdev, 0x4078); 609 610 return 0; 611 } 612 613 static int rtw8922a_read_efuse_rf(struct rtw89_dev *rtwdev, u8 *log_map) 614 { 615 struct rtw8922a_efuse *map = (struct rtw8922a_efuse *)log_map; 616 struct rtw89_efuse *efuse = &rtwdev->efuse; 617 618 efuse->rfe_type = map->rfe_type; 619 efuse->xtal_cap = map->xtal_k; 620 efuse->country_code[0] = map->country_code[0]; 621 efuse->country_code[1] = map->country_code[1]; 622 rtw8922a_efuse_parsing_tssi(rtwdev, map); 623 rtw8922a_efuse_parsing_gain_offset(rtwdev, map); 624 625 rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type); 626 627 return 0; 628 } 629 630 static int rtw8922a_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map, 631 enum rtw89_efuse_block block) 632 { 633 switch (block) { 634 case RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO: 635 return rtw8922a_read_efuse_pci_sdio(rtwdev, log_map); 636 case RTW89_EFUSE_BLOCK_HCI_DIG_USB: 637 return rtw8922a_read_efuse_usb(rtwdev, log_map); 638 case RTW89_EFUSE_BLOCK_RF: 639 return rtw8922a_read_efuse_rf(rtwdev, log_map); 640 default: 641 return 0; 642 } 643 } 644 645 #define THM_TRIM_POSITIVE_MASK BIT(6) 646 #define THM_TRIM_MAGNITUDE_MASK GENMASK(5, 0) 647 648 static void rtw8922a_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev, 649 u8 *phycap_map) 650 { 651 static const u32 thm_trim_addr[RF_PATH_NUM_8922A] = {0x1706, 0x1733}; 652 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 653 u32 addr = rtwdev->chip->phycap_addr; 654 bool pg = true; 655 u8 pg_th; 656 s8 val; 657 u8 i; 658 659 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 660 pg_th = phycap_map[thm_trim_addr[i] - addr]; 661 if (pg_th == 0xff) { 662 info->thermal_trim[i] = 0; 663 pg = false; 664 break; 665 } 666 667 val = u8_get_bits(pg_th, THM_TRIM_MAGNITUDE_MASK); 668 669 if (!(pg_th & THM_TRIM_POSITIVE_MASK)) 670 val *= -1; 671 672 info->thermal_trim[i] = val; 673 674 rtw89_debug(rtwdev, RTW89_DBG_RFK, 675 "[THERMAL][TRIM] path=%d thermal_trim=0x%x (%d)\n", 676 i, pg_th, val); 677 } 678 679 info->pg_thermal_trim = pg; 680 } 681 682 static void rtw8922a_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev, 683 u8 *phycap_map) 684 { 685 static const u32 pabias_trim_addr[RF_PATH_NUM_8922A] = {0x1707, 0x1734}; 686 static const u32 check_pa_pad_trim_addr = 0x1700; 687 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 688 u32 addr = rtwdev->chip->phycap_addr; 689 u8 val; 690 u8 i; 691 692 val = phycap_map[check_pa_pad_trim_addr - addr]; 693 if (val != 0xff) 694 info->pg_pa_bias_trim = true; 695 696 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 697 info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr]; 698 699 rtw89_debug(rtwdev, RTW89_DBG_RFK, 700 "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n", 701 i, info->pa_bias_trim[i]); 702 } 703 } 704 705 static void rtw8922a_pa_bias_trim(struct rtw89_dev *rtwdev) 706 { 707 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 708 u8 pabias_2g, pabias_5g; 709 u8 i; 710 711 if (!info->pg_pa_bias_trim) { 712 rtw89_debug(rtwdev, RTW89_DBG_RFK, 713 "[PA_BIAS][TRIM] no PG, do nothing\n"); 714 715 return; 716 } 717 718 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 719 pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]); 720 pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]); 721 722 rtw89_debug(rtwdev, RTW89_DBG_RFK, 723 "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n", 724 i, pabias_2g, pabias_5g); 725 726 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG_V1, pabias_2g); 727 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA_V1, pabias_5g); 728 } 729 } 730 731 static void rtw8922a_phycap_parsing_pad_bias_trim(struct rtw89_dev *rtwdev, 732 u8 *phycap_map) 733 { 734 static const u32 pad_bias_trim_addr[RF_PATH_NUM_8922A] = {0x1708, 0x1735}; 735 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 736 u32 addr = rtwdev->chip->phycap_addr; 737 u8 i; 738 739 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 740 info->pad_bias_trim[i] = phycap_map[pad_bias_trim_addr[i] - addr]; 741 742 rtw89_debug(rtwdev, RTW89_DBG_RFK, 743 "[PAD_BIAS][TRIM] path=%d pad_bias_trim=0x%x\n", 744 i, info->pad_bias_trim[i]); 745 } 746 } 747 748 static void rtw8922a_pad_bias_trim(struct rtw89_dev *rtwdev) 749 { 750 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 751 u8 pad_bias_2g, pad_bias_5g; 752 u8 i; 753 754 if (!info->pg_pa_bias_trim) { 755 rtw89_debug(rtwdev, RTW89_DBG_RFK, 756 "[PAD_BIAS][TRIM] no PG, do nothing\n"); 757 return; 758 } 759 760 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 761 pad_bias_2g = u8_get_bits(info->pad_bias_trim[i], GENMASK(3, 0)); 762 pad_bias_5g = u8_get_bits(info->pad_bias_trim[i], GENMASK(7, 4)); 763 764 rtw89_debug(rtwdev, RTW89_DBG_RFK, 765 "[PAD_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n", 766 i, pad_bias_2g, pad_bias_5g); 767 768 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXG_V1, pad_bias_2g); 769 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXA_V1, pad_bias_5g); 770 } 771 } 772 773 static int rtw8922a_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map) 774 { 775 rtw8922a_phycap_parsing_thermal_trim(rtwdev, phycap_map); 776 rtw8922a_phycap_parsing_pa_bias_trim(rtwdev, phycap_map); 777 rtw8922a_phycap_parsing_pad_bias_trim(rtwdev, phycap_map); 778 779 return 0; 780 } 781 782 static void rtw8922a_power_trim(struct rtw89_dev *rtwdev) 783 { 784 rtw8922a_pa_bias_trim(rtwdev); 785 rtw8922a_pad_bias_trim(rtwdev); 786 } 787 788 static void rtw8922a_set_channel_mac(struct rtw89_dev *rtwdev, 789 const struct rtw89_chan *chan, 790 u8 mac_idx) 791 { 792 u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_BE_TX_SUB_BAND_VALUE, mac_idx); 793 u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_BE_TXRATE_CHK, mac_idx); 794 u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_BE_WMAC_RFMOD, mac_idx); 795 u8 txsb20 = 0, txsb40 = 0, txsb80 = 0; 796 u8 rf_mod_val, chk_rate_mask; 797 u32 txsb; 798 u32 reg; 799 800 switch (chan->band_width) { 801 case RTW89_CHANNEL_WIDTH_160: 802 txsb80 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_80); 803 fallthrough; 804 case RTW89_CHANNEL_WIDTH_80: 805 txsb40 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_40); 806 fallthrough; 807 case RTW89_CHANNEL_WIDTH_40: 808 txsb20 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_20); 809 break; 810 default: 811 break; 812 } 813 814 switch (chan->band_width) { 815 case RTW89_CHANNEL_WIDTH_160: 816 rf_mod_val = BE_WMAC_RFMOD_160M; 817 txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) | 818 u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK) | 819 u32_encode_bits(txsb80, B_BE_TXSB_80M_MASK); 820 break; 821 case RTW89_CHANNEL_WIDTH_80: 822 rf_mod_val = BE_WMAC_RFMOD_80M; 823 txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) | 824 u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK); 825 break; 826 case RTW89_CHANNEL_WIDTH_40: 827 rf_mod_val = BE_WMAC_RFMOD_40M; 828 txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK); 829 break; 830 case RTW89_CHANNEL_WIDTH_20: 831 default: 832 rf_mod_val = BE_WMAC_RFMOD_20M; 833 txsb = 0; 834 break; 835 } 836 837 if (txsb20 <= BE_PRI20_BITMAP_MAX) 838 txsb |= u32_encode_bits(BIT(txsb20), B_BE_PRI20_BITMAP_MASK); 839 840 rtw89_write8_mask(rtwdev, rf_mod, B_BE_WMAC_RFMOD_MASK, rf_mod_val); 841 rtw89_write32(rtwdev, sub_carr, txsb); 842 843 switch (chan->band_type) { 844 case RTW89_BAND_2G: 845 chk_rate_mask = B_BE_BAND_MODE; 846 break; 847 case RTW89_BAND_5G: 848 case RTW89_BAND_6G: 849 chk_rate_mask = B_BE_CHECK_CCK_EN | B_BE_RTS_LIMIT_IN_OFDM6; 850 break; 851 default: 852 rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type); 853 return; 854 } 855 856 rtw89_write8_clr(rtwdev, chk_rate, B_BE_BAND_MODE | B_BE_CHECK_CCK_EN | 857 B_BE_RTS_LIMIT_IN_OFDM6); 858 rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask); 859 860 switch (chan->band_width) { 861 case RTW89_CHANNEL_WIDTH_320: 862 case RTW89_CHANNEL_WIDTH_160: 863 case RTW89_CHANNEL_WIDTH_80: 864 case RTW89_CHANNEL_WIDTH_40: 865 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx); 866 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x41); 867 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx); 868 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x41); 869 break; 870 default: 871 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx); 872 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x3f); 873 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx); 874 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x3e); 875 break; 876 } 877 } 878 879 static const u32 rtw8922a_sco_barker_threshold[14] = { 880 0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6, 881 0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a 882 }; 883 884 static const u32 rtw8922a_sco_cck_threshold[14] = { 885 0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db, 886 0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e 887 }; 888 889 static int rtw8922a_ctrl_sco_cck(struct rtw89_dev *rtwdev, 890 u8 primary_ch, enum rtw89_bandwidth bw, 891 enum rtw89_phy_idx phy_idx) 892 { 893 u8 ch_element; 894 895 if (primary_ch >= 14) 896 return -EINVAL; 897 898 ch_element = primary_ch - 1; 899 900 rtw89_phy_write32_idx(rtwdev, R_BK_FC0INV, B_BK_FC0INV, 901 rtw8922a_sco_barker_threshold[ch_element], 902 phy_idx); 903 rtw89_phy_write32_idx(rtwdev, R_CCK_FC0INV, B_CCK_FC0INV, 904 rtw8922a_sco_cck_threshold[ch_element], 905 phy_idx); 906 907 return 0; 908 } 909 910 struct rtw8922a_bb_gain { 911 u32 gain_g[BB_PATH_NUM_8922A]; 912 u32 gain_a[BB_PATH_NUM_8922A]; 913 u32 gain_g_mask; 914 u32 gain_a_mask; 915 }; 916 917 static const struct rtw89_reg_def rpl_comp_bw160[RTW89_BW20_SC_160M] = { 918 { .addr = 0x41E8, .mask = 0xFF00}, 919 { .addr = 0x41E8, .mask = 0xFF0000}, 920 { .addr = 0x41E8, .mask = 0xFF000000}, 921 { .addr = 0x41EC, .mask = 0xFF}, 922 { .addr = 0x41EC, .mask = 0xFF00}, 923 { .addr = 0x41EC, .mask = 0xFF0000}, 924 { .addr = 0x41EC, .mask = 0xFF000000}, 925 { .addr = 0x41F0, .mask = 0xFF} 926 }; 927 928 static const struct rtw89_reg_def rpl_comp_bw80[RTW89_BW20_SC_80M] = { 929 { .addr = 0x41F4, .mask = 0xFF}, 930 { .addr = 0x41F4, .mask = 0xFF00}, 931 { .addr = 0x41F4, .mask = 0xFF0000}, 932 { .addr = 0x41F4, .mask = 0xFF000000} 933 }; 934 935 static const struct rtw89_reg_def rpl_comp_bw40[RTW89_BW20_SC_40M] = { 936 { .addr = 0x41F0, .mask = 0xFF0000}, 937 { .addr = 0x41F0, .mask = 0xFF000000} 938 }; 939 940 static const struct rtw89_reg_def rpl_comp_bw20[RTW89_BW20_SC_20M] = { 941 { .addr = 0x41F0, .mask = 0xFF00} 942 }; 943 944 static const struct rtw8922a_bb_gain bb_gain_lna[LNA_GAIN_NUM] = { 945 { .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C}, 946 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF}, 947 { .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C}, 948 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000}, 949 { .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470}, 950 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF}, 951 { .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470}, 952 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000}, 953 { .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474}, 954 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF}, 955 { .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474}, 956 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000}, 957 { .gain_g = {0x40a8, 0x44a8}, .gain_a = {0x4078, 0x4478}, 958 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF}, 959 }; 960 961 static const struct rtw8922a_bb_gain bb_gain_tia[TIA_GAIN_NUM] = { 962 { .gain_g = {0x4054, 0x4454}, .gain_a = {0x4054, 0x4454}, 963 .gain_g_mask = 0x7FC0000, .gain_a_mask = 0x1FF}, 964 { .gain_g = {0x4058, 0x4458}, .gain_a = {0x4054, 0x4454}, 965 .gain_g_mask = 0x1FF, .gain_a_mask = 0x3FE00 }, 966 }; 967 968 struct rtw8922a_bb_gain_bypass { 969 u32 gain_g[BB_PATH_NUM_8922A]; 970 u32 gain_a[BB_PATH_NUM_8922A]; 971 u32 gain_mask_g; 972 u32 gain_mask_a; 973 }; 974 975 static void rtw8922a_set_rpl_gain(struct rtw89_dev *rtwdev, 976 const struct rtw89_chan *chan, 977 enum rtw89_rf_path path, 978 enum rtw89_phy_idx phy_idx) 979 { 980 const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be; 981 u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type); 982 u32 reg_path_ofst = 0; 983 u32 mask; 984 s32 val; 985 u32 reg; 986 int i; 987 988 if (path == RF_PATH_B) 989 reg_path_ofst = 0x400; 990 991 for (i = 0; i < RTW89_BW20_SC_160M; i++) { 992 reg = rpl_comp_bw160[i].addr | reg_path_ofst; 993 mask = rpl_comp_bw160[i].mask; 994 val = gain->rpl_ofst_160[gain_band][path][i]; 995 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx); 996 } 997 998 for (i = 0; i < RTW89_BW20_SC_80M; i++) { 999 reg = rpl_comp_bw80[i].addr | reg_path_ofst; 1000 mask = rpl_comp_bw80[i].mask; 1001 val = gain->rpl_ofst_80[gain_band][path][i]; 1002 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx); 1003 } 1004 1005 for (i = 0; i < RTW89_BW20_SC_40M; i++) { 1006 reg = rpl_comp_bw40[i].addr | reg_path_ofst; 1007 mask = rpl_comp_bw40[i].mask; 1008 val = gain->rpl_ofst_40[gain_band][path][i]; 1009 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx); 1010 } 1011 1012 for (i = 0; i < RTW89_BW20_SC_20M; i++) { 1013 reg = rpl_comp_bw20[i].addr | reg_path_ofst; 1014 mask = rpl_comp_bw20[i].mask; 1015 val = gain->rpl_ofst_20[gain_band][path][i]; 1016 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx); 1017 } 1018 } 1019 1020 static void rtw8922a_set_lna_tia_gain(struct rtw89_dev *rtwdev, 1021 const struct rtw89_chan *chan, 1022 enum rtw89_rf_path path, 1023 enum rtw89_phy_idx phy_idx) 1024 { 1025 const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be; 1026 u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type); 1027 enum rtw89_phy_bb_bw_be bw_type; 1028 s32 val; 1029 u32 reg; 1030 u32 mask; 1031 int i; 1032 1033 bw_type = chan->band_width <= RTW89_CHANNEL_WIDTH_40 ? 1034 RTW89_BB_BW_20_40 : RTW89_BB_BW_80_160_320; 1035 1036 for (i = 0; i < LNA_GAIN_NUM; i++) { 1037 if (chan->band_type == RTW89_BAND_2G) { 1038 reg = bb_gain_lna[i].gain_g[path]; 1039 mask = bb_gain_lna[i].gain_g_mask; 1040 } else { 1041 reg = bb_gain_lna[i].gain_a[path]; 1042 mask = bb_gain_lna[i].gain_a_mask; 1043 } 1044 val = gain->lna_gain[gain_band][bw_type][path][i]; 1045 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx); 1046 } 1047 1048 for (i = 0; i < TIA_GAIN_NUM; i++) { 1049 if (chan->band_type == RTW89_BAND_2G) { 1050 reg = bb_gain_tia[i].gain_g[path]; 1051 mask = bb_gain_tia[i].gain_g_mask; 1052 } else { 1053 reg = bb_gain_tia[i].gain_a[path]; 1054 mask = bb_gain_tia[i].gain_a_mask; 1055 } 1056 val = gain->tia_gain[gain_band][bw_type][path][i]; 1057 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx); 1058 } 1059 } 1060 1061 static void rtw8922a_set_gain(struct rtw89_dev *rtwdev, 1062 const struct rtw89_chan *chan, 1063 enum rtw89_rf_path path, 1064 enum rtw89_phy_idx phy_idx) 1065 { 1066 rtw8922a_set_lna_tia_gain(rtwdev, chan, path, phy_idx); 1067 rtw8922a_set_rpl_gain(rtwdev, chan, path, phy_idx); 1068 } 1069 1070 static void rtw8922a_set_rx_gain_normal_cck(struct rtw89_dev *rtwdev, 1071 const struct rtw89_chan *chan, 1072 enum rtw89_rf_path path) 1073 { 1074 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain; 1075 s8 value = -gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK]; /* S(8,2) */ 1076 u8 fraction = value & 0x3; 1077 1078 if (fraction) { 1079 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, 1080 (0x4 - fraction) << 1); 1081 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, 1082 (0x4 - fraction) << 1); 1083 1084 value >>= 2; 1085 rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST, 1086 value + 1 + 0xdc); 1087 } else { 1088 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, 0); 1089 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, 0); 1090 1091 value >>= 2; 1092 rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST, 1093 value + 0xdc); 1094 } 1095 } 1096 1097 static void rtw8922a_set_rx_gain_normal_ofdm(struct rtw89_dev *rtwdev, 1098 const struct rtw89_chan *chan, 1099 enum rtw89_rf_path path) 1100 { 1101 static const u32 rssi_tb_bias_comp[2] = {0x41f8, 0x45f8}; 1102 static const u32 rssi_tb_ext_comp[2] = {0x4208, 0x4608}; 1103 static const u32 rssi_ofst_addr[2] = {0x40c8, 0x44c8}; 1104 static const u32 rpl_bias_comp[2] = {0x41e8, 0x45e8}; 1105 static const u32 rpl_ext_comp[2] = {0x41f8, 0x45f8}; 1106 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain; 1107 enum rtw89_gain_offset gain_band; 1108 s8 v1, v2, v3; 1109 s32 value; 1110 1111 gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(chan->subband_type); 1112 value = gain->offset[path][gain_band]; 1113 rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], 0xff000000, value + 0xF8); 1114 1115 value *= -4; 1116 v1 = clamp_t(s32, value, S8_MIN, S8_MAX); 1117 value -= v1; 1118 v2 = clamp_t(s32, value, S8_MIN, S8_MAX); 1119 value -= v2; 1120 v3 = clamp_t(s32, value, S8_MIN, S8_MAX); 1121 1122 rtw89_phy_write32_mask(rtwdev, rpl_bias_comp[path], 0xff, v1); 1123 rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff, v2); 1124 rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff00, v3); 1125 1126 rtw89_phy_write32_mask(rtwdev, rssi_tb_bias_comp[path], 0xff0000, v1); 1127 rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff0000, v2); 1128 rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff000000, v3); 1129 } 1130 1131 static void rtw8922a_set_rx_gain_normal(struct rtw89_dev *rtwdev, 1132 const struct rtw89_chan *chan, 1133 enum rtw89_rf_path path) 1134 { 1135 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain; 1136 1137 if (!gain->offset_valid) 1138 return; 1139 1140 if (chan->band_type == RTW89_BAND_2G) 1141 rtw8922a_set_rx_gain_normal_cck(rtwdev, chan, path); 1142 1143 rtw8922a_set_rx_gain_normal_ofdm(rtwdev, chan, path); 1144 } 1145 1146 static void rtw8922a_set_cck_parameters(struct rtw89_dev *rtwdev, u8 central_ch, 1147 enum rtw89_phy_idx phy_idx) 1148 { 1149 if (central_ch == 14) { 1150 rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3b13ff, phy_idx); 1151 rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x1c42de, phy_idx); 1152 rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0xfdb0ad, phy_idx); 1153 rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0xf60f6e, phy_idx); 1154 rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfd8f92, phy_idx); 1155 rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0x02d011, phy_idx); 1156 rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0x01c02c, phy_idx); 1157 rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xfff00a, phy_idx); 1158 } else { 1159 rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3a63ca, phy_idx); 1160 rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x2a833f, phy_idx); 1161 rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0x1491f8, phy_idx); 1162 rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0x03c0b0, phy_idx); 1163 rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfccff1, phy_idx); 1164 rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0xfccfc3, phy_idx); 1165 rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0xfebfdc, phy_idx); 1166 rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xffdff7, phy_idx); 1167 } 1168 } 1169 1170 static void rtw8922a_ctrl_ch(struct rtw89_dev *rtwdev, 1171 const struct rtw89_chan *chan, 1172 enum rtw89_phy_idx phy_idx) 1173 { 1174 static const u32 band_sel[2] = {0x4160, 0x4560}; 1175 u16 central_freq = chan->freq; 1176 u8 central_ch = chan->channel; 1177 u8 band = chan->band_type; 1178 bool is_2g = band == RTW89_BAND_2G; 1179 u8 chan_idx; 1180 u8 path; 1181 u8 sco; 1182 1183 if (!central_freq) { 1184 rtw89_warn(rtwdev, "Invalid central_freq\n"); 1185 return; 1186 } 1187 1188 rtw8922a_set_gain(rtwdev, chan, RF_PATH_A, phy_idx); 1189 rtw8922a_set_gain(rtwdev, chan, RF_PATH_B, phy_idx); 1190 1191 for (path = RF_PATH_A; path < BB_PATH_NUM_8922A; path++) 1192 rtw89_phy_write32_idx(rtwdev, band_sel[path], BIT((26)), is_2g, phy_idx); 1193 1194 rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_A); 1195 rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_B); 1196 1197 rtw89_phy_write32_idx(rtwdev, R_FC0, B_FC0, central_freq, phy_idx); 1198 sco = DIV_ROUND_CLOSEST(1 << 18, central_freq); 1199 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_FC0_INV, sco, phy_idx); 1200 1201 if (band == RTW89_BAND_2G) 1202 rtw8922a_set_cck_parameters(rtwdev, central_ch, phy_idx); 1203 1204 chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band); 1205 rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx); 1206 } 1207 1208 static void 1209 rtw8922a_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_sb, u8 bw, 1210 enum rtw89_phy_idx phy_idx) 1211 { 1212 switch (bw) { 1213 case RTW89_CHANNEL_WIDTH_5: 1214 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx); 1215 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x1, phy_idx); 1216 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx); 1217 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx); 1218 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx); 1219 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx); 1220 break; 1221 case RTW89_CHANNEL_WIDTH_10: 1222 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx); 1223 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x2, phy_idx); 1224 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx); 1225 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx); 1226 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx); 1227 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx); 1228 break; 1229 case RTW89_CHANNEL_WIDTH_20: 1230 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx); 1231 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx); 1232 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx); 1233 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx); 1234 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx); 1235 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx); 1236 break; 1237 case RTW89_CHANNEL_WIDTH_40: 1238 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x1, phy_idx); 1239 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx); 1240 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx); 1241 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx); 1242 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx); 1243 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx); 1244 break; 1245 case RTW89_CHANNEL_WIDTH_80: 1246 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x2, phy_idx); 1247 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx); 1248 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx); 1249 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx); 1250 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx); 1251 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx); 1252 break; 1253 case RTW89_CHANNEL_WIDTH_160: 1254 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x3, phy_idx); 1255 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx); 1256 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx); 1257 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx); 1258 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx); 1259 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx); 1260 break; 1261 default: 1262 rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri_sb:%d)\n", bw, 1263 pri_sb); 1264 break; 1265 } 1266 1267 if (bw == RTW89_CHANNEL_WIDTH_40) 1268 rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 1, phy_idx); 1269 else 1270 rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 0, phy_idx); 1271 } 1272 1273 static u32 rtw8922a_spur_freq(struct rtw89_dev *rtwdev, 1274 const struct rtw89_chan *chan) 1275 { 1276 return 0; 1277 } 1278 1279 #define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */ 1280 #define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */ 1281 #define MAX_TONE_NUM 2048 1282 1283 static void rtw8922a_set_csi_tone_idx(struct rtw89_dev *rtwdev, 1284 const struct rtw89_chan *chan, 1285 enum rtw89_phy_idx phy_idx) 1286 { 1287 s32 freq_diff, csi_idx, csi_tone_idx; 1288 u32 spur_freq; 1289 1290 spur_freq = rtw8922a_spur_freq(rtwdev, chan); 1291 if (spur_freq == 0) { 1292 rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, 1293 0, phy_idx); 1294 return; 1295 } 1296 1297 freq_diff = (spur_freq - chan->freq) * 1000000; 1298 csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125); 1299 s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx); 1300 1301 rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_TONE_IDX, 1302 csi_tone_idx, phy_idx); 1303 rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, 1, phy_idx); 1304 } 1305 1306 static const struct rtw89_nbi_reg_def rtw8922a_nbi_reg_def[] = { 1307 [RF_PATH_A] = { 1308 .notch1_idx = {0x41a0, 0xFF}, 1309 .notch1_frac_idx = {0x41a0, 0xC00}, 1310 .notch1_en = {0x41a0, 0x1000}, 1311 .notch2_idx = {0x41ac, 0xFF}, 1312 .notch2_frac_idx = {0x41ac, 0xC00}, 1313 .notch2_en = {0x41ac, 0x1000}, 1314 }, 1315 [RF_PATH_B] = { 1316 .notch1_idx = {0x45a0, 0xFF}, 1317 .notch1_frac_idx = {0x45a0, 0xC00}, 1318 .notch1_en = {0x45a0, 0x1000}, 1319 .notch2_idx = {0x45ac, 0xFF}, 1320 .notch2_frac_idx = {0x45ac, 0xC00}, 1321 .notch2_en = {0x45ac, 0x1000}, 1322 }, 1323 }; 1324 1325 static void rtw8922a_set_nbi_tone_idx(struct rtw89_dev *rtwdev, 1326 const struct rtw89_chan *chan, 1327 enum rtw89_rf_path path, 1328 enum rtw89_phy_idx phy_idx) 1329 { 1330 const struct rtw89_nbi_reg_def *nbi = &rtw8922a_nbi_reg_def[path]; 1331 s32 nbi_frac_idx, nbi_frac_tone_idx; 1332 s32 nbi_idx, nbi_tone_idx; 1333 bool notch2_chk = false; 1334 u32 spur_freq, fc; 1335 s32 freq_diff; 1336 1337 spur_freq = rtw8922a_spur_freq(rtwdev, chan); 1338 if (spur_freq == 0) { 1339 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1340 nbi->notch1_en.mask, 0, phy_idx); 1341 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1342 nbi->notch2_en.mask, 0, phy_idx); 1343 return; 1344 } 1345 1346 fc = chan->freq; 1347 if (chan->band_width == RTW89_CHANNEL_WIDTH_160) { 1348 fc = (spur_freq > fc) ? fc + 40 : fc - 40; 1349 if ((fc > spur_freq && 1350 chan->channel < chan->primary_channel) || 1351 (fc < spur_freq && 1352 chan->channel > chan->primary_channel)) 1353 notch2_chk = true; 1354 } 1355 1356 freq_diff = (spur_freq - fc) * 1000000; 1357 nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5, 1358 &nbi_frac_idx); 1359 1360 if (chan->band_width == RTW89_CHANNEL_WIDTH_20) { 1361 s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx); 1362 } else { 1363 u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ? 1364 128 : 256; 1365 1366 s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx); 1367 } 1368 nbi_frac_tone_idx = 1369 s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125); 1370 1371 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) { 1372 rtw89_phy_write32_idx(rtwdev, nbi->notch2_idx.addr, 1373 nbi->notch2_idx.mask, nbi_tone_idx, phy_idx); 1374 rtw89_phy_write32_idx(rtwdev, nbi->notch2_frac_idx.addr, 1375 nbi->notch2_frac_idx.mask, nbi_frac_tone_idx, 1376 phy_idx); 1377 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1378 nbi->notch2_en.mask, 0, phy_idx); 1379 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1380 nbi->notch2_en.mask, 1, phy_idx); 1381 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1382 nbi->notch1_en.mask, 0, phy_idx); 1383 } else { 1384 rtw89_phy_write32_idx(rtwdev, nbi->notch1_idx.addr, 1385 nbi->notch1_idx.mask, nbi_tone_idx, phy_idx); 1386 rtw89_phy_write32_idx(rtwdev, nbi->notch1_frac_idx.addr, 1387 nbi->notch1_frac_idx.mask, nbi_frac_tone_idx, 1388 phy_idx); 1389 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1390 nbi->notch1_en.mask, 0, phy_idx); 1391 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr, 1392 nbi->notch1_en.mask, 1, phy_idx); 1393 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr, 1394 nbi->notch2_en.mask, 0, phy_idx); 1395 } 1396 } 1397 1398 static void rtw8922a_spur_elimination(struct rtw89_dev *rtwdev, 1399 const struct rtw89_chan *chan, 1400 enum rtw89_phy_idx phy_idx) 1401 { 1402 rtw8922a_set_csi_tone_idx(rtwdev, chan, phy_idx); 1403 rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A, phy_idx); 1404 rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B, phy_idx); 1405 } 1406 1407 static void rtw8922a_ctrl_afe_dac(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw, 1408 enum rtw89_rf_path path) 1409 { 1410 u32 cr_ofst = 0x0; 1411 1412 if (path == RF_PATH_B) 1413 cr_ofst = 0x100; 1414 1415 switch (bw) { 1416 case RTW89_CHANNEL_WIDTH_5: 1417 case RTW89_CHANNEL_WIDTH_10: 1418 case RTW89_CHANNEL_WIDTH_20: 1419 case RTW89_CHANNEL_WIDTH_40: 1420 case RTW89_CHANNEL_WIDTH_80: 1421 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xE); 1422 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x7); 1423 break; 1424 case RTW89_CHANNEL_WIDTH_160: 1425 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xD); 1426 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x6); 1427 break; 1428 default: 1429 break; 1430 } 1431 } 1432 1433 static const struct rtw89_reg2_def bb_mcu0_init_reg[] = { 1434 {0x6990, 0x00000000}, 1435 {0x6994, 0x00000000}, 1436 {0x6998, 0x00000000}, 1437 {0x6820, 0xFFFFFFFE}, 1438 {0x6800, 0xC0000FFE}, 1439 {0x6808, 0x76543210}, 1440 {0x6814, 0xBFBFB000}, 1441 {0x6818, 0x0478C009}, 1442 {0x6800, 0xC0000FFF}, 1443 {0x6820, 0xFFFFFFFF}, 1444 }; 1445 1446 static const struct rtw89_reg2_def bb_mcu1_init_reg[] = { 1447 {0x6990, 0x00000000}, 1448 {0x6994, 0x00000000}, 1449 {0x6998, 0x00000000}, 1450 {0x6820, 0xFFFFFFFE}, 1451 {0x6800, 0xC0000FFE}, 1452 {0x6808, 0x76543210}, 1453 {0x6814, 0xBFBFB000}, 1454 {0x6818, 0x0478C009}, 1455 {0x6800, 0xC0000FFF}, 1456 {0x6820, 0xFFFFFFFF}, 1457 }; 1458 1459 static void rtw8922a_bbmcu_cr_init(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx) 1460 { 1461 const struct rtw89_reg2_def *reg; 1462 int size; 1463 int i; 1464 1465 if (phy_idx == RTW89_PHY_0) { 1466 reg = bb_mcu0_init_reg; 1467 size = ARRAY_SIZE(bb_mcu0_init_reg); 1468 } else { 1469 reg = bb_mcu1_init_reg; 1470 size = ARRAY_SIZE(bb_mcu1_init_reg); 1471 } 1472 1473 for (i = 0; i < size; i++, reg++) 1474 rtw89_bbmcu_write32(rtwdev, reg->addr, reg->data, phy_idx); 1475 } 1476 1477 static const u32 dmac_sys_mask[2] = {B_BE_DMAC_BB_PHY0_MASK, B_BE_DMAC_BB_PHY1_MASK}; 1478 static const u32 bbrst_mask[2] = {B_BE_FEN_BBPLAT_RSTB, B_BE_FEN_BB1PLAT_RSTB}; 1479 static const u32 glbrst_mask[2] = {B_BE_FEN_BB_IP_RSTN, B_BE_FEN_BB1_IP_RSTN}; 1480 static const u32 mcu_bootrdy_mask[2] = {B_BE_BOOT_RDY0, B_BE_BOOT_RDY1}; 1481 1482 static void rtw8922a_bb_preinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx) 1483 { 1484 u32 rdy = 0; 1485 1486 if (phy_idx == RTW89_PHY_1) 1487 rdy = 1; 1488 1489 rtw89_write32_mask(rtwdev, R_BE_DMAC_SYS_CR32B, dmac_sys_mask[phy_idx], 0x7FF9); 1490 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x0); 1491 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx], 0x0); 1492 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x1); 1493 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx], rdy); 1494 rtw89_write32_mask(rtwdev, R_BE_MEM_PWR_CTRL, B_BE_MEM_BBMCU0_DS_V1, 0); 1495 1496 fsleep(1); 1497 rtw8922a_bbmcu_cr_init(rtwdev, phy_idx); 1498 } 1499 1500 static void rtw8922a_bb_postinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx) 1501 { 1502 if (phy_idx == RTW89_PHY_0) 1503 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx]); 1504 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx]); 1505 1506 rtw89_phy_write32_set(rtwdev, R_BBCLK, B_CLK_640M); 1507 rtw89_phy_write32_clr(rtwdev, R_TXSCALE, B_TXFCTR_EN); 1508 rtw89_phy_set_phy_regs(rtwdev, R_TXFCTR, B_TXFCTR_THD, 0x200); 1509 rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_EHT_RATE_TH, 0xA); 1510 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_HE_RATE_TH, 0xA); 1511 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_HT_VHT_TH, 0xAAA); 1512 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_EHT_MCS14, 0x1); 1513 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_EHT_MCS15, 0x1); 1514 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_EHTTB_EN, 0x0); 1515 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEERSU_EN, 0x0); 1516 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEMU_EN, 0x0); 1517 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_TB_EN, 0x0); 1518 rtw89_phy_set_phy_regs(rtwdev, R_SU_PUNC, B_SU_PUNC_EN, 0x1); 1519 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_HWGEN_EN, 0x1); 1520 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_PWROFST_COMP, 0x1); 1521 rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_BY_SLOPE, 0x1); 1522 rtw89_phy_set_phy_regs(rtwdev, R_MAG_A, B_MGA_AEND, 0xe0); 1523 rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_MAG_AB, 0xe0c000); 1524 rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_A, 0x3FE0); 1525 rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_B, 0x3FE0); 1526 rtw89_phy_set_phy_regs(rtwdev, R_SC_CORNER, B_SC_CORNER, 0x200); 1527 rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x0, phy_idx); 1528 rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x1, phy_idx); 1529 } 1530 1531 static void rtw8922a_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band, 1532 bool en, enum rtw89_phy_idx phy_idx) 1533 { 1534 if (en) { 1535 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx); 1536 if (band == RTW89_BAND_2G) 1537 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, 1538 B_RXCCA_BE1_DIS, 0x0, phy_idx); 1539 rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0, phy_idx); 1540 } else { 1541 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0x1, phy_idx); 1542 rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1, phy_idx); 1543 fsleep(1); 1544 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx); 1545 } 1546 } 1547 1548 static int rtw8922a_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev, 1549 enum rtw89_rf_path tx_path, 1550 enum rtw89_phy_idx phy_idx) 1551 { 1552 struct rtw89_reg2_def path_com_cr[] = { 1553 {0x11A00, 0x21C86900}, 1554 {0x11A04, 0x00E4E433}, 1555 {0x11A08, 0x39390CC9}, 1556 {0x11A0C, 0x4E433240}, 1557 {0x11A10, 0x90CC900E}, 1558 {0x11A14, 0x00240393}, 1559 {0x11A18, 0x201C8600}, 1560 }; 1561 int ret = 0; 1562 u32 reg; 1563 int i; 1564 1565 rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL, 0x0, phy_idx); 1566 1567 if (phy_idx == RTW89_PHY_1 && !rtwdev->dbcc_en) 1568 return 0; 1569 1570 if (tx_path == RF_PATH_A) { 1571 path_com_cr[0].data = 0x21C82900; 1572 path_com_cr[1].data = 0x00E4E431; 1573 path_com_cr[2].data = 0x39390C49; 1574 path_com_cr[3].data = 0x4E431240; 1575 path_com_cr[4].data = 0x90C4900E; 1576 path_com_cr[6].data = 0x201C8200; 1577 } else if (tx_path == RF_PATH_B) { 1578 path_com_cr[0].data = 0x21C04900; 1579 path_com_cr[1].data = 0x00E4E032; 1580 path_com_cr[2].data = 0x39380C89; 1581 path_com_cr[3].data = 0x4E032240; 1582 path_com_cr[4].data = 0x80C8900E; 1583 path_com_cr[6].data = 0x201C0400; 1584 } else if (tx_path == RF_PATH_AB) { 1585 path_com_cr[0].data = 0x21C86900; 1586 path_com_cr[1].data = 0x00E4E433; 1587 path_com_cr[2].data = 0x39390CC9; 1588 path_com_cr[3].data = 0x4E433240; 1589 path_com_cr[4].data = 0x90CC900E; 1590 path_com_cr[6].data = 0x201C8600; 1591 } else { 1592 ret = -EINVAL; 1593 } 1594 1595 for (i = 0; i < ARRAY_SIZE(path_com_cr); i++) { 1596 reg = rtw89_mac_reg_by_idx(rtwdev, path_com_cr[i].addr, phy_idx); 1597 rtw89_write32(rtwdev, reg, path_com_cr[i].data); 1598 } 1599 1600 return ret; 1601 } 1602 1603 static void rtw8922a_bb_reset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx) 1604 { 1605 } 1606 1607 static int rtw8922a_cfg_rx_nss_limit(struct rtw89_dev *rtwdev, u8 rx_nss, 1608 enum rtw89_phy_idx phy_idx) 1609 { 1610 if (rx_nss == 1) { 1611 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 0, phy_idx); 1612 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 0, phy_idx); 1613 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX, 1614 HE_N_USER_MAX_8922A, phy_idx); 1615 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 0, phy_idx); 1616 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 0, phy_idx); 1617 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 0, phy_idx); 1618 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 0, 1619 phy_idx); 1620 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX, 1621 HE_N_USER_MAX_8922A, phy_idx); 1622 } else if (rx_nss == 2) { 1623 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 1, phy_idx); 1624 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 1, phy_idx); 1625 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX, 1626 HE_N_USER_MAX_8922A, phy_idx); 1627 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 1, phy_idx); 1628 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 1, phy_idx); 1629 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 1, phy_idx); 1630 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 1, 1631 phy_idx); 1632 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX, 1633 HE_N_USER_MAX_8922A, phy_idx); 1634 } else { 1635 return -EINVAL; 1636 } 1637 1638 return 0; 1639 } 1640 1641 static void rtw8922a_tssi_reset(struct rtw89_dev *rtwdev, 1642 enum rtw89_rf_path path, 1643 enum rtw89_phy_idx phy_idx) 1644 { 1645 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) { 1646 if (phy_idx == RTW89_PHY_0) { 1647 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0); 1648 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1); 1649 } else { 1650 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0); 1651 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1); 1652 } 1653 } else { 1654 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0); 1655 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1); 1656 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0); 1657 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1); 1658 } 1659 } 1660 1661 static int rtw8922a_ctrl_rx_path_tmac(struct rtw89_dev *rtwdev, 1662 enum rtw89_rf_path rx_path, 1663 enum rtw89_phy_idx phy_idx) 1664 { 1665 u8 rx_nss = (rx_path == RF_PATH_AB) ? 2 : 1; 1666 1667 /* Set to 0 first to avoid abnormal EDCCA report */ 1668 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x0, phy_idx); 1669 1670 if (rx_path == RF_PATH_A) { 1671 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x1, phy_idx); 1672 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 1, phy_idx); 1673 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 1674 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx); 1675 } else if (rx_path == RF_PATH_B) { 1676 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x2, phy_idx); 1677 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 2, phy_idx); 1678 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 1679 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx); 1680 } else if (rx_path == RF_PATH_AB) { 1681 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x3, phy_idx); 1682 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 3, phy_idx); 1683 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 1684 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx); 1685 } else { 1686 return -EINVAL; 1687 } 1688 1689 return 0; 1690 } 1691 1692 #define DIGITAL_PWR_COMP_REG_NUM 22 1693 static const u32 rtw8922a_digital_pwr_comp_val[][DIGITAL_PWR_COMP_REG_NUM] = { 1694 {0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A, 1695 0x0BB80708, 0x17701194, 0x02020100, 0x03030303, 0x01000303, 1696 0x05030302, 0x06060605, 0x06050300, 0x0A090807, 0x02000B0B, 1697 0x09080604, 0x0D0D0C0B, 0x08060400, 0x110F0C0B, 0x05001111, 1698 0x0D0C0907, 0x12121210}, 1699 {0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A, 1700 0x0BB80708, 0x17701194, 0x04030201, 0x05050505, 0x01000505, 1701 0x07060504, 0x09090908, 0x09070400, 0x0E0D0C0B, 0x03000E0E, 1702 0x0D0B0907, 0x1010100F, 0x0B080500, 0x1512100D, 0x05001515, 1703 0x100D0B08, 0x15151512}, 1704 }; 1705 1706 static void rtw8922a_set_digital_pwr_comp(struct rtw89_dev *rtwdev, 1707 bool enable, u8 nss, 1708 enum rtw89_rf_path path) 1709 { 1710 static const u32 ltpc_t0[2] = {R_BE_LTPC_T0_PATH0, R_BE_LTPC_T0_PATH1}; 1711 const u32 *digital_pwr_comp; 1712 u32 addr, val; 1713 u32 i; 1714 1715 if (nss == 1) 1716 digital_pwr_comp = rtw8922a_digital_pwr_comp_val[0]; 1717 else 1718 digital_pwr_comp = rtw8922a_digital_pwr_comp_val[1]; 1719 1720 addr = ltpc_t0[path]; 1721 for (i = 0; i < DIGITAL_PWR_COMP_REG_NUM; i++, addr += 4) { 1722 val = enable ? digital_pwr_comp[i] : 0; 1723 rtw89_phy_write32(rtwdev, addr, val); 1724 } 1725 } 1726 1727 static void rtw8922a_digital_pwr_comp(struct rtw89_dev *rtwdev, 1728 enum rtw89_phy_idx phy_idx) 1729 { 1730 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0); 1731 bool enable = chan->band_type != RTW89_BAND_2G; 1732 u8 path; 1733 1734 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) { 1735 if (phy_idx == RTW89_PHY_0) 1736 path = RF_PATH_A; 1737 else 1738 path = RF_PATH_B; 1739 rtw8922a_set_digital_pwr_comp(rtwdev, enable, 1, path); 1740 } else { 1741 rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_A); 1742 rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_B); 1743 } 1744 } 1745 1746 static int rtw8922a_ctrl_mlo(struct rtw89_dev *rtwdev, enum rtw89_mlo_dbcc_mode mode) 1747 { 1748 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0); 1749 1750 if (mode == MLO_1_PLUS_1_1RF || mode == DBCC_LEGACY) { 1751 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x1); 1752 rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x0); 1753 } else if (mode == MLO_2_PLUS_0_1RF || mode == MLO_0_PLUS_2_1RF || 1754 mode == MLO_DBCC_NOT_SUPPORT) { 1755 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0); 1756 rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x1); 1757 } else { 1758 return -EOPNOTSUPP; 1759 } 1760 1761 if (mode == MLO_2_PLUS_0_1RF) { 1762 rtw8922a_ctrl_afe_dac(rtwdev, chan->band_width, RF_PATH_A); 1763 rtw8922a_ctrl_afe_dac(rtwdev, chan->band_width, RF_PATH_B); 1764 } else { 1765 rtw89_warn(rtwdev, "unsupported MLO mode %d\n", mode); 1766 } 1767 1768 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180); 1769 1770 if (mode == MLO_2_PLUS_0_1RF) { 1771 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1772 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9); 1773 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9); 1774 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9); 1775 } else if (mode == MLO_0_PLUS_2_1RF) { 1776 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1777 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF); 1778 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF); 1779 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF); 1780 } else if ((mode == MLO_1_PLUS_1_1RF) || (mode == DBCC_LEGACY)) { 1781 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x7BAB); 1782 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3BAB); 1783 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3AAB); 1784 } else { 1785 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x180); 1786 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x0); 1787 } 1788 1789 return 0; 1790 } 1791 1792 static void rtw8922a_bb_sethw(struct rtw89_dev *rtwdev) 1793 { 1794 u32 reg; 1795 1796 rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP, B_EN_SND_WO_NDP); 1797 rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP_C1, B_EN_SND_WO_NDP); 1798 1799 rtw89_write32_mask(rtwdev, R_BE_PWR_BOOST, B_BE_PWR_CTRL_SEL, 0); 1800 if (rtwdev->dbcc_en) { 1801 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, RTW89_MAC_1); 1802 rtw89_write32_mask(rtwdev, reg, B_BE_PWR_CTRL_SEL, 0); 1803 } 1804 1805 rtw8922a_ctrl_mlo(rtwdev, rtwdev->mlo_dbcc_mode); 1806 } 1807 1808 static void rtw8922a_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en, 1809 enum rtw89_phy_idx phy_idx) 1810 { 1811 if (cck_en) { 1812 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0, phy_idx); 1813 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1, phy_idx); 1814 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF, 1815 0, phy_idx); 1816 } else { 1817 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 1, phy_idx); 1818 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0, phy_idx); 1819 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF, 1820 1, phy_idx); 1821 } 1822 } 1823 1824 static void rtw8922a_set_channel_bb(struct rtw89_dev *rtwdev, 1825 const struct rtw89_chan *chan, 1826 enum rtw89_phy_idx phy_idx) 1827 { 1828 bool cck_en = chan->band_type == RTW89_BAND_2G; 1829 u8 pri_sb = chan->pri_sb_idx; 1830 1831 if (cck_en) 1832 rtw8922a_ctrl_sco_cck(rtwdev, chan->primary_channel, 1833 chan->band_width, phy_idx); 1834 1835 rtw8922a_ctrl_ch(rtwdev, chan, phy_idx); 1836 rtw8922a_ctrl_bw(rtwdev, pri_sb, chan->band_width, phy_idx); 1837 rtw8922a_ctrl_cck_en(rtwdev, cck_en, phy_idx); 1838 rtw8922a_spur_elimination(rtwdev, chan, phy_idx); 1839 1840 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx); 1841 rtw8922a_tssi_reset(rtwdev, RF_PATH_AB, phy_idx); 1842 } 1843 1844 static void rtw8922a_pre_set_channel_bb(struct rtw89_dev *rtwdev, 1845 enum rtw89_phy_idx phy_idx) 1846 { 1847 if (!rtwdev->dbcc_en) 1848 return; 1849 1850 if (phy_idx == RTW89_PHY_0) { 1851 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0); 1852 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180); 1853 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1854 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9); 1855 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9); 1856 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9); 1857 } else { 1858 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0); 1859 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB); 1860 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF); 1861 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF); 1862 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF); 1863 } 1864 } 1865 1866 static void rtw8922a_post_set_channel_bb(struct rtw89_dev *rtwdev, 1867 enum rtw89_mlo_dbcc_mode mode, 1868 enum rtw89_phy_idx phy_idx) 1869 { 1870 if (!rtwdev->dbcc_en) 1871 return; 1872 1873 rtw8922a_digital_pwr_comp(rtwdev, phy_idx); 1874 rtw8922a_ctrl_mlo(rtwdev, mode); 1875 } 1876 1877 static void rtw8922a_set_channel(struct rtw89_dev *rtwdev, 1878 const struct rtw89_chan *chan, 1879 enum rtw89_mac_idx mac_idx, 1880 enum rtw89_phy_idx phy_idx) 1881 { 1882 rtw8922a_set_channel_mac(rtwdev, chan, mac_idx); 1883 rtw8922a_set_channel_bb(rtwdev, chan, phy_idx); 1884 rtw8922a_set_channel_rf(rtwdev, chan, phy_idx); 1885 } 1886 1887 static void rtw8922a_dfs_en_idx(struct rtw89_dev *rtwdev, 1888 enum rtw89_phy_idx phy_idx, enum rtw89_rf_path path, 1889 bool en) 1890 { 1891 u32 path_ofst = (path == RF_PATH_B) ? 0x100 : 0x0; 1892 1893 if (en) 1894 rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 1, 1895 phy_idx); 1896 else 1897 rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 0, 1898 phy_idx); 1899 } 1900 1901 static void rtw8922a_dfs_en(struct rtw89_dev *rtwdev, bool en, 1902 enum rtw89_phy_idx phy_idx) 1903 { 1904 rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_A, en); 1905 rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_B, en); 1906 } 1907 1908 static void rtw8922a_adc_en_path(struct rtw89_dev *rtwdev, 1909 enum rtw89_rf_path path, bool en) 1910 { 1911 u32 val; 1912 1913 val = rtw89_phy_read32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1); 1914 1915 if (en) { 1916 if (path == RF_PATH_A) 1917 val &= ~0x1; 1918 else 1919 val &= ~0x2; 1920 } else { 1921 if (path == RF_PATH_A) 1922 val |= 0x1; 1923 else 1924 val |= 0x2; 1925 } 1926 1927 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1, val); 1928 } 1929 1930 static void rtw8922a_adc_en(struct rtw89_dev *rtwdev, bool en, u8 phy_idx) 1931 { 1932 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) { 1933 if (phy_idx == RTW89_PHY_0) 1934 rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en); 1935 else 1936 rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en); 1937 } else { 1938 rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en); 1939 rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en); 1940 } 1941 } 1942 1943 static 1944 void rtw8922a_hal_reset(struct rtw89_dev *rtwdev, 1945 enum rtw89_phy_idx phy_idx, enum rtw89_mac_idx mac_idx, 1946 enum rtw89_band band, u32 *tx_en, bool enter) 1947 { 1948 if (enter) { 1949 rtw89_chip_stop_sch_tx(rtwdev, mac_idx, tx_en, RTW89_SCH_TX_SEL_ALL); 1950 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false); 1951 rtw8922a_dfs_en(rtwdev, false, phy_idx); 1952 rtw8922a_tssi_cont_en_phyidx(rtwdev, false, phy_idx); 1953 rtw8922a_adc_en(rtwdev, false, phy_idx); 1954 fsleep(40); 1955 rtw8922a_bb_reset_en(rtwdev, band, false, phy_idx); 1956 } else { 1957 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true); 1958 rtw8922a_adc_en(rtwdev, true, phy_idx); 1959 rtw8922a_dfs_en(rtwdev, true, phy_idx); 1960 rtw8922a_tssi_cont_en_phyidx(rtwdev, true, phy_idx); 1961 rtw8922a_bb_reset_en(rtwdev, band, true, phy_idx); 1962 rtw89_chip_resume_sch_tx(rtwdev, mac_idx, *tx_en); 1963 } 1964 } 1965 1966 static void rtw8922a_set_channel_help(struct rtw89_dev *rtwdev, bool enter, 1967 struct rtw89_channel_help_params *p, 1968 const struct rtw89_chan *chan, 1969 enum rtw89_mac_idx mac_idx, 1970 enum rtw89_phy_idx phy_idx) 1971 { 1972 if (enter) { 1973 rtw8922a_pre_set_channel_bb(rtwdev, phy_idx); 1974 rtw8922a_pre_set_channel_rf(rtwdev, phy_idx); 1975 } 1976 1977 rtw8922a_hal_reset(rtwdev, phy_idx, mac_idx, chan->band_type, &p->tx_en, enter); 1978 1979 if (!enter) { 1980 rtw8922a_post_set_channel_bb(rtwdev, rtwdev->mlo_dbcc_mode, phy_idx); 1981 rtw8922a_post_set_channel_rf(rtwdev, phy_idx); 1982 } 1983 } 1984 1985 static void rtw8922a_rfk_init(struct rtw89_dev *rtwdev) 1986 { 1987 struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc; 1988 1989 rtwdev->is_tssi_mode[RF_PATH_A] = false; 1990 rtwdev->is_tssi_mode[RF_PATH_B] = false; 1991 memset(rfk_mcc, 0, sizeof(*rfk_mcc)); 1992 } 1993 1994 static void rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev) 1995 { 1996 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0); 1997 1998 rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, RTW89_PHY_0, 5); 1999 2000 rtw89_phy_rfk_dack_and_wait(rtwdev, RTW89_PHY_0, chan, 58); 2001 rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, chan, 32); 2002 } 2003 2004 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath) 2005 { 2006 u32 rf_mode; 2007 u8 path; 2008 int ret; 2009 2010 for (path = 0; path < RF_PATH_NUM_8922A; path++) { 2011 if (!(kpath & BIT(path))) 2012 continue; 2013 2014 ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2, 2015 2, 5000, false, rtwdev, path, 0x00, 2016 RR_MOD_MASK); 2017 rtw89_debug(rtwdev, RTW89_DBG_RFK, 2018 "[RFK] Wait S%d to Rx mode!! (ret = %d)\n", 2019 path, ret); 2020 } 2021 } 2022 2023 static void rtw8922a_rfk_channel(struct rtw89_dev *rtwdev, struct rtw89_vif *rtwvif) 2024 { 2025 enum rtw89_chanctx_idx chanctx_idx = rtwvif->chanctx_idx; 2026 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx); 2027 enum rtw89_phy_idx phy_idx = rtwvif->phy_idx; 2028 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB, chanctx_idx); 2029 u32 tx_en; 2030 2031 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_START); 2032 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL); 2033 _wait_rx_mode(rtwdev, RF_AB); 2034 2035 rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5); 2036 rtw89_phy_rfk_txgapk_and_wait(rtwdev, phy_idx, chan, 54); 2037 rtw89_phy_rfk_iqk_and_wait(rtwdev, phy_idx, chan, 84); 2038 rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_NORMAL, 6); 2039 rtw89_phy_rfk_dpk_and_wait(rtwdev, phy_idx, chan, 34); 2040 rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, chan, 32); 2041 2042 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en); 2043 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_STOP); 2044 } 2045 2046 static void rtw8922a_rfk_band_changed(struct rtw89_dev *rtwdev, 2047 enum rtw89_phy_idx phy_idx, 2048 const struct rtw89_chan *chan) 2049 { 2050 rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_SCAN, 6); 2051 } 2052 2053 static void rtw8922a_rfk_scan(struct rtw89_dev *rtwdev, struct rtw89_vif *rtwvif, 2054 bool start) 2055 { 2056 } 2057 2058 static void rtw8922a_rfk_track(struct rtw89_dev *rtwdev) 2059 { 2060 } 2061 2062 static void rtw8922a_set_txpwr_ref(struct rtw89_dev *rtwdev, 2063 enum rtw89_phy_idx phy_idx) 2064 { 2065 s16 ref_ofdm = 0; 2066 s16 ref_cck = 0; 2067 2068 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n"); 2069 2070 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL, 2071 B_BE_PWR_REF_CTRL_OFDM, ref_ofdm); 2072 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL, 2073 B_BE_PWR_REF_CTRL_CCK, ref_cck); 2074 } 2075 2076 static void rtw8922a_bb_tx_triangular(struct rtw89_dev *rtwdev, bool en, 2077 enum rtw89_phy_idx phy_idx) 2078 { 2079 u8 ctrl = en ? 0x1 : 0x0; 2080 2081 rtw89_phy_write32_idx(rtwdev, R_BEDGE3, B_BEDGE_CFG, ctrl, phy_idx); 2082 } 2083 2084 static void rtw8922a_set_tx_shape(struct rtw89_dev *rtwdev, 2085 const struct rtw89_chan *chan, 2086 enum rtw89_phy_idx phy_idx) 2087 { 2088 const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms; 2089 const struct rtw89_tx_shape *tx_shape = &rfe_parms->tx_shape; 2090 u8 tx_shape_idx; 2091 u8 band, regd; 2092 2093 band = chan->band_type; 2094 regd = rtw89_regd_get(rtwdev, band); 2095 tx_shape_idx = (*tx_shape->lmt)[band][RTW89_RS_OFDM][regd]; 2096 2097 if (tx_shape_idx == 0) 2098 rtw8922a_bb_tx_triangular(rtwdev, false, phy_idx); 2099 else 2100 rtw8922a_bb_tx_triangular(rtwdev, true, phy_idx); 2101 } 2102 2103 static void rtw8922a_set_txpwr(struct rtw89_dev *rtwdev, 2104 const struct rtw89_chan *chan, 2105 enum rtw89_phy_idx phy_idx) 2106 { 2107 rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx); 2108 rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx); 2109 rtw8922a_set_tx_shape(rtwdev, chan, phy_idx); 2110 rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx); 2111 rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx); 2112 } 2113 2114 static void rtw8922a_set_txpwr_ctrl(struct rtw89_dev *rtwdev, 2115 enum rtw89_phy_idx phy_idx) 2116 { 2117 rtw8922a_set_txpwr_ref(rtwdev, phy_idx); 2118 } 2119 2120 static void rtw8922a_ctrl_trx_path(struct rtw89_dev *rtwdev, 2121 enum rtw89_rf_path tx_path, u8 tx_nss, 2122 enum rtw89_rf_path rx_path, u8 rx_nss) 2123 { 2124 enum rtw89_phy_idx phy_idx; 2125 2126 for (phy_idx = RTW89_PHY_0; phy_idx <= RTW89_PHY_1; phy_idx++) { 2127 rtw8922a_ctrl_tx_path_tmac(rtwdev, tx_path, phy_idx); 2128 rtw8922a_ctrl_rx_path_tmac(rtwdev, rx_path, phy_idx); 2129 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx); 2130 } 2131 } 2132 2133 static void rtw8922a_ctrl_nbtg_bt_tx(struct rtw89_dev *rtwdev, bool en, 2134 enum rtw89_phy_idx phy_idx) 2135 { 2136 if (en) { 2137 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x3, phy_idx); 2138 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A, 2139 0xf, phy_idx); 2140 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A, 2141 0x0, phy_idx); 2142 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x0, phy_idx); 2143 rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x80, phy_idx); 2144 rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x8080, phy_idx); 2145 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x34, phy_idx); 2146 rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x34, phy_idx); 2147 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x3, phy_idx); 2148 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B, 2149 0xf, phy_idx); 2150 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B, 2151 0x0, phy_idx); 2152 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x0, phy_idx); 2153 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x80, phy_idx); 2154 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x8080, phy_idx); 2155 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x34, phy_idx); 2156 rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x34, phy_idx); 2157 } else { 2158 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x0, phy_idx); 2159 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A, 2160 0x0, phy_idx); 2161 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A, 2162 0x1, phy_idx); 2163 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x1, phy_idx); 2164 rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x1a, phy_idx); 2165 rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x2a2a, phy_idx); 2166 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x7a6, phy_idx); 2167 rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x26, phy_idx); 2168 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x0, phy_idx); 2169 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B, 2170 0x0, phy_idx); 2171 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B, 2172 0x1, phy_idx); 2173 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x1, phy_idx); 2174 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx); 2175 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x2a30, phy_idx); 2176 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x7a6, phy_idx); 2177 rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x26, phy_idx); 2178 } 2179 } 2180 2181 static void rtw8922a_bb_cfg_txrx_path(struct rtw89_dev *rtwdev) 2182 { 2183 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0); 2184 enum rtw89_band band = chan->band_type; 2185 struct rtw89_hal *hal = &rtwdev->hal; 2186 u8 ntx_path = RF_PATH_AB; 2187 u32 tx_en0, tx_en1; 2188 2189 if (hal->antenna_tx == RF_A) 2190 ntx_path = RF_PATH_A; 2191 else if (hal->antenna_tx == RF_B) 2192 ntx_path = RF_PATH_B; 2193 2194 rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, true); 2195 if (rtwdev->dbcc_en) 2196 rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band, 2197 &tx_en1, true); 2198 2199 rtw8922a_ctrl_trx_path(rtwdev, ntx_path, 2, RF_PATH_AB, 2); 2200 2201 rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, false); 2202 if (rtwdev->dbcc_en) 2203 rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band, 2204 &tx_en1, false); 2205 } 2206 2207 static u8 rtw8922a_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path) 2208 { 2209 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim; 2210 int th; 2211 2212 /* read thermal only if debugging */ 2213 if (!rtw89_debug_is_enabled(rtwdev, RTW89_DBG_CFO | RTW89_DBG_RFK_TRACK)) 2214 return 80; 2215 2216 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1); 2217 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0); 2218 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1); 2219 2220 fsleep(200); 2221 2222 th = rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL_V1); 2223 th += (s8)info->thermal_trim[rf_path]; 2224 2225 return clamp_t(int, th, 0, U8_MAX); 2226 } 2227 2228 static void rtw8922a_btc_set_rfe(struct rtw89_dev *rtwdev) 2229 { 2230 union rtw89_btc_module_info *md = &rtwdev->btc.mdinfo; 2231 struct rtw89_btc_module_v7 *module = &md->md_v7; 2232 2233 module->rfe_type = rtwdev->efuse.rfe_type; 2234 module->kt_ver = rtwdev->hal.cv; 2235 module->bt_solo = 0; 2236 module->switch_type = BTC_SWITCH_INTERNAL; 2237 module->wa_type = 0; 2238 2239 module->ant.type = BTC_ANT_SHARED; 2240 module->ant.num = 2; 2241 module->ant.isolation = 10; 2242 module->ant.diversity = 0; 2243 module->ant.single_pos = RF_PATH_A; 2244 module->ant.btg_pos = RF_PATH_B; 2245 2246 if (module->kt_ver <= 1) 2247 module->wa_type |= BTC_WA_HFP_ZB; 2248 2249 rtwdev->btc.cx.other.type = BTC_3CX_NONE; 2250 2251 if (module->rfe_type == 0) { 2252 rtwdev->btc.dm.error.map.rfe_type0 = true; 2253 return; 2254 } 2255 2256 module->ant.num = (module->rfe_type % 2) ? 2 : 3; 2257 2258 if (module->kt_ver == 0) 2259 module->ant.num = 2; 2260 2261 if (module->ant.num == 3) { 2262 module->ant.type = BTC_ANT_DEDICATED; 2263 module->bt_pos = BTC_BT_ALONE; 2264 } else { 2265 module->ant.type = BTC_ANT_SHARED; 2266 module->bt_pos = BTC_BT_BTG; 2267 } 2268 rtwdev->btc.btg_pos = module->ant.btg_pos; 2269 rtwdev->btc.ant_type = module->ant.type; 2270 } 2271 2272 static 2273 void rtw8922a_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val) 2274 { 2275 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group); 2276 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val); 2277 } 2278 2279 static void rtw8922a_btc_init_cfg(struct rtw89_dev *rtwdev) 2280 { 2281 struct rtw89_btc *btc = &rtwdev->btc; 2282 struct rtw89_btc_ant_info_v7 *ant = &btc->mdinfo.md_v7.ant; 2283 u32 wl_pri, path_min, path_max; 2284 u8 path; 2285 2286 /* for 1-Ant && 1-ss case: only 1-path */ 2287 if (ant->num == 1) { 2288 path_min = ant->single_pos; 2289 path_max = path_min; 2290 } else { 2291 path_min = RF_PATH_A; 2292 path_max = RF_PATH_B; 2293 } 2294 2295 path = path_min; 2296 2297 for (path = path_min; path <= path_max; path++) { 2298 /* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */ 2299 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, BIT(17)); 2300 2301 /* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU */ 2302 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_SS_GROUP, 0x5ff); 2303 2304 /* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */ 2305 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_RX_GROUP, 0x5df); 2306 2307 /* if GNT_WL = 0 && BT = Tx_group --> 2308 * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff) 2309 */ 2310 if (btc->ant_type == BTC_ANT_SHARED && btc->btg_pos == path) 2311 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x55f); 2312 else 2313 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x5ff); 2314 2315 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0); 2316 } 2317 2318 /* set WL PTA Hi-Pri: Ack-Tx, beacon-tx, Trig-frame-Tx, Null-Tx*/ 2319 wl_pri = B_BTC_RSP_ACK_HI | B_BTC_TX_BCN_HI | B_BTC_TX_TRI_HI | 2320 B_BTC_TX_NULL_HI; 2321 rtw89_write32(rtwdev, R_BTC_COEX_WL_REQ_BE, wl_pri); 2322 2323 /* set PTA break table */ 2324 rtw89_write32(rtwdev, R_BE_BT_BREAK_TABLE, BTC_BREAK_PARAM); 2325 2326 /* ZB coex table init for HFP PTA req-cmd bit-4 define issue COEX-900*/ 2327 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_0, 0xda5a5a5a); 2328 2329 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_1, 0xda5a5a5a); 2330 2331 rtw89_write32(rtwdev, R_BTC_ZB_BREAK_TBL, 0xf0ffffff); 2332 btc->cx.wl.status.map.init_ok = true; 2333 } 2334 2335 static void 2336 rtw8922a_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val) 2337 { 2338 u16 ctrl_all_time = u32_get_bits(txpwr_val, GENMASK(15, 0)); 2339 u16 ctrl_gnt_bt = u32_get_bits(txpwr_val, GENMASK(31, 16)); 2340 2341 switch (ctrl_all_time) { 2342 case 0xffff: 2343 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2344 B_BE_FORCE_PWR_BY_RATE_EN, 0x0); 2345 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2346 B_BE_FORCE_PWR_BY_RATE_VAL, 0x0); 2347 break; 2348 default: 2349 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2350 B_BE_FORCE_PWR_BY_RATE_VAL, ctrl_all_time); 2351 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL, 2352 B_BE_FORCE_PWR_BY_RATE_EN, 0x1); 2353 break; 2354 } 2355 2356 switch (ctrl_gnt_bt) { 2357 case 0xffff: 2358 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL, 2359 B_BE_PWR_BT_EN, 0x0); 2360 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL, 2361 B_BE_PWR_BT_VAL, 0x0); 2362 break; 2363 default: 2364 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL, 2365 B_BE_PWR_BT_VAL, ctrl_gnt_bt); 2366 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL, 2367 B_BE_PWR_BT_EN, 0x1); 2368 break; 2369 } 2370 } 2371 2372 static 2373 s8 rtw8922a_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val) 2374 { 2375 return clamp_t(s8, val, -100, 0) + 100; 2376 } 2377 2378 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_ul[] = { 2379 {255, 0, 0, 7}, /* 0 -> original */ 2380 {255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */ 2381 {255, 0, 0, 7}, /* 2 ->reserved for shared-antenna */ 2382 {255, 0, 0, 7}, /* 3- >reserved for shared-antenna */ 2383 {255, 0, 0, 7}, /* 4 ->reserved for shared-antenna */ 2384 {255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */ 2385 {6, 1, 0, 7}, 2386 {13, 1, 0, 7}, 2387 {13, 1, 0, 7} 2388 }; 2389 2390 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_dl[] = { 2391 {255, 0, 0, 7}, /* 0 -> original */ 2392 {255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */ 2393 {255, 0, 0, 7}, /* 2 ->reserved for shared-antenna */ 2394 {255, 0, 0, 7}, /* 3- >reserved for shared-antenna */ 2395 {255, 0, 0, 7}, /* 4 ->reserved for shared-antenna */ 2396 {255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */ 2397 {255, 1, 0, 7}, 2398 {255, 1, 0, 7}, 2399 {255, 1, 0, 7} 2400 }; 2401 2402 static const u8 rtw89_btc_8922a_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30}; 2403 static const u8 rtw89_btc_8922a_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {50, 40, 30, 20}; 2404 2405 static const struct rtw89_btc_fbtc_mreg rtw89_btc_8922a_mon_reg[] = { 2406 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe300), 2407 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe320), 2408 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe324), 2409 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe328), 2410 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe32c), 2411 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe330), 2412 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe334), 2413 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe338), 2414 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe344), 2415 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe348), 2416 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe34c), 2417 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe350), 2418 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a2c), 2419 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a50), 2420 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980), 2421 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x660), 2422 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x1660), 2423 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x418c), 2424 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x518c), 2425 }; 2426 2427 static 2428 void rtw8922a_btc_update_bt_cnt(struct rtw89_dev *rtwdev) 2429 { 2430 /* Feature move to firmware */ 2431 } 2432 2433 static 2434 void rtw8922a_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state) 2435 { 2436 if (!state) { 2437 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000); 2438 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1); 2439 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110); 2440 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x01018); 2441 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000); 2442 2443 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000); 2444 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1); 2445 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110); 2446 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x01018); 2447 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000); 2448 } else { 2449 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000); 2450 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1); 2451 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110); 2452 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x09018); 2453 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000); 2454 2455 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000); 2456 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1); 2457 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110); 2458 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x09018); 2459 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000); 2460 } 2461 } 2462 2463 static void rtw8922a_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level) 2464 { 2465 } 2466 2467 static void rtw8922a_fill_freq_with_ppdu(struct rtw89_dev *rtwdev, 2468 struct rtw89_rx_phy_ppdu *phy_ppdu, 2469 struct ieee80211_rx_status *status) 2470 { 2471 u8 chan_idx = phy_ppdu->chan_idx; 2472 enum nl80211_band band; 2473 u8 ch; 2474 2475 if (chan_idx == 0) 2476 return; 2477 2478 rtw89_decode_chan_idx(rtwdev, chan_idx, &ch, &band); 2479 status->freq = ieee80211_channel_to_frequency(ch, band); 2480 status->band = band; 2481 } 2482 2483 static void rtw8922a_query_ppdu(struct rtw89_dev *rtwdev, 2484 struct rtw89_rx_phy_ppdu *phy_ppdu, 2485 struct ieee80211_rx_status *status) 2486 { 2487 u8 path; 2488 u8 *rx_power = phy_ppdu->rssi; 2489 2490 status->signal = 2491 RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B])); 2492 for (path = 0; path < rtwdev->chip->rf_path_num; path++) { 2493 status->chains |= BIT(path); 2494 status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]); 2495 } 2496 if (phy_ppdu->valid) 2497 rtw8922a_fill_freq_with_ppdu(rtwdev, phy_ppdu, status); 2498 } 2499 2500 static void rtw8922a_convert_rpl_to_rssi(struct rtw89_dev *rtwdev, 2501 struct rtw89_rx_phy_ppdu *phy_ppdu) 2502 { 2503 /* Mapping to BW: 5, 10, 20, 40, 80, 160, 80_80 */ 2504 static const u8 bw_compensate[] = {0, 0, 0, 6, 12, 18, 0}; 2505 u8 *rssi = phy_ppdu->rssi; 2506 u8 compensate = 0; 2507 u16 rpl_tmp; 2508 u8 i; 2509 2510 if (phy_ppdu->bw_idx < ARRAY_SIZE(bw_compensate)) 2511 compensate = bw_compensate[phy_ppdu->bw_idx]; 2512 2513 for (i = 0; i < RF_PATH_NUM_8922A; i++) { 2514 if (!(phy_ppdu->rx_path_en & BIT(i))) { 2515 rssi[i] = 0; 2516 phy_ppdu->rpl_path[i] = 0; 2517 phy_ppdu->rpl_fd[i] = 0; 2518 } 2519 if (phy_ppdu->rate >= RTW89_HW_RATE_OFDM6) { 2520 rpl_tmp = phy_ppdu->rpl_fd[i]; 2521 if (rpl_tmp) 2522 rpl_tmp += compensate; 2523 2524 phy_ppdu->rpl_path[i] = rpl_tmp; 2525 } 2526 rssi[i] = phy_ppdu->rpl_path[i]; 2527 } 2528 2529 phy_ppdu->rssi_avg = phy_ppdu->rpl_avg; 2530 } 2531 2532 static int rtw8922a_mac_enable_bb_rf(struct rtw89_dev *rtwdev) 2533 { 2534 rtw89_write8_set(rtwdev, R_BE_FEN_RST_ENABLE, 2535 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN); 2536 rtw89_write32(rtwdev, R_BE_DMAC_SYS_CR32B, 0x7FF97FF9); 2537 2538 return 0; 2539 } 2540 2541 static int rtw8922a_mac_disable_bb_rf(struct rtw89_dev *rtwdev) 2542 { 2543 rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, 2544 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN); 2545 2546 return 0; 2547 } 2548 2549 #ifdef CONFIG_PM 2550 static const struct wiphy_wowlan_support rtw_wowlan_stub_8922a = { 2551 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT | 2552 WIPHY_WOWLAN_NET_DETECT, 2553 .n_patterns = RTW89_MAX_PATTERN_NUM, 2554 .pattern_max_len = RTW89_MAX_PATTERN_SIZE, 2555 .pattern_min_len = 1, 2556 .max_nd_match_sets = RTW89_SCANOFLD_MAX_SSID, 2557 }; 2558 #endif 2559 2560 static const struct rtw89_chip_ops rtw8922a_chip_ops = { 2561 .enable_bb_rf = rtw8922a_mac_enable_bb_rf, 2562 .disable_bb_rf = rtw8922a_mac_disable_bb_rf, 2563 .bb_preinit = rtw8922a_bb_preinit, 2564 .bb_postinit = rtw8922a_bb_postinit, 2565 .bb_reset = rtw8922a_bb_reset, 2566 .bb_sethw = rtw8922a_bb_sethw, 2567 .read_rf = rtw89_phy_read_rf_v2, 2568 .write_rf = rtw89_phy_write_rf_v2, 2569 .set_channel = rtw8922a_set_channel, 2570 .set_channel_help = rtw8922a_set_channel_help, 2571 .read_efuse = rtw8922a_read_efuse, 2572 .read_phycap = rtw8922a_read_phycap, 2573 .fem_setup = NULL, 2574 .rfe_gpio = NULL, 2575 .rfk_hw_init = rtw8922a_rfk_hw_init, 2576 .rfk_init = rtw8922a_rfk_init, 2577 .rfk_init_late = rtw8922a_rfk_init_late, 2578 .rfk_channel = rtw8922a_rfk_channel, 2579 .rfk_band_changed = rtw8922a_rfk_band_changed, 2580 .rfk_scan = rtw8922a_rfk_scan, 2581 .rfk_track = rtw8922a_rfk_track, 2582 .power_trim = rtw8922a_power_trim, 2583 .set_txpwr = rtw8922a_set_txpwr, 2584 .set_txpwr_ctrl = rtw8922a_set_txpwr_ctrl, 2585 .init_txpwr_unit = NULL, 2586 .get_thermal = rtw8922a_get_thermal, 2587 .ctrl_btg_bt_rx = rtw8922a_ctrl_btg_bt_rx, 2588 .query_ppdu = rtw8922a_query_ppdu, 2589 .convert_rpl_to_rssi = rtw8922a_convert_rpl_to_rssi, 2590 .ctrl_nbtg_bt_tx = rtw8922a_ctrl_nbtg_bt_tx, 2591 .cfg_txrx_path = rtw8922a_bb_cfg_txrx_path, 2592 .set_txpwr_ul_tb_offset = NULL, 2593 .digital_pwr_comp = rtw8922a_digital_pwr_comp, 2594 .pwr_on_func = rtw8922a_pwr_on_func, 2595 .pwr_off_func = rtw8922a_pwr_off_func, 2596 .query_rxdesc = rtw89_core_query_rxdesc_v2, 2597 .fill_txdesc = rtw89_core_fill_txdesc_v2, 2598 .fill_txdesc_fwcmd = rtw89_core_fill_txdesc_fwcmd_v2, 2599 .cfg_ctrl_path = rtw89_mac_cfg_ctrl_path_v2, 2600 .mac_cfg_gnt = rtw89_mac_cfg_gnt_v2, 2601 .stop_sch_tx = rtw89_mac_stop_sch_tx_v2, 2602 .resume_sch_tx = rtw89_mac_resume_sch_tx_v2, 2603 .h2c_dctl_sec_cam = rtw89_fw_h2c_dctl_sec_cam_v2, 2604 .h2c_default_cmac_tbl = rtw89_fw_h2c_default_cmac_tbl_g7, 2605 .h2c_assoc_cmac_tbl = rtw89_fw_h2c_assoc_cmac_tbl_g7, 2606 .h2c_ampdu_cmac_tbl = rtw89_fw_h2c_ampdu_cmac_tbl_g7, 2607 .h2c_default_dmac_tbl = rtw89_fw_h2c_default_dmac_tbl_v2, 2608 .h2c_update_beacon = rtw89_fw_h2c_update_beacon_be, 2609 .h2c_ba_cam = rtw89_fw_h2c_ba_cam_v1, 2610 2611 .btc_set_rfe = rtw8922a_btc_set_rfe, 2612 .btc_init_cfg = rtw8922a_btc_init_cfg, 2613 .btc_set_wl_pri = NULL, 2614 .btc_set_wl_txpwr_ctrl = rtw8922a_btc_set_wl_txpwr_ctrl, 2615 .btc_get_bt_rssi = rtw8922a_btc_get_bt_rssi, 2616 .btc_update_bt_cnt = rtw8922a_btc_update_bt_cnt, 2617 .btc_wl_s1_standby = rtw8922a_btc_wl_s1_standby, 2618 .btc_set_wl_rx_gain = rtw8922a_btc_set_wl_rx_gain, 2619 .btc_set_policy = rtw89_btc_set_policy_v1, 2620 }; 2621 2622 const struct rtw89_chip_info rtw8922a_chip_info = { 2623 .chip_id = RTL8922A, 2624 .chip_gen = RTW89_CHIP_BE, 2625 .ops = &rtw8922a_chip_ops, 2626 .mac_def = &rtw89_mac_gen_be, 2627 .phy_def = &rtw89_phy_gen_be, 2628 .fw_basename = RTW8922A_FW_BASENAME, 2629 .fw_format_max = RTW8922A_FW_FORMAT_MAX, 2630 .try_ce_fw = false, 2631 .bbmcu_nr = 1, 2632 .needed_fw_elms = RTW89_BE_GEN_DEF_NEEDED_FW_ELEMENTS, 2633 .fifo_size = 589824, 2634 .small_fifo_size = false, 2635 .dle_scc_rsvd_size = 0, 2636 .max_amsdu_limit = 8000, 2637 .dis_2g_40m_ul_ofdma = false, 2638 .rsvd_ple_ofst = 0x8f800, 2639 .hfc_param_ini = rtw8922a_hfc_param_ini_pcie, 2640 .dle_mem = rtw8922a_dle_mem_pcie, 2641 .wde_qempty_acq_grpnum = 4, 2642 .wde_qempty_mgq_grpsel = 4, 2643 .rf_base_addr = {0xe000, 0xf000}, 2644 .pwr_on_seq = NULL, 2645 .pwr_off_seq = NULL, 2646 .bb_table = NULL, 2647 .bb_gain_table = NULL, 2648 .rf_table = {}, 2649 .nctl_table = NULL, 2650 .nctl_post_table = NULL, 2651 .dflt_parms = NULL, /* load parm from fw */ 2652 .rfe_parms_conf = NULL, /* load parm from fw */ 2653 .txpwr_factor_rf = 2, 2654 .txpwr_factor_mac = 1, 2655 .dig_table = NULL, 2656 .dig_regs = &rtw8922a_dig_regs, 2657 .tssi_dbw_table = NULL, 2658 .support_macid_num = 32, 2659 .support_link_num = 2, 2660 .support_chanctx_num = 2, 2661 .support_rnr = true, 2662 .support_bands = BIT(NL80211_BAND_2GHZ) | 2663 BIT(NL80211_BAND_5GHZ) | 2664 BIT(NL80211_BAND_6GHZ), 2665 .support_bandwidths = BIT(NL80211_CHAN_WIDTH_20) | 2666 BIT(NL80211_CHAN_WIDTH_40) | 2667 BIT(NL80211_CHAN_WIDTH_80) | 2668 BIT(NL80211_CHAN_WIDTH_160), 2669 .support_unii4 = true, 2670 .ul_tb_waveform_ctrl = false, 2671 .ul_tb_pwr_diff = false, 2672 .hw_sec_hdr = true, 2673 .hw_mgmt_tx_encrypt = true, 2674 .rf_path_num = 2, 2675 .tx_nss = 2, 2676 .rx_nss = 2, 2677 .acam_num = 128, 2678 .bcam_num = 20, 2679 .scam_num = 32, 2680 .bacam_num = 24, 2681 .bacam_dynamic_num = 8, 2682 .bacam_ver = RTW89_BACAM_V1, 2683 .ppdu_max_usr = 16, 2684 .sec_ctrl_efuse_size = 4, 2685 .physical_efuse_size = 0x1300, 2686 .logical_efuse_size = 0x70000, 2687 .limit_efuse_size = 0x40000, 2688 .dav_phy_efuse_size = 0, 2689 .dav_log_efuse_size = 0, 2690 .efuse_blocks = rtw8922a_efuse_blocks, 2691 .phycap_addr = 0x1700, 2692 .phycap_size = 0x38, 2693 .para_ver = 0xf, 2694 .wlcx_desired = 0x07110000, 2695 .btcx_desired = 0x7, 2696 .scbd = 0x1, 2697 .mailbox = 0x1, 2698 2699 .afh_guard_ch = 6, 2700 .wl_rssi_thres = rtw89_btc_8922a_wl_rssi_thres, 2701 .bt_rssi_thres = rtw89_btc_8922a_bt_rssi_thres, 2702 .rssi_tol = 2, 2703 .mon_reg_num = ARRAY_SIZE(rtw89_btc_8922a_mon_reg), 2704 .mon_reg = rtw89_btc_8922a_mon_reg, 2705 .rf_para_ulink_num = ARRAY_SIZE(rtw89_btc_8922a_rf_ul), 2706 .rf_para_ulink = rtw89_btc_8922a_rf_ul, 2707 .rf_para_dlink_num = ARRAY_SIZE(rtw89_btc_8922a_rf_dl), 2708 .rf_para_dlink = rtw89_btc_8922a_rf_dl, 2709 .ps_mode_supported = BIT(RTW89_PS_MODE_RFOFF) | 2710 BIT(RTW89_PS_MODE_CLK_GATED) | 2711 BIT(RTW89_PS_MODE_PWR_GATED), 2712 .low_power_hci_modes = 0, 2713 .h2c_cctl_func_id = H2C_FUNC_MAC_CCTLINFO_UD_G7, 2714 .hci_func_en_addr = R_BE_HCI_FUNC_EN, 2715 .h2c_desc_size = sizeof(struct rtw89_rxdesc_short_v2), 2716 .txwd_body_size = sizeof(struct rtw89_txwd_body_v2), 2717 .txwd_info_size = sizeof(struct rtw89_txwd_info_v2), 2718 .h2c_ctrl_reg = R_BE_H2CREG_CTRL, 2719 .h2c_counter_reg = {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8}, 2720 .h2c_regs = rtw8922a_h2c_regs, 2721 .c2h_ctrl_reg = R_BE_C2HREG_CTRL, 2722 .c2h_counter_reg = {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8}, 2723 .c2h_regs = rtw8922a_c2h_regs, 2724 .page_regs = &rtw8922a_page_regs, 2725 .wow_reason_reg = rtw8922a_wow_wakeup_regs, 2726 .cfo_src_fd = true, 2727 .cfo_hw_comp = true, 2728 .dcfo_comp = NULL, 2729 .dcfo_comp_sft = 0, 2730 .imr_info = NULL, 2731 .imr_dmac_table = &rtw8922a_imr_dmac_table, 2732 .imr_cmac_table = &rtw8922a_imr_cmac_table, 2733 .rrsr_cfgs = &rtw8922a_rrsr_cfgs, 2734 .bss_clr_vld = {R_BSS_CLR_VLD_V2, B_BSS_CLR_VLD0_V2}, 2735 .bss_clr_map_reg = R_BSS_CLR_MAP_V2, 2736 .rfkill_init = &rtw8922a_rfkill_regs, 2737 .rfkill_get = {R_BE_GPIO_EXT_CTRL, B_BE_GPIO_IN_9}, 2738 .dma_ch_mask = 0, 2739 .edcca_regs = &rtw8922a_edcca_regs, 2740 #ifdef CONFIG_PM 2741 .wowlan_stub = &rtw_wowlan_stub_8922a, 2742 #endif 2743 .xtal_info = NULL, 2744 }; 2745 EXPORT_SYMBOL(rtw8922a_chip_info); 2746 2747 MODULE_FIRMWARE(RTW8922A_MODULE_FIRMWARE); 2748 MODULE_AUTHOR("Realtek Corporation"); 2749 MODULE_DESCRIPTION("Realtek 802.11be wireless 8922A driver"); 2750 MODULE_LICENSE("Dual BSD/GPL"); 2751