1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2023 Realtek Corporation
3 */
4
5 #include "chan.h"
6 #include "coex.h"
7 #include "debug.h"
8 #include "efuse.h"
9 #include "fw.h"
10 #include "mac.h"
11 #include "phy.h"
12 #include "reg.h"
13 #include "rtw8922a.h"
14 #include "rtw8922a_rfk.h"
15 #include "util.h"
16
17 #define RTW8922A_FW_FORMAT_MAX 2
18 #define RTW8922A_FW_BASENAME "rtw89/rtw8922a_fw"
19 #define RTW8922A_MODULE_FIRMWARE \
20 RTW8922A_FW_BASENAME "-" __stringify(RTW8922A_FW_FORMAT_MAX) ".bin"
21
22 #define HE_N_USER_MAX_8922A 4
23
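/* HCI flow-control page config per DMA channel: {min reserved pages, max pages, page group} */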
24 static const struct rtw89_hfc_ch_cfg rtw8922a_hfc_chcfg_pcie[] = {
25 {2, 1641, grp_0}, /* ACH 0 */
26 {2, 1641, grp_0}, /* ACH 1 */
27 {2, 1641, grp_0}, /* ACH 2 */
28 {2, 1641, grp_0}, /* ACH 3 */
29 {2, 1641, grp_1}, /* ACH 4 */
30 {2, 1641, grp_1}, /* ACH 5 */
31 {2, 1641, grp_1}, /* ACH 6 */
32 {2, 1641, grp_1}, /* ACH 7 */
33 {2, 1641, grp_0}, /* B0MGQ */
34 {2, 1641, grp_0}, /* B0HIQ */
35 {2, 1641, grp_1}, /* B1MGQ */
36 {2, 1641, grp_1}, /* B1HIQ */
37 {0, 0, 0}, /* FWCMDQ */
38 {0, 0, 0}, /* BMC */
39 {0, 0, 0}, /* H2D */
40 };
41
42 static const struct rtw89_hfc_pub_cfg rtw8922a_hfc_pubcfg_pcie = {
43 1651, /* Group 0 */
44 1651, /* Group 1 */
45 3302, /* Public Max */
46 0, /* WP threshold */
47 };
48
49 static const struct rtw89_hfc_param_ini rtw8922a_hfc_param_ini_pcie[] = {
50 [RTW89_QTA_SCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
51 &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
52 [RTW89_QTA_DBCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
53 &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
54 [RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_prec_cfg_c2,
55 RTW89_HCIFC_POH},
56 [RTW89_QTA_INVALID] = {NULL},
57 };
58
59 static const struct rtw89_dle_mem rtw8922a_dle_mem_pcie[] = {
60 [RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size0_v1,
61 &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
62 &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
63 &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
64 &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
65 [RTW89_QTA_DBCC] = {RTW89_QTA_DBCC, &rtw89_mac_size.wde_size0_v1,
66 &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
67 &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
68 &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
69 &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
70 [RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size4_v1,
71 &rtw89_mac_size.ple_size3_v1, &rtw89_mac_size.wde_qt4,
72 &rtw89_mac_size.wde_qt4, &rtw89_mac_size.ple_qt9,
73 &rtw89_mac_size.ple_qt9, &rtw89_mac_size.ple_rsvd_qt1,
74 &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
75 [RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
76 NULL},
77 };
78
79 static const u32 rtw8922a_h2c_regs[RTW89_H2CREG_MAX] = {
80 R_BE_H2CREG_DATA0, R_BE_H2CREG_DATA1, R_BE_H2CREG_DATA2,
81 R_BE_H2CREG_DATA3
82 };
83
84 static const u32 rtw8922a_c2h_regs[RTW89_C2HREG_MAX] = {
85 R_BE_C2HREG_DATA0, R_BE_C2HREG_DATA1, R_BE_C2HREG_DATA2,
86 R_BE_C2HREG_DATA3
87 };
88
89 static const u32 rtw8922a_wow_wakeup_regs[RTW89_WOW_REASON_NUM] = {
90 R_AX_C2HREG_DATA3_V1 + 3, R_BE_DBG_WOW,
91 };
92
93 static const struct rtw89_page_regs rtw8922a_page_regs = {
94 .hci_fc_ctrl = R_BE_HCI_FC_CTRL,
95 .ch_page_ctrl = R_BE_CH_PAGE_CTRL,
96 .ach_page_ctrl = R_BE_CH0_PAGE_CTRL,
97 .ach_page_info = R_BE_CH0_PAGE_INFO,
98 .pub_page_info3 = R_BE_PUB_PAGE_INFO3,
99 .pub_page_ctrl1 = R_BE_PUB_PAGE_CTRL1,
100 .pub_page_ctrl2 = R_BE_PUB_PAGE_CTRL2,
101 .pub_page_info1 = R_BE_PUB_PAGE_INFO1,
102 .pub_page_info2 = R_BE_PUB_PAGE_INFO2,
103 .wp_page_ctrl1 = R_BE_WP_PAGE_CTRL1,
104 .wp_page_ctrl2 = R_BE_WP_PAGE_CTRL2,
105 .wp_page_info1 = R_BE_WP_PAGE_INFO1,
106 };
107
108 static const struct rtw89_reg_imr rtw8922a_imr_dmac_regs[] = {
109 {R_BE_DISP_HOST_IMR, B_BE_DISP_HOST_IMR_CLR, B_BE_DISP_HOST_IMR_SET},
110 {R_BE_DISP_CPU_IMR, B_BE_DISP_CPU_IMR_CLR, B_BE_DISP_CPU_IMR_SET},
111 {R_BE_DISP_OTHER_IMR, B_BE_DISP_OTHER_IMR_CLR, B_BE_DISP_OTHER_IMR_SET},
112 {R_BE_PKTIN_ERR_IMR, B_BE_PKTIN_ERR_IMR_CLR, B_BE_PKTIN_ERR_IMR_SET},
113 {R_BE_INTERRUPT_MASK_REG, B_BE_INTERRUPT_MASK_REG_CLR, B_BE_INTERRUPT_MASK_REG_SET},
114 {R_BE_MLO_ERR_IDCT_IMR, B_BE_MLO_ERR_IDCT_IMR_CLR, B_BE_MLO_ERR_IDCT_IMR_SET},
115 {R_BE_MPDU_TX_ERR_IMR, B_BE_MPDU_TX_ERR_IMR_CLR, B_BE_MPDU_TX_ERR_IMR_SET},
116 {R_BE_MPDU_RX_ERR_IMR, B_BE_MPDU_RX_ERR_IMR_CLR, B_BE_MPDU_RX_ERR_IMR_SET},
117 {R_BE_SEC_ERROR_IMR, B_BE_SEC_ERROR_IMR_CLR, B_BE_SEC_ERROR_IMR_SET},
118 {R_BE_CPUIO_ERR_IMR, B_BE_CPUIO_ERR_IMR_CLR, B_BE_CPUIO_ERR_IMR_SET},
119 {R_BE_WDE_ERR_IMR, B_BE_WDE_ERR_IMR_CLR, B_BE_WDE_ERR_IMR_SET},
120 {R_BE_WDE_ERR1_IMR, B_BE_WDE_ERR1_IMR_CLR, B_BE_WDE_ERR1_IMR_SET},
121 {R_BE_PLE_ERR_IMR, B_BE_PLE_ERR_IMR_CLR, B_BE_PLE_ERR_IMR_SET},
122 {R_BE_PLE_ERRFLAG1_IMR, B_BE_PLE_ERRFLAG1_IMR_CLR, B_BE_PLE_ERRFLAG1_IMR_SET},
123 {R_BE_WDRLS_ERR_IMR, B_BE_WDRLS_ERR_IMR_CLR, B_BE_WDRLS_ERR_IMR_SET},
124 {R_BE_TXPKTCTL_B0_ERRFLAG_IMR, B_BE_TXPKTCTL_B0_ERRFLAG_IMR_CLR,
125 B_BE_TXPKTCTL_B0_ERRFLAG_IMR_SET},
126 {R_BE_TXPKTCTL_B1_ERRFLAG_IMR, B_BE_TXPKTCTL_B1_ERRFLAG_IMR_CLR,
127 B_BE_TXPKTCTL_B1_ERRFLAG_IMR_SET},
128 {R_BE_BBRPT_COM_ERR_IMR, B_BE_BBRPT_COM_ERR_IMR_CLR, B_BE_BBRPT_COM_ERR_IMR_SET},
129 {R_BE_BBRPT_CHINFO_ERR_IMR, B_BE_BBRPT_CHINFO_ERR_IMR_CLR,
130 B_BE_BBRPT_CHINFO_ERR_IMR_SET},
131 {R_BE_BBRPT_DFS_ERR_IMR, B_BE_BBRPT_DFS_ERR_IMR_CLR, B_BE_BBRPT_DFS_ERR_IMR_SET},
132 {R_BE_LA_ERRFLAG_IMR, B_BE_LA_ERRFLAG_IMR_CLR, B_BE_LA_ERRFLAG_IMR_SET},
133 {R_BE_CH_INFO_DBGFLAG_IMR, B_BE_CH_INFO_DBGFLAG_IMR_CLR, B_BE_CH_INFO_DBGFLAG_IMR_SET},
134 {R_BE_PLRLS_ERR_IMR, B_BE_PLRLS_ERR_IMR_CLR, B_BE_PLRLS_ERR_IMR_SET},
135 {R_BE_HAXI_IDCT_MSK, B_BE_HAXI_IDCT_MSK_CLR, B_BE_HAXI_IDCT_MSK_SET},
136 };
137
138 static const struct rtw89_imr_table rtw8922a_imr_dmac_table = {
139 .regs = rtw8922a_imr_dmac_regs,
140 .n_regs = ARRAY_SIZE(rtw8922a_imr_dmac_regs),
141 };
142
143 static const struct rtw89_reg_imr rtw8922a_imr_cmac_regs[] = {
144 {R_BE_RESP_IMR, B_BE_RESP_IMR_CLR, B_BE_RESP_IMR_SET},
145 {R_BE_RX_ERROR_FLAG_IMR, B_BE_RX_ERROR_FLAG_IMR_CLR, B_BE_RX_ERROR_FLAG_IMR_SET},
146 {R_BE_TX_ERROR_FLAG_IMR, B_BE_TX_ERROR_FLAG_IMR_CLR, B_BE_TX_ERROR_FLAG_IMR_SET},
147 {R_BE_RX_ERROR_FLAG_IMR_1, B_BE_TX_ERROR_FLAG_IMR_1_CLR, B_BE_TX_ERROR_FLAG_IMR_1_SET},
148 {R_BE_PTCL_IMR1, B_BE_PTCL_IMR1_CLR, B_BE_PTCL_IMR1_SET},
149 {R_BE_PTCL_IMR0, B_BE_PTCL_IMR0_CLR, B_BE_PTCL_IMR0_SET},
150 {R_BE_PTCL_IMR_2, B_BE_PTCL_IMR_2_CLR, B_BE_PTCL_IMR_2_SET},
151 {R_BE_SCHEDULE_ERR_IMR, B_BE_SCHEDULE_ERR_IMR_CLR, B_BE_SCHEDULE_ERR_IMR_SET},
152 {R_BE_C0_TXPWR_IMR, B_BE_C0_TXPWR_IMR_CLR, B_BE_C0_TXPWR_IMR_SET},
153 {R_BE_TRXPTCL_ERROR_INDICA_MASK, B_BE_TRXPTCL_ERROR_INDICA_MASK_CLR,
154 B_BE_TRXPTCL_ERROR_INDICA_MASK_SET},
155 {R_BE_RX_ERR_IMR, B_BE_RX_ERR_IMR_CLR, B_BE_RX_ERR_IMR_SET},
156 {R_BE_PHYINFO_ERR_IMR_V1, B_BE_PHYINFO_ERR_IMR_V1_CLR, B_BE_PHYINFO_ERR_IMR_V1_SET},
157 };
158
159 static const struct rtw89_imr_table rtw8922a_imr_cmac_table = {
160 .regs = rtw8922a_imr_cmac_regs,
161 .n_regs = ARRAY_SIZE(rtw8922a_imr_cmac_regs),
162 };
163
164 static const struct rtw89_rrsr_cfgs rtw8922a_rrsr_cfgs = {
165 .ref_rate = {R_BE_TRXPTCL_RESP_1, B_BE_WMAC_RESP_REF_RATE_SEL, 0},
166 .rsc = {R_BE_PTCL_RRSR1, B_BE_RSC_MASK, 2},
167 };
168
169 static const struct rtw89_rfkill_regs rtw8922a_rfkill_regs = {
170 .pinmux = {R_BE_GPIO8_15_FUNC_SEL,
171 B_BE_PINMUX_GPIO9_FUNC_SEL_MASK,
172 0xf},
173 .mode = {R_BE_GPIO_EXT_CTRL + 2,
174 (B_BE_GPIO_MOD_9 | B_BE_GPIO_IO_SEL_9) >> 16,
175 0x0},
176 };
177
178 static const struct rtw89_dig_regs rtw8922a_dig_regs = {
179 .seg0_pd_reg = R_SEG0R_PD_V2,
180 .pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
181 .pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1,
182 .bmode_pd_reg = R_BMODE_PDTH_EN_V2,
183 .bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
184 .bmode_pd_lower_bound_reg = R_BMODE_PDTH_V2,
185 .bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
186 .p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
187 .p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
188 .p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
189 .p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
190 .p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
191 .p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
192 .p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V3,
193 B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
194 .p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V3,
195 B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
196 .p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V3,
197 B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
198 .p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V3,
199 B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
200 };
201
202 static const struct rtw89_edcca_regs rtw8922a_edcca_regs = {
203 .edcca_level = R_SEG0R_EDCCA_LVL_BE,
204 .edcca_mask = B_EDCCA_LVL_MSK0,
205 .edcca_p_mask = B_EDCCA_LVL_MSK1,
206 .ppdu_level = R_SEG0R_PPDU_LVL_BE,
207 .ppdu_mask = B_EDCCA_LVL_MSK1,
208 .rpt_a = R_EDCCA_RPT_A_BE,
209 .rpt_b = R_EDCCA_RPT_B_BE,
210 .rpt_sel = R_EDCCA_RPT_SEL_BE,
211 .rpt_sel_mask = B_EDCCA_RPT_SEL_MSK,
212 .rpt_sel_be = R_EDCCA_RPTREG_SEL_BE,
213 .rpt_sel_be_mask = B_EDCCA_RPTREG_SEL_BE_MSK,
214 .tx_collision_t2r_st = R_TX_COLLISION_T2R_ST_BE,
215 .tx_collision_t2r_st_mask = B_TX_COLLISION_T2R_ST_BE_M,
216 };
217
218 static const struct rtw89_efuse_block_cfg rtw8922a_efuse_blocks[] = {
219 [RTW89_EFUSE_BLOCK_SYS] = {.offset = 0x00000, .size = 0x310},
220 [RTW89_EFUSE_BLOCK_RF] = {.offset = 0x10000, .size = 0x240},
221 [RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO] = {.offset = 0x20000, .size = 0x4800},
222 [RTW89_EFUSE_BLOCK_HCI_DIG_USB] = {.offset = 0x30000, .size = 0x890},
223 [RTW89_EFUSE_BLOCK_HCI_PHY_PCIE] = {.offset = 0x40000, .size = 0x200},
224 [RTW89_EFUSE_BLOCK_HCI_PHY_USB3] = {.offset = 0x50000, .size = 0x80},
225 [RTW89_EFUSE_BLOCK_HCI_PHY_USB2] = {.offset = 0x60000, .size = 0x0},
226 [RTW89_EFUSE_BLOCK_ADIE] = {.offset = 0x70000, .size = 0x10},
227 };
228
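/* Enable/disable BT RX sharing of the WLAN BTG front end: toggles the BT-share
 * and BTG path selects on both paths, adjusts the LNA6/TIA0 operating points,
 * and switches the PMAC GNT and GNT_BT weighting accordingly.
 */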
229 static void rtw8922a_ctrl_btg_bt_rx(struct rtw89_dev *rtwdev, bool en,
230 enum rtw89_phy_idx phy_idx)
231 {
232 if (en) {
233 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x1, phy_idx);
234 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
235 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x1, phy_idx);
236 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x1, phy_idx);
237 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
238 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x30, phy_idx);
239 rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0, phy_idx);
240 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x1, phy_idx);
241 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x2, phy_idx);
242 rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
243 0x1, phy_idx);
244 } else {
245 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x0, phy_idx);
246 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
247 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x0, phy_idx);
248 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x0, phy_idx);
249 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x1a, phy_idx);
250 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x2a, phy_idx);
251 rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xc, phy_idx);
252 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x0, phy_idx);
253 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x0, phy_idx);
254 rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
255 0x0, phy_idx);
256 }
257 }
258
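/* Power-on sequence: release the power FSM holds and wait for system power
 * ready, enable WLON and bring the MAC online, power the AFE/XTAL/LDOs through
 * the XTAL SI interface, then enable the DMAC and CMAC function blocks.
 */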
259 static int rtw8922a_pwr_on_func(struct rtw89_dev *rtwdev)
260 {
261 struct rtw89_hal *hal = &rtwdev->hal;
262 u32 val32;
263 int ret;
264
265 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_AFSM_WLSUS_EN |
266 B_BE_AFSM_PCIE_SUS_EN);
267 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_DIS_WLBT_PDNSUSEN_SOPC);
268 rtw89_write32_set(rtwdev, R_BE_WLLPS_CTRL, B_BE_DIS_WLBT_LPSEN_LOPC);
269 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APDM_HPDN);
270 rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
271
272 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_RDY_SYSPWR,
273 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
274 if (ret)
275 return ret;
276
277 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
278 rtw89_write32_set(rtwdev, R_BE_WLRESUME_CTRL, B_BE_LPSROP_CMAC0 |
279 B_BE_LPSROP_CMAC1);
280 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFN_ONMAC);
281
282 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFN_ONMAC),
283 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
284 if (ret)
285 return ret;
286
287 rtw89_write32_clr(rtwdev, R_BE_AFE_ON_CTRL1, B_BE_REG_CK_MON_CK960M_EN);
288 rtw89_write8_set(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
289 B_BE_POW_PC_LDO_PORT1);
290 rtw89_write32_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
291 B_BE_R_SYM_ISO_ADDA_P12PP);
292 rtw89_write8_set(rtwdev, R_BE_PLATFORM_ENABLE, B_BE_PLATFORM_EN);
293 rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
294
295 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HAXIDMA_IO_ST,
296 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
297 if (ret)
298 return ret;
299
300 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
301 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
302 if (ret)
303 return ret;
304
305 rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
306
307 ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HCI_WLAN_IO_ST,
308 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
309 if (ret)
310 return ret;
311
312 rtw89_write32_clr(rtwdev, R_BE_SYS_SDIO_CTRL, B_BE_PCIE_FORCE_IBX_EN);
313
314 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0x02);
315 if (ret)
316 return ret;
317 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x01, 0x01);
318 if (ret)
319 return ret;
320
321 rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
322
323 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x40, 0x40);
324 if (ret)
325 return ret;
326
327 rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
328
329 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x20, 0x20);
330 if (ret)
331 return ret;
332 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x04, 0x04);
333 if (ret)
334 return ret;
335 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x08, 0x08);
336 if (ret)
337 return ret;
338 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x10);
339 if (ret)
340 return ret;
341 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xEB, 0xFF);
342 if (ret)
343 return ret;
344 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xEB, 0xFF);
345 if (ret)
346 return ret;
347 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x01, 0x01);
348 if (ret)
349 return ret;
350 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x02, 0x02);
351 if (ret)
352 return ret;
353 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x80);
354 if (ret)
355 return ret;
356 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF1, 0, 0x40);
357 if (ret)
358 return ret;
359 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF2, 0, 0x40);
360 if (ret)
361 return ret;
362 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL_1, 0x40, 0x60);
363 if (ret)
364 return ret;
365
366 if (hal->cv != CHIP_CAV) {
367 rtw89_write32_set(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
368 rtw89_write32_set(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_ISO_EB2CORE);
369 rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_B);
370
371 mdelay(1);
372
373 rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_S);
374 rtw89_write32_clr(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
375 }
376
377 rtw89_write32_set(rtwdev, R_BE_DMAC_FUNC_EN,
378 B_BE_MAC_FUNC_EN | B_BE_DMAC_FUNC_EN | B_BE_MPDU_PROC_EN |
379 B_BE_WD_RLS_EN | B_BE_DLE_WDE_EN | B_BE_TXPKT_CTRL_EN |
380 B_BE_STA_SCH_EN | B_BE_DLE_PLE_EN | B_BE_PKT_BUF_EN |
381 B_BE_DMAC_TBL_EN | B_BE_PKT_IN_EN | B_BE_DLE_CPUIO_EN |
382 B_BE_DISPATCHER_EN | B_BE_BBRPT_EN | B_BE_MAC_SEC_EN |
383 B_BE_H_AXIDMA_EN | B_BE_DMAC_MLO_EN | B_BE_PLRLS_EN |
384 B_BE_P_AXIDMA_EN | B_BE_DLE_DATACPUIO_EN | B_BE_LTR_CTL_EN);
385
386 set_bit(RTW89_FLAG_DMAC_FUNC, rtwdev->flags);
387
388 rtw89_write32_set(rtwdev, R_BE_CMAC_SHARE_FUNC_EN,
389 B_BE_CMAC_SHARE_EN | B_BE_RESPBA_EN | B_BE_ADDRSRCH_EN |
390 B_BE_BTCOEX_EN);
391 rtw89_write32_set(rtwdev, R_BE_CMAC_FUNC_EN,
392 B_BE_CMAC_EN | B_BE_CMAC_TXEN | B_BE_CMAC_RXEN |
393 B_BE_SIGB_EN | B_BE_PHYINTF_EN | B_BE_CMAC_DMA_EN |
394 B_BE_PTCLTOP_EN | B_BE_SCHEDULER_EN | B_BE_TMAC_EN |
395 B_BE_RMAC_EN | B_BE_TXTIME_EN | B_BE_RESP_PKTCTL_EN);
396
397 set_bit(RTW89_FLAG_CMAC0_FUNC, rtwdev->flags);
398
399 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
400 B_BE_FEN_BBPLAT_RSTB);
401
402 return 0;
403 }
404
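/* Power-off sequence: roughly the reverse of power-on; power down the
 * AFE/XTAL via XTAL SI, disable HAXIDMA and WLAN IO, then request MAC
 * power-off and enter low-power state.
 */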
405 static int rtw8922a_pwr_off_func(struct rtw89_dev *rtwdev)
406 {
407 u32 val32;
408 int ret;
409
410 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x10, 0x10);
411 if (ret)
412 return ret;
413 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x08);
414 if (ret)
415 return ret;
416 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x04);
417 if (ret)
418 return ret;
419 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xC6, 0xFF);
420 if (ret)
421 return ret;
422 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xC6, 0xFF);
423 if (ret)
424 return ret;
425 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x80, 0x80);
426 if (ret)
427 return ret;
428 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x02);
429 if (ret)
430 return ret;
431 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x01);
432 if (ret)
433 return ret;
434 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0xFF);
435 if (ret)
436 return ret;
437 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x00, 0xFF);
438 if (ret)
439 return ret;
440
441 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
442 B_BE_R_SYM_ISO_ADDA_P12PP);
443 rtw89_write8_clr(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
444 B_BE_POW_PC_LDO_PORT1);
445 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
446 rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
447 B_BE_FEN_BBPLAT_RSTB);
448 rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
449
450 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x20);
451 if (ret)
452 return ret;
453
454 rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
455
456 ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x40);
457 if (ret)
458 return ret;
459
460 rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
461
462 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_IO_ST),
463 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
464 if (ret)
465 return ret;
466
467 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
468 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
469 if (ret)
470 return ret;
471
472 rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
473
474 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HCI_WLAN_IO_ST),
475 1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
476 if (ret)
477 return ret;
478
479 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_OFFMAC);
480
481 ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFM_OFFMAC),
482 1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
483 if (ret)
484 return ret;
485
486 rtw89_write32(rtwdev, R_BE_WLLPS_CTRL, 0x0000A1B2);
487 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_XTAL_OFF_A_DIE);
488 rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
489 rtw89_write32(rtwdev, R_BE_UDM1, 0);
490
491 return 0;
492 }
493
494 static void rtw8922a_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
495 struct rtw8922a_efuse *map)
496 {
497 struct rtw8922a_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
498 u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
499 struct rtw89_tssi_info *tssi = &rtwdev->tssi;
500 u8 i, j;
501
502 tssi->thermal[RF_PATH_A] = map->path_a_therm;
503 tssi->thermal[RF_PATH_B] = map->path_b_therm;
504
505 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
506 memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
507 sizeof(ofst[i]->cck_tssi));
508
509 for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
510 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
511 "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
512 i, j, tssi->tssi_cck[i][j]);
513
514 memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
515 sizeof(ofst[i]->bw40_tssi));
516 memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
517 ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
518 memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
519 sizeof(tssi->tssi_6g_mcs[i]));
520
521 for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
522 rtw89_debug(rtwdev, RTW89_DBG_TSSI,
523 "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
524 i, j, tssi->tssi_mcs[i][j]);
525 }
526 }
527
528 static void rtw8922a_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
529 struct rtw8922a_efuse *map)
530 {
531 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
532 bool all_0xff = true, all_0x00 = true;
533 int i, j;
534 u8 t;
535
536 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_a._2g_cck;
537 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_b._2g_cck;
538 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_a._2g_ofdm;
539 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_b._2g_ofdm;
540 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_a._5g_low;
541 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_b._5g_low;
542 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_a._5g_mid;
543 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_b._5g_mid;
544 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_a._5g_high;
545 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_b._5g_high;
546 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_a._6g_l0;
547 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_b._6g_l0;
548 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_a._6g_l1;
549 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_b._6g_l1;
550 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_a._6g_m0;
551 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_b._6g_m0;
552 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_a._6g_m1;
553 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_b._6g_m1;
554 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_a._6g_h0;
555 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_b._6g_h0;
556 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_a._6g_h1;
557 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_b._6g_h1;
558 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_a._6g_uh0;
559 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_b._6g_uh0;
560 gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_a._6g_uh1;
561 gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_b._6g_uh1;
562
563 for (i = RF_PATH_A; i <= RF_PATH_B; i++)
564 for (j = 0; j < RTW89_GAIN_OFFSET_NR; j++) {
565 t = gain->offset[i][j];
566 if (t != 0xff)
567 all_0xff = false;
568 if (t != 0x0)
569 all_0x00 = false;
570
571 /* transform: sign-bit + U(7,2) to S(8,2) */
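/* e.g. raw 0x83 (sign bit set, magnitude 3) -> 0xfd, i.e. -3 in S(8,2) */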
572 if (t & 0x80)
573 gain->offset[i][j] = (t ^ 0x7f) + 1;
574 }
575
576 gain->offset_valid = !all_0xff && !all_0x00;
577 }
578
579 static void rtw8922a_read_efuse_mac_addr(struct rtw89_dev *rtwdev, u32 addr)
580 {
581 struct rtw89_efuse *efuse = &rtwdev->efuse;
582 u16 val;
583 int i;
584
585 for (i = 0; i < ETH_ALEN; i += 2, addr += 2) {
586 val = rtw89_read16(rtwdev, addr);
587 efuse->addr[i] = val & 0xff;
588 efuse->addr[i + 1] = val >> 8;
589 }
590 }
591
592 static int rtw8922a_read_efuse_pci_sdio(struct rtw89_dev *rtwdev, u8 *log_map)
593 {
594 struct rtw89_efuse *efuse = &rtwdev->efuse;
595
596 if (rtwdev->hci.type == RTW89_HCI_TYPE_PCIE)
597 rtw8922a_read_efuse_mac_addr(rtwdev, 0x3104);
598 else
599 ether_addr_copy(efuse->addr, log_map + 0x001A);
600
601 return 0;
602 }
603
604 static int rtw8922a_read_efuse_usb(struct rtw89_dev *rtwdev, u8 *log_map)
605 {
606 rtw8922a_read_efuse_mac_addr(rtwdev, 0x4078);
607
608 return 0;
609 }
610
611 static int rtw8922a_read_efuse_rf(struct rtw89_dev *rtwdev, u8 *log_map)
612 {
613 struct rtw8922a_efuse *map = (struct rtw8922a_efuse *)log_map;
614 struct rtw89_efuse *efuse = &rtwdev->efuse;
615
616 efuse->rfe_type = map->rfe_type;
617 efuse->xtal_cap = map->xtal_k;
618 efuse->country_code[0] = map->country_code[0];
619 efuse->country_code[1] = map->country_code[1];
620 rtw8922a_efuse_parsing_tssi(rtwdev, map);
621 rtw8922a_efuse_parsing_gain_offset(rtwdev, map);
622
623 rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
624
625 return 0;
626 }
627
628 static int rtw8922a_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map,
629 enum rtw89_efuse_block block)
630 {
631 switch (block) {
632 case RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO:
633 return rtw8922a_read_efuse_pci_sdio(rtwdev, log_map);
634 case RTW89_EFUSE_BLOCK_HCI_DIG_USB:
635 return rtw8922a_read_efuse_usb(rtwdev, log_map);
636 case RTW89_EFUSE_BLOCK_RF:
637 return rtw8922a_read_efuse_rf(rtwdev, log_map);
638 default:
639 return 0;
640 }
641 }
642
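/* Thermal trim is stored as sign + magnitude: bit 6 set means positive,
 * bits 5:0 hold the magnitude; 0xff means the field was not programmed.
 */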
643 #define THM_TRIM_POSITIVE_MASK BIT(6)
644 #define THM_TRIM_MAGNITUDE_MASK GENMASK(5, 0)
645
646 static void rtw8922a_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
647 u8 *phycap_map)
648 {
649 static const u32 thm_trim_addr[RF_PATH_NUM_8922A] = {0x1706, 0x1733};
650 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
651 u32 addr = rtwdev->chip->phycap_addr;
652 bool pg = true;
653 u8 pg_th;
654 s8 val;
655 u8 i;
656
657 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
658 pg_th = phycap_map[thm_trim_addr[i] - addr];
659 if (pg_th == 0xff) {
660 info->thermal_trim[i] = 0;
661 pg = false;
662 break;
663 }
664
665 val = u8_get_bits(pg_th, THM_TRIM_MAGNITUDE_MASK);
666
667 if (!(pg_th & THM_TRIM_POSITIVE_MASK))
668 val *= -1;
669
670 info->thermal_trim[i] = val;
671
672 rtw89_debug(rtwdev, RTW89_DBG_RFK,
673 "[THERMAL][TRIM] path=%d thermal_trim=0x%x (%d)\n",
674 i, pg_th, val);
675 }
676
677 info->pg_thermal_trim = pg;
678 }
679
680 static void rtw8922a_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
681 u8 *phycap_map)
682 {
683 static const u32 pabias_trim_addr[RF_PATH_NUM_8922A] = {0x1707, 0x1734};
684 static const u32 check_pa_pad_trim_addr = 0x1700;
685 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
686 u32 addr = rtwdev->chip->phycap_addr;
687 u8 val;
688 u8 i;
689
690 val = phycap_map[check_pa_pad_trim_addr - addr];
691 if (val != 0xff)
692 info->pg_pa_bias_trim = true;
693
694 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
695 info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
696
697 rtw89_debug(rtwdev, RTW89_DBG_RFK,
698 "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
699 i, info->pa_bias_trim[i]);
700 }
701 }
702
703 static void rtw8922a_pa_bias_trim(struct rtw89_dev *rtwdev)
704 {
705 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
706 u8 pabias_2g, pabias_5g;
707 u8 i;
708
709 if (!info->pg_pa_bias_trim) {
710 rtw89_debug(rtwdev, RTW89_DBG_RFK,
711 "[PA_BIAS][TRIM] no PG, do nothing\n");
712
713 return;
714 }
715
716 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
717 pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
718 pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
719
720 rtw89_debug(rtwdev, RTW89_DBG_RFK,
721 "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
722 i, pabias_2g, pabias_5g);
723
724 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG_V1, pabias_2g);
725 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA_V1, pabias_5g);
726 }
727 }
728
729 static void rtw8922a_phycap_parsing_pad_bias_trim(struct rtw89_dev *rtwdev,
730 u8 *phycap_map)
731 {
732 static const u32 pad_bias_trim_addr[RF_PATH_NUM_8922A] = {0x1708, 0x1735};
733 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
734 u32 addr = rtwdev->chip->phycap_addr;
735 u8 i;
736
737 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
738 info->pad_bias_trim[i] = phycap_map[pad_bias_trim_addr[i] - addr];
739
740 rtw89_debug(rtwdev, RTW89_DBG_RFK,
741 "[PAD_BIAS][TRIM] path=%d pad_bias_trim=0x%x\n",
742 i, info->pad_bias_trim[i]);
743 }
744 }
745
746 static void rtw8922a_pad_bias_trim(struct rtw89_dev *rtwdev)
747 {
748 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
749 u8 pad_bias_2g, pad_bias_5g;
750 u8 i;
751
752 if (!info->pg_pa_bias_trim) {
753 rtw89_debug(rtwdev, RTW89_DBG_RFK,
754 "[PAD_BIAS][TRIM] no PG, do nothing\n");
755 return;
756 }
757
758 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
759 pad_bias_2g = u8_get_bits(info->pad_bias_trim[i], GENMASK(3, 0));
760 pad_bias_5g = u8_get_bits(info->pad_bias_trim[i], GENMASK(7, 4));
761
762 rtw89_debug(rtwdev, RTW89_DBG_RFK,
763 "[PAD_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
764 i, pad_bias_2g, pad_bias_5g);
765
766 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXG_V1, pad_bias_2g);
767 rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXA_V1, pad_bias_5g);
768 }
769 }
770
771 static int rtw8922a_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
772 {
773 rtw8922a_phycap_parsing_thermal_trim(rtwdev, phycap_map);
774 rtw8922a_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
775 rtw8922a_phycap_parsing_pad_bias_trim(rtwdev, phycap_map);
776
777 return 0;
778 }
779
780 static void rtw8922a_power_trim(struct rtw89_dev *rtwdev)
781 {
782 rtw8922a_pa_bias_trim(rtwdev);
783 rtw8922a_pad_bias_trim(rtwdev);
784 }
785
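/* Program MAC-layer channel settings: RF bandwidth mode, the TX sub-band
 * bitmap (primary 20/40/80 positions), per-band CCK/RTS rate checks, and the
 * bandwidth-dependent SIFS MACTXEN timing.
 */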
786 static void rtw8922a_set_channel_mac(struct rtw89_dev *rtwdev,
787 const struct rtw89_chan *chan,
788 u8 mac_idx)
789 {
790 u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_BE_TX_SUB_BAND_VALUE, mac_idx);
791 u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_BE_TXRATE_CHK, mac_idx);
792 u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_BE_WMAC_RFMOD, mac_idx);
793 u8 txsb20 = 0, txsb40 = 0, txsb80 = 0;
794 u8 rf_mod_val, chk_rate_mask;
795 u32 txsb;
796 u32 reg;
797
798 switch (chan->band_width) {
799 case RTW89_CHANNEL_WIDTH_160:
800 txsb80 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_80);
801 fallthrough;
802 case RTW89_CHANNEL_WIDTH_80:
803 txsb40 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_40);
804 fallthrough;
805 case RTW89_CHANNEL_WIDTH_40:
806 txsb20 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_20);
807 break;
808 default:
809 break;
810 }
811
812 switch (chan->band_width) {
813 case RTW89_CHANNEL_WIDTH_160:
814 rf_mod_val = BE_WMAC_RFMOD_160M;
815 txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
816 u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK) |
817 u32_encode_bits(txsb80, B_BE_TXSB_80M_MASK);
818 break;
819 case RTW89_CHANNEL_WIDTH_80:
820 rf_mod_val = BE_WMAC_RFMOD_80M;
821 txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
822 u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK);
823 break;
824 case RTW89_CHANNEL_WIDTH_40:
825 rf_mod_val = BE_WMAC_RFMOD_40M;
826 txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK);
827 break;
828 case RTW89_CHANNEL_WIDTH_20:
829 default:
830 rf_mod_val = BE_WMAC_RFMOD_20M;
831 txsb = 0;
832 break;
833 }
834
835 if (txsb20 <= BE_PRI20_BITMAP_MAX)
836 txsb |= u32_encode_bits(BIT(txsb20), B_BE_PRI20_BITMAP_MASK);
837
838 rtw89_write8_mask(rtwdev, rf_mod, B_BE_WMAC_RFMOD_MASK, rf_mod_val);
839 rtw89_write32(rtwdev, sub_carr, txsb);
840
841 switch (chan->band_type) {
842 case RTW89_BAND_2G:
843 chk_rate_mask = B_BE_BAND_MODE;
844 break;
845 case RTW89_BAND_5G:
846 case RTW89_BAND_6G:
847 chk_rate_mask = B_BE_CHECK_CCK_EN | B_BE_RTS_LIMIT_IN_OFDM6;
848 break;
849 default:
850 rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
851 return;
852 }
853
854 rtw89_write8_clr(rtwdev, chk_rate, B_BE_BAND_MODE | B_BE_CHECK_CCK_EN |
855 B_BE_RTS_LIMIT_IN_OFDM6);
856 rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);
857
858 switch (chan->band_width) {
859 case RTW89_CHANNEL_WIDTH_320:
860 case RTW89_CHANNEL_WIDTH_160:
861 case RTW89_CHANNEL_WIDTH_80:
862 case RTW89_CHANNEL_WIDTH_40:
863 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
864 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x41);
865 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
866 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x41);
867 break;
868 default:
869 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
870 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x3f);
871 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
872 rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x3e);
873 break;
874 }
875 }
876
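/* Per-channel SCO compensation thresholds for 2.4 GHz channels 1-14,
 * indexed by primary channel - 1 in rtw8922a_ctrl_sco_cck().
 */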
877 static const u32 rtw8922a_sco_barker_threshold[14] = {
878 0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
879 0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
880 };
881
882 static const u32 rtw8922a_sco_cck_threshold[14] = {
883 0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
884 0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
885 };
886
887 static int rtw8922a_ctrl_sco_cck(struct rtw89_dev *rtwdev,
888 u8 primary_ch, enum rtw89_bandwidth bw,
889 enum rtw89_phy_idx phy_idx)
890 {
891 u8 ch_element;
892
893 if (primary_ch >= 14)
894 return -EINVAL;
895
896 ch_element = primary_ch - 1;
897
898 rtw89_phy_write32_idx(rtwdev, R_BK_FC0INV, B_BK_FC0INV,
899 rtw8922a_sco_barker_threshold[ch_element],
900 phy_idx);
901 rtw89_phy_write32_idx(rtwdev, R_CCK_FC0INV, B_CCK_FC0INV,
902 rtw8922a_sco_cck_threshold[ch_element],
903 phy_idx);
904
905 return 0;
906 }
907
908 struct rtw8922a_bb_gain {
909 u32 gain_g[BB_PATH_NUM_8922A];
910 u32 gain_a[BB_PATH_NUM_8922A];
911 u32 gain_g_mask;
912 u32 gain_a_mask;
913 };
914
915 static const struct rtw89_reg_def rpl_comp_bw160[RTW89_BW20_SC_160M] = {
916 { .addr = 0x41E8, .mask = 0xFF00},
917 { .addr = 0x41E8, .mask = 0xFF0000},
918 { .addr = 0x41E8, .mask = 0xFF000000},
919 { .addr = 0x41EC, .mask = 0xFF},
920 { .addr = 0x41EC, .mask = 0xFF00},
921 { .addr = 0x41EC, .mask = 0xFF0000},
922 { .addr = 0x41EC, .mask = 0xFF000000},
923 { .addr = 0x41F0, .mask = 0xFF}
924 };
925
926 static const struct rtw89_reg_def rpl_comp_bw80[RTW89_BW20_SC_80M] = {
927 { .addr = 0x41F4, .mask = 0xFF},
928 { .addr = 0x41F4, .mask = 0xFF00},
929 { .addr = 0x41F4, .mask = 0xFF0000},
930 { .addr = 0x41F4, .mask = 0xFF000000}
931 };
932
933 static const struct rtw89_reg_def rpl_comp_bw40[RTW89_BW20_SC_40M] = {
934 { .addr = 0x41F0, .mask = 0xFF0000},
935 { .addr = 0x41F0, .mask = 0xFF000000}
936 };
937
938 static const struct rtw89_reg_def rpl_comp_bw20[RTW89_BW20_SC_20M] = {
939 { .addr = 0x41F0, .mask = 0xFF00}
940 };
941
942 static const struct rtw8922a_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
943 { .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
944 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
945 { .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
946 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
947 { .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
948 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
949 { .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
950 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
951 { .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
952 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
953 { .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
954 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
955 { .gain_g = {0x40a8, 0x44a8}, .gain_a = {0x4078, 0x4478},
956 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
957 };
958
959 static const struct rtw8922a_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
960 { .gain_g = {0x4054, 0x4454}, .gain_a = {0x4054, 0x4454},
961 .gain_g_mask = 0x7FC0000, .gain_a_mask = 0x1FF},
962 { .gain_g = {0x4058, 0x4458}, .gain_a = {0x4054, 0x4454},
963 .gain_g_mask = 0x1FF, .gain_a_mask = 0x3FE00 },
964 };
965
966 static const struct rtw8922a_bb_gain bb_op1db_lna[LNA_GAIN_NUM] = {
967 { .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x4078, 0x4478},
968 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF000000},
969 { .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x407c, 0x447c},
970 .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF},
971 { .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x407c, 0x447c},
972 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF00},
973 { .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x407c, 0x447c},
974 .gain_g_mask = 0xFF, .gain_a_mask = 0xFF0000},
975 { .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x407c, 0x447c},
976 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF000000},
977 { .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x4080, 0x4480},
978 .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF},
979 { .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x4080, 0x4480},
980 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF00},
981 };
982
983 static const struct rtw8922a_bb_gain bb_op1db_tia_lna[TIA_LNA_OP1DB_NUM] = {
984 { .gain_g = {0x40b4, 0x44b4}, .gain_a = {0x4080, 0x4480},
985 .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF000000},
986 { .gain_g = {0x40b4, 0x44b4}, .gain_a = {0x4084, 0x4484},
987 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF},
988 { .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
989 .gain_g_mask = 0xFF, .gain_a_mask = 0xFF00},
990 { .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
991 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF0000},
992 { .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
993 .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF000000},
994 { .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4088, 0x4488},
995 .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF},
996 { .gain_g = {0x40bc, 0x44bc}, .gain_a = {0x4088, 0x4488},
997 .gain_g_mask = 0xFF, .gain_a_mask = 0xFF00},
998 { .gain_g = {0x40bc, 0x44bc}, .gain_a = {0x4088, 0x4488},
999 .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF0000},
1000 };
1001
1002 struct rtw8922a_bb_gain_bypass {
1003 u32 gain_g[BB_PATH_NUM_8922A];
1004 u32 gain_a[BB_PATH_NUM_8922A];
1005 u32 gain_mask_g;
1006 u32 gain_mask_a;
1007 };
1008
1009 static void rtw8922a_set_rpl_gain(struct rtw89_dev *rtwdev,
1010 const struct rtw89_chan *chan,
1011 enum rtw89_rf_path path,
1012 enum rtw89_phy_idx phy_idx)
1013 {
1014 const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1015 u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
1016 u32 reg_path_ofst = 0;
1017 u32 mask;
1018 s32 val;
1019 u32 reg;
1020 int i;
1021
1022 if (path == RF_PATH_B)
1023 reg_path_ofst = 0x400;
1024
1025 for (i = 0; i < RTW89_BW20_SC_160M; i++) {
1026 reg = rpl_comp_bw160[i].addr | reg_path_ofst;
1027 mask = rpl_comp_bw160[i].mask;
1028 val = gain->rpl_ofst_160[gain_band][path][i];
1029 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1030 }
1031
1032 for (i = 0; i < RTW89_BW20_SC_80M; i++) {
1033 reg = rpl_comp_bw80[i].addr | reg_path_ofst;
1034 mask = rpl_comp_bw80[i].mask;
1035 val = gain->rpl_ofst_80[gain_band][path][i];
1036 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1037 }
1038
1039 for (i = 0; i < RTW89_BW20_SC_40M; i++) {
1040 reg = rpl_comp_bw40[i].addr | reg_path_ofst;
1041 mask = rpl_comp_bw40[i].mask;
1042 val = gain->rpl_ofst_40[gain_band][path][i];
1043 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1044 }
1045
1046 for (i = 0; i < RTW89_BW20_SC_20M; i++) {
1047 reg = rpl_comp_bw20[i].addr | reg_path_ofst;
1048 mask = rpl_comp_bw20[i].mask;
1049 val = gain->rpl_ofst_20[gain_band][path][i];
1050 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1051 }
1052 }
1053
1054 static void rtw8922a_set_lna_tia_gain(struct rtw89_dev *rtwdev,
1055 const struct rtw89_chan *chan,
1056 enum rtw89_rf_path path,
1057 enum rtw89_phy_idx phy_idx)
1058 {
1059 const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1060 u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
1061 enum rtw89_phy_bb_bw_be bw_type;
1062 s32 val;
1063 u32 reg;
1064 u32 mask;
1065 int i;
1066
1067 bw_type = chan->band_width <= RTW89_CHANNEL_WIDTH_40 ?
1068 RTW89_BB_BW_20_40 : RTW89_BB_BW_80_160_320;
1069
1070 for (i = 0; i < LNA_GAIN_NUM; i++) {
1071 if (chan->band_type == RTW89_BAND_2G) {
1072 reg = bb_gain_lna[i].gain_g[path];
1073 mask = bb_gain_lna[i].gain_g_mask;
1074 } else {
1075 reg = bb_gain_lna[i].gain_a[path];
1076 mask = bb_gain_lna[i].gain_a_mask;
1077 }
1078 val = gain->lna_gain[gain_band][bw_type][path][i];
1079 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1080 }
1081
1082 for (i = 0; i < TIA_GAIN_NUM; i++) {
1083 if (chan->band_type == RTW89_BAND_2G) {
1084 reg = bb_gain_tia[i].gain_g[path];
1085 mask = bb_gain_tia[i].gain_g_mask;
1086 } else {
1087 reg = bb_gain_tia[i].gain_a[path];
1088 mask = bb_gain_tia[i].gain_a_mask;
1089 }
1090 val = gain->tia_gain[gain_band][bw_type][path][i];
1091 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1092 }
1093
1094 for (i = 0; i < LNA_GAIN_NUM; i++) {
1095 if (chan->band_type == RTW89_BAND_2G) {
1096 reg = bb_op1db_lna[i].gain_g[path];
1097 mask = bb_op1db_lna[i].gain_g_mask;
1098 } else {
1099 reg = bb_op1db_lna[i].gain_a[path];
1100 mask = bb_op1db_lna[i].gain_a_mask;
1101 }
1102 val = gain->lna_op1db[gain_band][bw_type][path][i];
1103 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1104 }
1105
1106 for (i = 0; i < TIA_LNA_OP1DB_NUM; i++) {
1107 if (chan->band_type == RTW89_BAND_2G) {
1108 reg = bb_op1db_tia_lna[i].gain_g[path];
1109 mask = bb_op1db_tia_lna[i].gain_g_mask;
1110 } else {
1111 reg = bb_op1db_tia_lna[i].gain_a[path];
1112 mask = bb_op1db_tia_lna[i].gain_a_mask;
1113 }
1114 val = gain->tia_lna_op1db[gain_band][bw_type][path][i];
1115 rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1116 }
1117 }
1118
1119 static void rtw8922a_set_gain(struct rtw89_dev *rtwdev,
1120 const struct rtw89_chan *chan,
1121 enum rtw89_rf_path path,
1122 enum rtw89_phy_idx phy_idx)
1123 {
1124 rtw8922a_set_lna_tia_gain(rtwdev, chan, path, phy_idx);
1125 rtw8922a_set_rpl_gain(rtwdev, chan, path, phy_idx);
1126 }
1127
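/* Apply the efuse 2G CCK gain offset (S(8,2), two fractional bits): the
 * fractional part is compensated through the mantissa gain bias, the integer
 * part through the CCK RPL offset on top of its 0xdc base value.
 */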
1128 static void rtw8922a_set_rx_gain_normal_cck(struct rtw89_dev *rtwdev,
1129 const struct rtw89_chan *chan,
1130 enum rtw89_rf_path path)
1131 {
1132 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1133 s8 value = -gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK]; /* S(8,2) */
1134 u8 fraction = value & 0x3;
1135
1136 if (fraction) {
1137 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20,
1138 (0x4 - fraction) << 1);
1139 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40,
1140 (0x4 - fraction) << 1);
1141
1142 value >>= 2;
1143 rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1144 value + 1 + 0xdc);
1145 } else {
1146 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, 0);
1147 rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, 0);
1148
1149 value >>= 2;
1150 rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1151 value + 0xdc);
1152 }
1153 }
1154
1155 static void rtw8922a_set_rx_gain_normal_ofdm(struct rtw89_dev *rtwdev,
1156 const struct rtw89_chan *chan,
1157 enum rtw89_rf_path path)
1158 {
1159 static const u32 rssi_tb_bias_comp[2] = {0x41f8, 0x45f8};
1160 static const u32 rssi_tb_ext_comp[2] = {0x4208, 0x4608};
1161 static const u32 rssi_ofst_addr[2] = {0x40c8, 0x44c8};
1162 static const u32 rpl_bias_comp[2] = {0x41e8, 0x45e8};
1163 static const u32 rpl_ext_comp[2] = {0x41f8, 0x45f8};
1164 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1165 enum rtw89_gain_offset gain_band;
1166 s8 v1, v2, v3;
1167 s32 value;
1168
1169 gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(chan->subband_type);
1170 value = gain->offset[path][gain_band];
1171 rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], 0xff000000, value + 0xF8);
1172
1173 value *= -4;
1174 v1 = clamp_t(s32, value, S8_MIN, S8_MAX);
1175 value -= v1;
1176 v2 = clamp_t(s32, value, S8_MIN, S8_MAX);
1177 value -= v2;
1178 v3 = clamp_t(s32, value, S8_MIN, S8_MAX);
1179
1180 rtw89_phy_write32_mask(rtwdev, rpl_bias_comp[path], 0xff, v1);
1181 rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff, v2);
1182 rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff00, v3);
1183
1184 rtw89_phy_write32_mask(rtwdev, rssi_tb_bias_comp[path], 0xff0000, v1);
1185 rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff0000, v2);
1186 rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff000000, v3);
1187 }
1188
1189 static void rtw8922a_set_rx_gain_normal(struct rtw89_dev *rtwdev,
1190 const struct rtw89_chan *chan,
1191 enum rtw89_rf_path path)
1192 {
1193 struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1194
1195 if (!gain->offset_valid)
1196 return;
1197
1198 if (chan->band_type == RTW89_BAND_2G)
1199 rtw8922a_set_rx_gain_normal_cck(rtwdev, chan, path);
1200
1201 rtw8922a_set_rx_gain_normal_ofdm(rtwdev, chan, path);
1202 }
1203
1204 static void rtw8922a_set_cck_parameters(struct rtw89_dev *rtwdev, u8 central_ch,
1205 enum rtw89_phy_idx phy_idx)
1206 {
1207 if (central_ch == 14) {
1208 rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3b13ff, phy_idx);
1209 rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x1c42de, phy_idx);
1210 rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0xfdb0ad, phy_idx);
1211 rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0xf60f6e, phy_idx);
1212 rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfd8f92, phy_idx);
1213 rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0x02d011, phy_idx);
1214 rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0x01c02c, phy_idx);
1215 rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xfff00a, phy_idx);
1216 } else {
1217 rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3a63ca, phy_idx);
1218 rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x2a833f, phy_idx);
1219 rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0x1491f8, phy_idx);
1220 rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0x03c0b0, phy_idx);
1221 rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfccff1, phy_idx);
1222 rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0xfccfc3, phy_idx);
1223 rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0xfebfdc, phy_idx);
1224 rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xffdff7, phy_idx);
1225 }
1226 }
1227
1228 static void rtw8922a_ctrl_ch(struct rtw89_dev *rtwdev,
1229 const struct rtw89_chan *chan,
1230 enum rtw89_phy_idx phy_idx)
1231 {
1232 static const u32 band_sel[2] = {0x4160, 0x4560};
1233 u16 central_freq = chan->freq;
1234 u8 central_ch = chan->channel;
1235 u8 band = chan->band_type;
1236 bool is_2g = band == RTW89_BAND_2G;
1237 u8 chan_idx;
1238 u8 path;
1239 u8 sco;
1240
1241 if (!central_freq) {
1242 rtw89_warn(rtwdev, "Invalid central_freq\n");
1243 return;
1244 }
1245
1246 rtw8922a_set_gain(rtwdev, chan, RF_PATH_A, phy_idx);
1247 rtw8922a_set_gain(rtwdev, chan, RF_PATH_B, phy_idx);
1248
1249 for (path = RF_PATH_A; path < BB_PATH_NUM_8922A; path++)
1250 rtw89_phy_write32_idx(rtwdev, band_sel[path], BIT(26), is_2g, phy_idx);
1251
1252 rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_A);
1253 rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_B);
1254
1255 rtw89_phy_write32_idx(rtwdev, R_FC0, B_FC0, central_freq, phy_idx);
1256 sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
1257 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_FC0_INV, sco, phy_idx);
1258
1259 if (band == RTW89_BAND_2G)
1260 rtw8922a_set_cck_parameters(rtwdev, central_ch, phy_idx);
1261
1262 chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band);
1263 rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
1264 }
1265
1266 static void
1267 rtw8922a_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_sb, u8 bw,
1268 enum rtw89_phy_idx phy_idx)
1269 {
1270 switch (bw) {
1271 case RTW89_CHANNEL_WIDTH_5:
1272 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1273 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x1, phy_idx);
1274 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1275 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1276 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1277 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1278 break;
1279 case RTW89_CHANNEL_WIDTH_10:
1280 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1281 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x2, phy_idx);
1282 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1283 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1284 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1285 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1286 break;
1287 case RTW89_CHANNEL_WIDTH_20:
1288 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1289 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1290 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1291 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1292 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1293 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1294 break;
1295 case RTW89_CHANNEL_WIDTH_40:
1296 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x1, phy_idx);
1297 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1298 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1299 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1300 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1301 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1302 break;
1303 case RTW89_CHANNEL_WIDTH_80:
1304 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x2, phy_idx);
1305 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1306 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1307 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1308 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
1309 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
1310 break;
1311 case RTW89_CHANNEL_WIDTH_160:
1312 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x3, phy_idx);
1313 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1314 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1315 rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1316 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
1317 rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
1318 break;
1319 default:
1320 rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri_sb:%d)\n", bw,
1321 pri_sb);
1322 break;
1323 }
1324
1325 if (bw == RTW89_CHANNEL_WIDTH_40)
1326 rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 1, phy_idx);
1327 else
1328 rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 0, phy_idx);
1329 }
1330
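/* No spur frequencies are defined for this chip yet; returning 0 keeps the
 * CSI weighting and NBI notch filters below disabled.
 */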
1331 static u32 rtw8922a_spur_freq(struct rtw89_dev *rtwdev,
1332 const struct rtw89_chan *chan)
1333 {
1334 return 0;
1335 }
1336
1337 #define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
1338 #define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
1339 #define MAX_TONE_NUM 2048
1340
1341 static void rtw8922a_set_csi_tone_idx(struct rtw89_dev *rtwdev,
1342 const struct rtw89_chan *chan,
1343 enum rtw89_phy_idx phy_idx)
1344 {
1345 s32 freq_diff, csi_idx, csi_tone_idx;
1346 u32 spur_freq;
1347
1348 spur_freq = rtw8922a_spur_freq(rtwdev, chan);
1349 if (spur_freq == 0) {
1350 rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN,
1351 0, phy_idx);
1352 return;
1353 }
1354
1355 freq_diff = (spur_freq - chan->freq) * 1000000;
1356 csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
1357 s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);
1358
1359 rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_TONE_IDX,
1360 csi_tone_idx, phy_idx);
1361 rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, 1, phy_idx);
1362 }
1363
1364 static const struct rtw89_nbi_reg_def rtw8922a_nbi_reg_def[] = {
1365 [RF_PATH_A] = {
1366 .notch1_idx = {0x41a0, 0xFF},
1367 .notch1_frac_idx = {0x41a0, 0xC00},
1368 .notch1_en = {0x41a0, 0x1000},
1369 .notch2_idx = {0x41ac, 0xFF},
1370 .notch2_frac_idx = {0x41ac, 0xC00},
1371 .notch2_en = {0x41ac, 0x1000},
1372 },
1373 [RF_PATH_B] = {
1374 .notch1_idx = {0x45a0, 0xFF},
1375 .notch1_frac_idx = {0x45a0, 0xC00},
1376 .notch1_en = {0x45a0, 0x1000},
1377 .notch2_idx = {0x45ac, 0xFF},
1378 .notch2_frac_idx = {0x45ac, 0xC00},
1379 .notch2_en = {0x45ac, 0x1000},
1380 },
1381 };
1382
1383 static void rtw8922a_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
1384 const struct rtw89_chan *chan,
1385 enum rtw89_rf_path path,
1386 enum rtw89_phy_idx phy_idx)
1387 {
1388 const struct rtw89_nbi_reg_def *nbi = &rtw8922a_nbi_reg_def[path];
1389 s32 nbi_frac_idx, nbi_frac_tone_idx;
1390 s32 nbi_idx, nbi_tone_idx;
1391 bool notch2_chk = false;
1392 u32 spur_freq, fc;
1393 s32 freq_diff;
1394
1395 spur_freq = rtw8922a_spur_freq(rtwdev, chan);
1396 if (spur_freq == 0) {
1397 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1398 nbi->notch1_en.mask, 0, phy_idx);
1399 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1400 nbi->notch2_en.mask, 0, phy_idx);
1401 return;
1402 }
1403
1404 fc = chan->freq;
1405 if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
1406 fc = (spur_freq > fc) ? fc + 40 : fc - 40;
1407 if ((fc > spur_freq &&
1408 chan->channel < chan->primary_channel) ||
1409 (fc < spur_freq &&
1410 chan->channel > chan->primary_channel))
1411 notch2_chk = true;
1412 }
1413
1414 freq_diff = (spur_freq - fc) * 1000000;
1415 nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5,
1416 &nbi_frac_idx);
1417
1418 if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
1419 s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
1420 } else {
1421 u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
1422 128 : 256;
1423
1424 s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
1425 }
1426 nbi_frac_tone_idx =
1427 s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125);
1428
1429 if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
1430 rtw89_phy_write32_idx(rtwdev, nbi->notch2_idx.addr,
1431 nbi->notch2_idx.mask, nbi_tone_idx, phy_idx);
1432 rtw89_phy_write32_idx(rtwdev, nbi->notch2_frac_idx.addr,
1433 nbi->notch2_frac_idx.mask, nbi_frac_tone_idx,
1434 phy_idx);
1435 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1436 nbi->notch2_en.mask, 0, phy_idx);
1437 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1438 nbi->notch2_en.mask, 1, phy_idx);
1439 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1440 nbi->notch1_en.mask, 0, phy_idx);
1441 } else {
1442 rtw89_phy_write32_idx(rtwdev, nbi->notch1_idx.addr,
1443 nbi->notch1_idx.mask, nbi_tone_idx, phy_idx);
1444 rtw89_phy_write32_idx(rtwdev, nbi->notch1_frac_idx.addr,
1445 nbi->notch1_frac_idx.mask, nbi_frac_tone_idx,
1446 phy_idx);
1447 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1448 nbi->notch1_en.mask, 0, phy_idx);
1449 rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1450 nbi->notch1_en.mask, 1, phy_idx);
1451 rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1452 nbi->notch2_en.mask, 0, phy_idx);
1453 }
1454 }
1455
1456 static void rtw8922a_spur_elimination(struct rtw89_dev *rtwdev,
1457 const struct rtw89_chan *chan,
1458 enum rtw89_phy_idx phy_idx)
1459 {
1460 rtw8922a_set_csi_tone_idx(rtwdev, chan, phy_idx);
1461 rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A, phy_idx);
1462 rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B, phy_idx);
1463 }
1464
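/* Select the AFE DAC clock setting for the requested bandwidth: one
 * setting covers 5/10/20/40/80 MHz and a different one is used for
 * 160 MHz. Path B uses the same control registers at a 0x100 offset.
 */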
1465 static void rtw8922a_ctrl_afe_dac(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw,
1466 enum rtw89_rf_path path)
1467 {
1468 u32 cr_ofst = 0x0;
1469
1470 if (path == RF_PATH_B)
1471 cr_ofst = 0x100;
1472
1473 switch (bw) {
1474 case RTW89_CHANNEL_WIDTH_5:
1475 case RTW89_CHANNEL_WIDTH_10:
1476 case RTW89_CHANNEL_WIDTH_20:
1477 case RTW89_CHANNEL_WIDTH_40:
1478 case RTW89_CHANNEL_WIDTH_80:
1479 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xE);
1480 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x7);
1481 break;
1482 case RTW89_CHANNEL_WIDTH_160:
1483 rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xD);
1484 rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x6);
1485 break;
1486 default:
1487 break;
1488 }
1489 }
1490
1491 static const struct rtw89_reg2_def bb_mcu0_init_reg[] = {
1492 {0x6990, 0x00000000},
1493 {0x6994, 0x00000000},
1494 {0x6998, 0x00000000},
1495 {0x6820, 0xFFFFFFFE},
1496 {0x6800, 0xC0000FFE},
1497 {0x6808, 0x76543210},
1498 {0x6814, 0xBFBFB000},
1499 {0x6818, 0x0478C009},
1500 {0x6800, 0xC0000FFF},
1501 {0x6820, 0xFFFFFFFF},
1502 };
1503
1504 static const struct rtw89_reg2_def bb_mcu1_init_reg[] = {
1505 {0x6990, 0x00000000},
1506 {0x6994, 0x00000000},
1507 {0x6998, 0x00000000},
1508 {0x6820, 0xFFFFFFFE},
1509 {0x6800, 0xC0000FFE},
1510 {0x6808, 0x76543210},
1511 {0x6814, 0xBFBFB000},
1512 {0x6818, 0x0478C009},
1513 {0x6800, 0xC0000FFF},
1514 {0x6820, 0xFFFFFFFF},
1515 };
1516
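/* Write the BB MCU bring-up register table for the selected PHY. */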
1517 static void rtw8922a_bbmcu_cr_init(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1518 {
1519 const struct rtw89_reg2_def *reg;
1520 int size;
1521 int i;
1522
1523 if (phy_idx == RTW89_PHY_0) {
1524 reg = bb_mcu0_init_reg;
1525 size = ARRAY_SIZE(bb_mcu0_init_reg);
1526 } else {
1527 reg = bb_mcu1_init_reg;
1528 size = ARRAY_SIZE(bb_mcu1_init_reg);
1529 }
1530
1531 for (i = 0; i < size; i++, reg++)
1532 rtw89_bbmcu_write32(rtwdev, reg->addr, reg->data, phy_idx);
1533 }
1534
1535 static const u32 dmac_sys_mask[2] = {B_BE_DMAC_BB_PHY0_MASK, B_BE_DMAC_BB_PHY1_MASK};
1536 static const u32 bbrst_mask[2] = {B_BE_FEN_BBPLAT_RSTB, B_BE_FEN_BB1PLAT_RSTB};
1537 static const u32 glbrst_mask[2] = {B_BE_FEN_BB_IP_RSTN, B_BE_FEN_BB1_IP_RSTN};
1538 static const u32 mcu_bootrdy_mask[2] = {B_BE_BOOT_RDY0, B_BE_BOOT_RDY1};
1539
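/* Pre-init for one BB: program the DMAC-side BB control word, pulse the
 * BB IP reset while keeping the BB platform reset asserted, and load the
 * BB MCU CR table. BOOT_RDY is raised here only for PHY1; PHY0 raises it
 * in rtw8922a_bb_postinit().
 */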
1540 static void rtw8922a_bb_preinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1541 {
1542 u32 rdy = 0;
1543
1544 if (phy_idx == RTW89_PHY_1)
1545 rdy = 1;
1546
1547 rtw89_write32_mask(rtwdev, R_BE_DMAC_SYS_CR32B, dmac_sys_mask[phy_idx], 0x7FF9);
1548 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x0);
1549 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx], 0x0);
1550 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x1);
1551 rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx], rdy);
1552 rtw89_write32_mask(rtwdev, R_BE_MEM_PWR_CTRL, B_BE_MEM_BBMCU0_DS_V1, 0);
1553
1554 fsleep(1);
1555 rtw8922a_bbmcu_cr_init(rtwdev, phy_idx);
1556 }
1557
1558 static void rtw8922a_bb_postinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1559 {
1560 if (phy_idx == RTW89_PHY_0)
1561 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx]);
1562 rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx]);
1563
1564 rtw89_phy_write32_set(rtwdev, R_BBCLK, B_CLK_640M);
1565 rtw89_phy_write32_clr(rtwdev, R_TXSCALE, B_TXFCTR_EN);
1566 rtw89_phy_set_phy_regs(rtwdev, R_TXFCTR, B_TXFCTR_THD, 0x200);
1567 rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_EHT_RATE_TH, 0xA);
1568 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_HE_RATE_TH, 0xA);
1569 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_HT_VHT_TH, 0xAAA);
1570 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_EHT_MCS14, 0x1);
1571 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_EHT_MCS15, 0x1);
1572 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_EHTTB_EN, 0x0);
1573 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEERSU_EN, 0x0);
1574 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEMU_EN, 0x0);
1575 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_TB_EN, 0x0);
1576 rtw89_phy_set_phy_regs(rtwdev, R_SU_PUNC, B_SU_PUNC_EN, 0x1);
1577 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_HWGEN_EN, 0x1);
1578 rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_PWROFST_COMP, 0x1);
1579 rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_BY_SLOPE, 0x1);
1580 rtw89_phy_set_phy_regs(rtwdev, R_MAG_A, B_MGA_AEND, 0xe0);
1581 rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_MAG_AB, 0xe0c000);
1582 rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_A, 0x3FE0);
1583 rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_B, 0x3FE0);
1584 rtw89_phy_set_phy_regs(rtwdev, R_SC_CORNER, B_SC_CORNER, 0x200);
1585 rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x0, phy_idx);
1586 rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x1, phy_idx);
1587 }
1588
1589 static void rtw8922a_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
1590 bool en, enum rtw89_phy_idx phy_idx)
1591 {
1592 if (en) {
1593 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
1594 if (band == RTW89_BAND_2G)
1595 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1,
1596 B_RXCCA_BE1_DIS, 0x0, phy_idx);
1597 rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0, phy_idx);
1598 } else {
1599 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0x1, phy_idx);
1600 rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1, phy_idx);
1601 fsleep(1);
1602 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx);
1603 }
1604 }
1605
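/* Program the TMAC path-common CR table for the requested TX path
 * (A only, B only, or A+B). For an unknown path the default two-path
 * values are still written and -EINVAL is returned.
 */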
1606 static int rtw8922a_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev,
1607 enum rtw89_rf_path tx_path,
1608 enum rtw89_phy_idx phy_idx)
1609 {
1610 struct rtw89_reg2_def path_com_cr[] = {
1611 {0x11A00, 0x21C86900},
1612 {0x11A04, 0x00E4E433},
1613 {0x11A08, 0x39390CC9},
1614 {0x11A0C, 0x4E433240},
1615 {0x11A10, 0x90CC900E},
1616 {0x11A14, 0x00240393},
1617 {0x11A18, 0x201C8600},
1618 };
1619 int ret = 0;
1620 u32 reg;
1621 int i;
1622
1623 rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL, 0x0, phy_idx);
1624
1625 if (phy_idx == RTW89_PHY_1 && !rtwdev->dbcc_en)
1626 return 0;
1627
1628 if (tx_path == RF_PATH_A) {
1629 path_com_cr[0].data = 0x21C82900;
1630 path_com_cr[1].data = 0x00E4E431;
1631 path_com_cr[2].data = 0x39390C49;
1632 path_com_cr[3].data = 0x4E431240;
1633 path_com_cr[4].data = 0x90C4900E;
1634 path_com_cr[6].data = 0x201C8200;
1635 } else if (tx_path == RF_PATH_B) {
1636 path_com_cr[0].data = 0x21C04900;
1637 path_com_cr[1].data = 0x00E4E032;
1638 path_com_cr[2].data = 0x39380C89;
1639 path_com_cr[3].data = 0x4E032240;
1640 path_com_cr[4].data = 0x80C8900E;
1641 path_com_cr[6].data = 0x201C0400;
1642 } else if (tx_path == RF_PATH_AB) {
1643 path_com_cr[0].data = 0x21C86900;
1644 path_com_cr[1].data = 0x00E4E433;
1645 path_com_cr[2].data = 0x39390CC9;
1646 path_com_cr[3].data = 0x4E433240;
1647 path_com_cr[4].data = 0x90CC900E;
1648 path_com_cr[6].data = 0x201C8600;
1649 } else {
1650 ret = -EINVAL;
1651 }
1652
1653 for (i = 0; i < ARRAY_SIZE(path_com_cr); i++) {
1654 reg = rtw89_mac_reg_by_idx(rtwdev, path_com_cr[i].addr, phy_idx);
1655 rtw89_write32(rtwdev, reg, path_com_cr[i].data);
1656 }
1657
1658 return ret;
1659 }
1660
1661 static void rtw8922a_bb_reset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1662 {
1663 }
1664
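/* Limit the number of RX spatial streams accepted by the BB for
 * HT/VHT/HE/EHT; only 1 or 2 NSS is valid on this chip.
 */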
1665 static int rtw8922a_cfg_rx_nss_limit(struct rtw89_dev *rtwdev, u8 rx_nss,
1666 enum rtw89_phy_idx phy_idx)
1667 {
1668 if (rx_nss == 1) {
1669 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 0, phy_idx);
1670 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 0, phy_idx);
1671 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1672 HE_N_USER_MAX_8922A, phy_idx);
1673 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 0, phy_idx);
1674 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 0, phy_idx);
1675 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 0, phy_idx);
1676 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 0,
1677 phy_idx);
1678 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1679 HE_N_USER_MAX_8922A, phy_idx);
1680 } else if (rx_nss == 2) {
1681 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 1, phy_idx);
1682 rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 1, phy_idx);
1683 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1684 HE_N_USER_MAX_8922A, phy_idx);
1685 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 1, phy_idx);
1686 rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 1, phy_idx);
1687 rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 1, phy_idx);
1688 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 1,
1689 phy_idx);
1690 rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1691 HE_N_USER_MAX_8922A, phy_idx);
1692 } else {
1693 return -EINVAL;
1694 }
1695
1696 return 0;
1697 }
1698
1699 static void rtw8922a_tssi_reset(struct rtw89_dev *rtwdev,
1700 enum rtw89_rf_path path,
1701 enum rtw89_phy_idx phy_idx)
1702 {
1703 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1704 if (phy_idx == RTW89_PHY_0) {
1705 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0);
1706 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1);
1707 } else {
1708 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0);
1709 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1);
1710 }
1711 } else {
1712 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0);
1713 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1);
1714 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0);
1715 rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1);
1716 }
1717 }
1718
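/* Select the active RX path(s), then reapply the matching NSS limit and
 * reset TSSI for the new configuration.
 */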
1719 static int rtw8922a_ctrl_rx_path_tmac(struct rtw89_dev *rtwdev,
1720 enum rtw89_rf_path rx_path,
1721 enum rtw89_phy_idx phy_idx)
1722 {
1723 u8 rx_nss = (rx_path == RF_PATH_AB) ? 2 : 1;
1724
1725 /* Set to 0 first to avoid abnormal EDCCA report */
1726 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x0, phy_idx);
1727
1728 if (rx_path == RF_PATH_A) {
1729 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x1, phy_idx);
1730 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 1, phy_idx);
1731 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1732 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1733 } else if (rx_path == RF_PATH_B) {
1734 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x2, phy_idx);
1735 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 2, phy_idx);
1736 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1737 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1738 } else if (rx_path == RF_PATH_AB) {
1739 rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x3, phy_idx);
1740 rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 3, phy_idx);
1741 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1742 rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1743 } else {
1744 return -EINVAL;
1745 }
1746
1747 return 0;
1748 }
1749
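/* Digital TX power compensation: row 0 of the table is used for 1SS,
 * row 1 for 2SS; each row fills the 22 consecutive LTPC registers of
 * one RF path starting at R_BE_LTPC_T0_PATHx.
 */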
1750 #define DIGITAL_PWR_COMP_REG_NUM 22
1751 static const u32 rtw8922a_digital_pwr_comp_val[][DIGITAL_PWR_COMP_REG_NUM] = {
1752 {0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A,
1753 0x0BB80708, 0x17701194, 0x02020100, 0x03030303, 0x01000303,
1754 0x05030302, 0x06060605, 0x06050300, 0x0A090807, 0x02000B0B,
1755 0x09080604, 0x0D0D0C0B, 0x08060400, 0x110F0C0B, 0x05001111,
1756 0x0D0C0907, 0x12121210},
1757 {0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A,
1758 0x0BB80708, 0x17701194, 0x04030201, 0x05050505, 0x01000505,
1759 0x07060504, 0x09090908, 0x09070400, 0x0E0D0C0B, 0x03000E0E,
1760 0x0D0B0907, 0x1010100F, 0x0B080500, 0x1512100D, 0x05001515,
1761 0x100D0B08, 0x15151512},
1762 };
1763
1764 static void rtw8922a_set_digital_pwr_comp(struct rtw89_dev *rtwdev,
1765 bool enable, u8 nss,
1766 enum rtw89_rf_path path)
1767 {
1768 static const u32 ltpc_t0[2] = {R_BE_LTPC_T0_PATH0, R_BE_LTPC_T0_PATH1};
1769 const u32 *digital_pwr_comp;
1770 u32 addr, val;
1771 u32 i;
1772
1773 if (nss == 1)
1774 digital_pwr_comp = rtw8922a_digital_pwr_comp_val[0];
1775 else
1776 digital_pwr_comp = rtw8922a_digital_pwr_comp_val[1];
1777
1778 addr = ltpc_t0[path];
1779 for (i = 0; i < DIGITAL_PWR_COMP_REG_NUM; i++, addr += 4) {
1780 val = enable ? digital_pwr_comp[i] : 0;
1781 rtw89_phy_write32(rtwdev, addr, val);
1782 }
1783 }
1784
1785 static void rtw8922a_digital_pwr_comp(struct rtw89_dev *rtwdev,
1786 enum rtw89_phy_idx phy_idx)
1787 {
1788 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
1789 bool enable = chan->band_type != RTW89_BAND_2G;
1790 u8 path;
1791
1792 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1793 if (phy_idx == RTW89_PHY_0)
1794 path = RF_PATH_A;
1795 else
1796 path = RF_PATH_B;
1797 rtw8922a_set_digital_pwr_comp(rtwdev, enable, 1, path);
1798 } else {
1799 rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_A);
1800 rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_B);
1801 }
1802 }
1803
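/* Apply the BB-side MLO/DBCC mode: enable or disable DBCC, pick the
 * per-path AFE DAC rate from the managed channel of each link, and walk
 * the EMLSR parameter through the sequence required for the new mode.
 */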
1804 static int rtw8922a_ctrl_mlo(struct rtw89_dev *rtwdev, enum rtw89_mlo_dbcc_mode mode)
1805 {
1806 const struct rtw89_chan *chan0, *chan1;
1807
1808 if (mode == MLO_1_PLUS_1_1RF || mode == DBCC_LEGACY) {
1809 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x1);
1810 rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x0);
1811 } else if (mode == MLO_2_PLUS_0_1RF || mode == MLO_0_PLUS_2_1RF ||
1812 mode == MLO_DBCC_NOT_SUPPORT) {
1813 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1814 rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x1);
1815 } else {
1816 return -EOPNOTSUPP;
1817 }
1818
1819 if (mode == MLO_1_PLUS_1_1RF) {
1820 chan0 = rtw89_mgnt_chan_get(rtwdev, 0);
1821 chan1 = rtw89_mgnt_chan_get(rtwdev, 1);
1822 } else if (mode == MLO_0_PLUS_2_1RF) {
1823 chan1 = rtw89_mgnt_chan_get(rtwdev, 1);
1824 chan0 = chan1;
1825 } else {
1826 chan0 = rtw89_mgnt_chan_get(rtwdev, 0);
1827 chan1 = chan0;
1828 }
1829
1830 rtw8922a_ctrl_afe_dac(rtwdev, chan0->band_width, RF_PATH_A);
1831 rtw8922a_ctrl_afe_dac(rtwdev, chan1->band_width, RF_PATH_B);
1832
1833 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
1834
1835 if (mode == MLO_2_PLUS_0_1RF) {
1836 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1837 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
1838 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
1839 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
1840 } else if (mode == MLO_0_PLUS_2_1RF) {
1841 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1842 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
1843 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
1844 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
1845 } else if ((mode == MLO_1_PLUS_1_1RF) || (mode == DBCC_LEGACY)) {
1846 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x7BAB);
1847 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3BAB);
1848 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3AAB);
1849 } else {
1850 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x180);
1851 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x0);
1852 }
1853
1854 return 0;
1855 }
1856
1857 static void rtw8922a_bb_sethw(struct rtw89_dev *rtwdev)
1858 {
1859 u32 reg;
1860
1861 rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP, B_EN_SND_WO_NDP);
1862 rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP_C1, B_EN_SND_WO_NDP);
1863
1864 rtw89_write32_mask(rtwdev, R_BE_PWR_BOOST, B_BE_PWR_CTRL_SEL, 0);
1865 if (rtwdev->dbcc_en) {
1866 reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, RTW89_MAC_1);
1867 rtw89_write32_mask(rtwdev, reg, B_BE_PWR_CTRL_SEL, 0);
1868 }
1869
1870 rtw8922a_ctrl_mlo(rtwdev, rtwdev->mlo_dbcc_mode);
1871 }
1872
1873 static void rtw8922a_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en,
1874 enum rtw89_phy_idx phy_idx)
1875 {
1876 if (cck_en) {
1877 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0, phy_idx);
1878 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1, phy_idx);
1879 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1880 0, phy_idx);
1881 } else {
1882 rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 1, phy_idx);
1883 rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0, phy_idx);
1884 rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1885 1, phy_idx);
1886 }
1887 }
1888
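/* BB portion of a channel switch: CCK SCO compensation on 2 GHz,
 * channel and bandwidth programming, CCK enable/disable, spur
 * elimination, then release the BB async reset and reset TSSI.
 */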
1889 static void rtw8922a_set_channel_bb(struct rtw89_dev *rtwdev,
1890 const struct rtw89_chan *chan,
1891 enum rtw89_phy_idx phy_idx)
1892 {
1893 bool cck_en = chan->band_type == RTW89_BAND_2G;
1894 u8 pri_sb = chan->pri_sb_idx;
1895
1896 if (cck_en)
1897 rtw8922a_ctrl_sco_cck(rtwdev, chan->primary_channel,
1898 chan->band_width, phy_idx);
1899
1900 rtw8922a_ctrl_ch(rtwdev, chan, phy_idx);
1901 rtw8922a_ctrl_bw(rtwdev, pri_sb, chan->band_width, phy_idx);
1902 rtw8922a_ctrl_cck_en(rtwdev, cck_en, phy_idx);
1903 rtw8922a_spur_elimination(rtwdev, chan, phy_idx);
1904
1905 rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
1906 rtw8922a_tssi_reset(rtwdev, RF_PATH_AB, phy_idx);
1907 }
1908
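/* With DBCC enabled, drop DBCC and preload the EMLSR parameter sequence
 * for the PHY being reprogrammed before the channel switch;
 * rtw8922a_post_set_channel_bb() restores the requested MLO mode and
 * digital power compensation afterwards.
 */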
1909 static void rtw8922a_pre_set_channel_bb(struct rtw89_dev *rtwdev,
1910 enum rtw89_phy_idx phy_idx)
1911 {
1912 if (!rtwdev->dbcc_en)
1913 return;
1914
1915 if (phy_idx == RTW89_PHY_0) {
1916 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1917 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
1918 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1919 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
1920 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
1921 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
1922 } else {
1923 rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1924 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1925 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
1926 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
1927 rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
1928 }
1929 }
1930
1931 static void rtw8922a_post_set_channel_bb(struct rtw89_dev *rtwdev,
1932 enum rtw89_mlo_dbcc_mode mode,
1933 enum rtw89_phy_idx phy_idx)
1934 {
1935 if (!rtwdev->dbcc_en)
1936 return;
1937
1938 rtw8922a_digital_pwr_comp(rtwdev, phy_idx);
1939 rtw8922a_ctrl_mlo(rtwdev, mode);
1940 }
1941
1942 static void rtw8922a_set_channel(struct rtw89_dev *rtwdev,
1943 const struct rtw89_chan *chan,
1944 enum rtw89_mac_idx mac_idx,
1945 enum rtw89_phy_idx phy_idx)
1946 {
1947 rtw8922a_set_channel_mac(rtwdev, chan, mac_idx);
1948 rtw8922a_set_channel_bb(rtwdev, chan, phy_idx);
1949 rtw8922a_set_channel_rf(rtwdev, chan, phy_idx);
1950 }
1951
1952 static void rtw8922a_dfs_en_idx(struct rtw89_dev *rtwdev,
1953 enum rtw89_phy_idx phy_idx, enum rtw89_rf_path path,
1954 bool en)
1955 {
1956 u32 path_ofst = (path == RF_PATH_B) ? 0x100 : 0x0;
1957
1958 if (en)
1959 rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 1,
1960 phy_idx);
1961 else
1962 rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 0,
1963 phy_idx);
1964 }
1965
1966 static void rtw8922a_dfs_en(struct rtw89_dev *rtwdev, bool en,
1967 enum rtw89_phy_idx phy_idx)
1968 {
1969 rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_A, en);
1970 rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_B, en);
1971 }
1972
1973 static void rtw8922a_adc_en_path(struct rtw89_dev *rtwdev,
1974 enum rtw89_rf_path path, bool en)
1975 {
1976 u32 val;
1977
1978 val = rtw89_phy_read32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1);
1979
1980 if (en) {
1981 if (path == RF_PATH_A)
1982 val &= ~0x1;
1983 else
1984 val &= ~0x2;
1985 } else {
1986 if (path == RF_PATH_A)
1987 val |= 0x1;
1988 else
1989 val |= 0x2;
1990 }
1991
1992 rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1, val);
1993 }
1994
1995 static void rtw8922a_adc_en(struct rtw89_dev *rtwdev, bool en, u8 phy_idx)
1996 {
1997 if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1998 if (phy_idx == RTW89_PHY_0)
1999 rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en);
2000 else
2001 rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en);
2002 } else {
2003 rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en);
2004 rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en);
2005 }
2006 }
2007
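/* Bracket a channel switch: on entry stop scheduled TX, pause PPDU
 * status reports, DFS, TSSI tracking and the ADC, then hold the BB in
 * reset; on exit re-enable all of them and resume scheduled TX.
 */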
2008 static
2009 void rtw8922a_hal_reset(struct rtw89_dev *rtwdev,
2010 enum rtw89_phy_idx phy_idx, enum rtw89_mac_idx mac_idx,
2011 enum rtw89_band band, u32 *tx_en, bool enter)
2012 {
2013 if (enter) {
2014 rtw89_chip_stop_sch_tx(rtwdev, mac_idx, tx_en, RTW89_SCH_TX_SEL_ALL);
2015 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
2016 rtw8922a_dfs_en(rtwdev, false, phy_idx);
2017 rtw8922a_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
2018 rtw8922a_adc_en(rtwdev, false, phy_idx);
2019 fsleep(40);
2020 rtw8922a_bb_reset_en(rtwdev, band, false, phy_idx);
2021 } else {
2022 rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
2023 rtw8922a_adc_en(rtwdev, true, phy_idx);
2024 rtw8922a_dfs_en(rtwdev, true, phy_idx);
2025 rtw8922a_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
2026 rtw8922a_bb_reset_en(rtwdev, band, true, phy_idx);
2027 rtw89_chip_resume_sch_tx(rtwdev, mac_idx, *tx_en);
2028 }
2029 }
2030
2031 static void rtw8922a_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
2032 struct rtw89_channel_help_params *p,
2033 const struct rtw89_chan *chan,
2034 enum rtw89_mac_idx mac_idx,
2035 enum rtw89_phy_idx phy_idx)
2036 {
2037 if (enter) {
2038 rtw8922a_pre_set_channel_bb(rtwdev, phy_idx);
2039 rtw8922a_pre_set_channel_rf(rtwdev, phy_idx);
2040 }
2041
2042 rtw8922a_hal_reset(rtwdev, phy_idx, mac_idx, chan->band_type, &p->tx_en, enter);
2043
2044 if (!enter) {
2045 rtw8922a_post_set_channel_bb(rtwdev, rtwdev->mlo_dbcc_mode, phy_idx);
2046 rtw8922a_post_set_channel_rf(rtwdev, phy_idx);
2047 }
2048 }
2049
2050 static void rtw8922a_rfk_init(struct rtw89_dev *rtwdev)
2051 {
2052 struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc;
2053
2054 rtwdev->is_tssi_mode[RF_PATH_A] = false;
2055 rtwdev->is_tssi_mode[RF_PATH_B] = false;
2056 memset(rfk_mcc, 0, sizeof(*rfk_mcc));
2057 }
2058
2059 static void __rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev,
2060 enum rtw89_phy_idx phy_idx,
2061 const struct rtw89_chan *chan)
2062 {
2063 rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
2064
2065 rtw89_phy_rfk_dack_and_wait(rtwdev, phy_idx, chan, 58);
2066 rtw89_phy_rfk_rxdck_and_wait(rtwdev, phy_idx, chan, false, 32);
2067 }
2068
2069 static void rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev)
2070 {
2071 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
2072
2073 __rtw8922a_rfk_init_late(rtwdev, RTW89_PHY_0, chan);
2074 if (rtwdev->dbcc_en)
2075 __rtw8922a_rfk_init_late(rtwdev, RTW89_PHY_1, chan);
2076 }
2077
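/* Wait (up to 5 ms per path) for every requested RF path to settle in
 * RX mode before starting a calibration sequence.
 */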
2078 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath)
2079 {
2080 u32 rf_mode;
2081 u8 path;
2082 int ret;
2083
2084 for (path = 0; path < RF_PATH_NUM_8922A; path++) {
2085 if (!(kpath & BIT(path)))
2086 continue;
2087
2088 ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2,
2089 2, 5000, false, rtwdev, path, 0x00,
2090 RR_MOD_MASK);
2091 rtw89_debug(rtwdev, RTW89_DBG_RFK,
2092 "[RFK] Wait S%d to Rx mode!! (ret = %d)\n",
2093 path, ret);
2094 }
2095 }
2096
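/* Full per-channel RF calibration: notify BTC, stop scheduled TX, wait
 * for RX mode, then run the firmware-driven TXGAPK, IQK, TSSI, DPK and
 * RX DCK flows before resuming TX.
 */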
2097 static void rtw8922a_rfk_channel(struct rtw89_dev *rtwdev,
2098 struct rtw89_vif_link *rtwvif_link)
2099 {
2100 enum rtw89_chanctx_idx chanctx_idx = rtwvif_link->chanctx_idx;
2101 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
2102 enum rtw89_phy_idx phy_idx = rtwvif_link->phy_idx;
2103 u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB, chanctx_idx);
2104 u32 tx_en;
2105
2106 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_START);
2107 rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
2108 _wait_rx_mode(rtwdev, RF_AB);
2109
2110 rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
2111 rtw89_phy_rfk_txgapk_and_wait(rtwdev, phy_idx, chan, 54);
2112 rtw89_phy_rfk_iqk_and_wait(rtwdev, phy_idx, chan, 84);
2113 rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_NORMAL, 20);
2114 rtw89_phy_rfk_dpk_and_wait(rtwdev, phy_idx, chan, 34);
2115 rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, chan, true, 32);
2116
2117 rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
2118 rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_STOP);
2119 }
2120
2121 static void rtw8922a_rfk_band_changed(struct rtw89_dev *rtwdev,
2122 enum rtw89_phy_idx phy_idx,
2123 const struct rtw89_chan *chan)
2124 {
2125 rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_SCAN, 6);
2126 }
2127
2128 static void rtw8922a_rfk_scan(struct rtw89_dev *rtwdev,
2129 struct rtw89_vif_link *rtwvif_link,
2130 bool start)
2131 {
2132 }
2133
2134 static void rtw8922a_rfk_track(struct rtw89_dev *rtwdev)
2135 {
2136 }
2137
2138 static void rtw8922a_set_txpwr_ref(struct rtw89_dev *rtwdev,
2139 enum rtw89_phy_idx phy_idx)
2140 {
2141 s16 ref_ofdm = 0;
2142 s16 ref_cck = 0;
2143
2144 rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");
2145
2146 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
2147 B_BE_PWR_REF_CTRL_OFDM, ref_ofdm);
2148 rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
2149 B_BE_PWR_REF_CTRL_CCK, ref_cck);
2150 }
2151
2152 static void rtw8922a_bb_tx_triangular(struct rtw89_dev *rtwdev, bool en,
2153 enum rtw89_phy_idx phy_idx)
2154 {
2155 u8 ctrl = en ? 0x1 : 0x0;
2156
2157 rtw89_phy_write32_idx(rtwdev, R_BEDGE3, B_BEDGE_CFG, ctrl, phy_idx);
2158 }
2159
2160 static void rtw8922a_set_tx_shape(struct rtw89_dev *rtwdev,
2161 const struct rtw89_chan *chan,
2162 enum rtw89_phy_idx phy_idx)
2163 {
2164 const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
2165 const struct rtw89_tx_shape *tx_shape = &rfe_parms->tx_shape;
2166 u8 tx_shape_idx;
2167 u8 band, regd;
2168
2169 band = chan->band_type;
2170 regd = rtw89_regd_get(rtwdev, band);
2171 tx_shape_idx = (*tx_shape->lmt)[band][RTW89_RS_OFDM][regd];
2172
2173 if (tx_shape_idx == 0)
2174 rtw8922a_bb_tx_triangular(rtwdev, false, phy_idx);
2175 else
2176 rtw8922a_bb_tx_triangular(rtwdev, true, phy_idx);
2177 }
2178
2179 static void rtw8922a_set_txpwr(struct rtw89_dev *rtwdev,
2180 const struct rtw89_chan *chan,
2181 enum rtw89_phy_idx phy_idx)
2182 {
2183 rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx);
2184 rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx);
2185 rtw8922a_set_tx_shape(rtwdev, chan, phy_idx);
2186 rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx);
2187 rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
2188 }
2189
2190 static void rtw8922a_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
2191 enum rtw89_phy_idx phy_idx)
2192 {
2193 rtw8922a_set_txpwr_ref(rtwdev, phy_idx);
2194 }
2195
2196 static void rtw8922a_ctrl_trx_path(struct rtw89_dev *rtwdev,
2197 enum rtw89_rf_path tx_path, u8 tx_nss,
2198 enum rtw89_rf_path rx_path, u8 rx_nss)
2199 {
2200 enum rtw89_phy_idx phy_idx;
2201
2202 for (phy_idx = RTW89_PHY_0; phy_idx <= RTW89_PHY_1; phy_idx++) {
2203 rtw8922a_ctrl_tx_path_tmac(rtwdev, tx_path, phy_idx);
2204 rtw8922a_ctrl_rx_path_tmac(rtwdev, rx_path, phy_idx);
2205 rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
2206 }
2207 }
2208
2209 static void rtw8922a_ctrl_nbtg_bt_tx(struct rtw89_dev *rtwdev, bool en,
2210 enum rtw89_phy_idx phy_idx)
2211 {
2212 if (en) {
2213 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x3, phy_idx);
2214 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
2215 0xf, phy_idx);
2216 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
2217 0x0, phy_idx);
2218 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x0, phy_idx);
2219 rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x80, phy_idx);
2220 rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x8080, phy_idx);
2221 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x34, phy_idx);
2222 rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x34, phy_idx);
2223 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x3, phy_idx);
2224 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
2225 0xf, phy_idx);
2226 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
2227 0x0, phy_idx);
2228 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x0, phy_idx);
2229 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x80, phy_idx);
2230 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x8080, phy_idx);
2231 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x34, phy_idx);
2232 rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x34, phy_idx);
2233 } else {
2234 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x0, phy_idx);
2235 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
2236 0x0, phy_idx);
2237 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
2238 0x1, phy_idx);
2239 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x1, phy_idx);
2240 rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x1a, phy_idx);
2241 rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x2a2a, phy_idx);
2242 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x7a6, phy_idx);
2243 rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x26, phy_idx);
2244 rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x0, phy_idx);
2245 rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
2246 0x0, phy_idx);
2247 rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
2248 0x1, phy_idx);
2249 rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x1, phy_idx);
2250 rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
2251 rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x2a30, phy_idx);
2252 rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x7a6, phy_idx);
2253 rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x26, phy_idx);
2254 }
2255 }
2256
2257 static void rtw8922a_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
2258 {
2259 const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
2260 enum rtw89_band band = chan->band_type;
2261 struct rtw89_hal *hal = &rtwdev->hal;
2262 u8 ntx_path = RF_PATH_AB;
2263 u32 tx_en0, tx_en1;
2264
2265 if (hal->antenna_tx == RF_A)
2266 ntx_path = RF_PATH_A;
2267 else if (hal->antenna_tx == RF_B)
2268 ntx_path = RF_PATH_B;
2269
2270 rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, true);
2271 if (rtwdev->dbcc_en)
2272 rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band,
2273 &tx_en1, true);
2274
2275 rtw8922a_ctrl_trx_path(rtwdev, ntx_path, 2, RF_PATH_AB, 2);
2276
2277 rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, false);
2278 if (rtwdev->dbcc_en)
2279 rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band,
2280 &tx_en1, false);
2281 }
2282
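/* Trigger a one-shot thermal measurement on the given RF path and add
 * the per-path thermal trim. A fixed value of 80 is returned unless
 * CFO/RFK-track debugging or thermal protection actually needs the
 * reading.
 */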
2283 static u8 rtw8922a_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
2284 {
2285 struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
2286 struct rtw89_hal *hal = &rtwdev->hal;
2287 int th;
2288
2289 /* read thermal only if debugging or thermal protection enabled */
2290 if (!rtw89_debug_is_enabled(rtwdev, RTW89_DBG_CFO | RTW89_DBG_RFK_TRACK) &&
2291 !hal->thermal_prot_th)
2292 return 80;
2293
2294 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2295 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
2296 rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2297
2298 fsleep(200);
2299
2300 th = rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL_V1);
2301 th += (s8)info->thermal_trim[rf_path];
2302
2303 return clamp_t(int, th, 0, U8_MAX);
2304 }
2305
2306 static void rtw8922a_btc_set_rfe(struct rtw89_dev *rtwdev)
2307 {
2308 union rtw89_btc_module_info *md = &rtwdev->btc.mdinfo;
2309 struct rtw89_btc_module_v7 *module = &md->md_v7;
2310
2311 module->rfe_type = rtwdev->efuse.rfe_type;
2312 module->kt_ver = rtwdev->hal.cv;
2313 module->bt_solo = 0;
2314 module->switch_type = BTC_SWITCH_INTERNAL;
2315 module->wa_type = 0;
2316
2317 module->ant.type = BTC_ANT_SHARED;
2318 module->ant.num = 2;
2319 module->ant.isolation = 10;
2320 module->ant.diversity = 0;
2321 module->ant.single_pos = RF_PATH_A;
2322 module->ant.btg_pos = RF_PATH_B;
2323
2324 if (module->kt_ver <= 1)
2325 module->wa_type |= BTC_WA_HFP_ZB;
2326
2327 rtwdev->btc.cx.other.type = BTC_3CX_NONE;
2328
2329 if (module->rfe_type == 0) {
2330 rtwdev->btc.dm.error.map.rfe_type0 = true;
2331 return;
2332 }
2333
2334 module->ant.num = (module->rfe_type % 2) ? 2 : 3;
2335
2336 if (module->kt_ver == 0)
2337 module->ant.num = 2;
2338
2339 if (module->ant.num == 3) {
2340 module->ant.type = BTC_ANT_DEDICATED;
2341 module->bt_pos = BTC_BT_ALONE;
2342 } else {
2343 module->ant.type = BTC_ANT_SHARED;
2344 module->bt_pos = BTC_BT_BTG;
2345 }
2346 rtwdev->btc.btg_pos = module->ant.btg_pos;
2347 rtwdev->btc.ant_type = module->ant.type;
2348 }
2349
2350 static
2351 void rtw8922a_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
2352 {
2353 rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
2354 rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
2355 }
2356
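/* One-time BT coexistence setup: program the per-path TRX mask LUTs,
 * the WL high-priority TX sources, the PTA break table and the ZB coex
 * tables, then mark WL coex init as done.
 */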
2357 static void rtw8922a_btc_init_cfg(struct rtw89_dev *rtwdev)
2358 {
2359 struct rtw89_btc *btc = &rtwdev->btc;
2360 struct rtw89_btc_ant_info_v7 *ant = &btc->mdinfo.md_v7.ant;
2361 u32 wl_pri, path_min, path_max;
2362 u8 path;
2363
2364 /* for 1-Ant && 1-ss case: only 1-path */
2365 if (ant->num == 1) {
2366 path_min = ant->single_pos;
2367 path_max = path_min;
2368 } else {
2369 path_min = RF_PATH_A;
2370 path_max = RF_PATH_B;
2371 }
2372
2373 path = path_min;
2374
2375 for (path = path_min; path <= path_max; path++) {
2376 /* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */
2377 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, BIT(17));
2378
2379 /* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU */
2380 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_SS_GROUP, 0x5ff);
2381
2382 /* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */
2383 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_RX_GROUP, 0x5df);
2384
2385 /* if GNT_WL = 0 && BT = Tx_group -->
2386 * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff)
2387 */
2388 if (btc->ant_type == BTC_ANT_SHARED && btc->btg_pos == path)
2389 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x55f);
2390 else
2391 rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x5ff);
2392
2393 rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0);
2394 }
2395
2396 /* set WL PTA Hi-Pri: Ack-Tx, beacon-tx, Trig-frame-Tx, Null-Tx*/
2397 wl_pri = B_BTC_RSP_ACK_HI | B_BTC_TX_BCN_HI | B_BTC_TX_TRI_HI |
2398 B_BTC_TX_NULL_HI;
2399 rtw89_write32(rtwdev, R_BTC_COEX_WL_REQ_BE, wl_pri);
2400
2401 /* set PTA break table */
2402 rtw89_write32(rtwdev, R_BE_BT_BREAK_TABLE, BTC_BREAK_PARAM);
2403
2404 /* ZB coex table init for HFP PTA req-cmd bit-4 define issue COEX-900*/
2405 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_0, 0xda5a5a5a);
2406
2407 rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_1, 0xda5a5a5a);
2408
2409 rtw89_write32(rtwdev, R_BTC_ZB_BREAK_TBL, 0xf0ffffff);
2410 btc->cx.wl.status.map.init_ok = true;
2411 }
2412
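/* Coex WL TX power control: bits 15:0 carry the "all time" power limit
 * and bits 31:16 the limit applied while GNT_BT is asserted; a value of
 * 0xffff in either field disables that force.
 */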
2413 static void
2414 rtw8922a_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
2415 {
2416 u16 ctrl_all_time = u32_get_bits(txpwr_val, GENMASK(15, 0));
2417 u16 ctrl_gnt_bt = u32_get_bits(txpwr_val, GENMASK(31, 16));
2418
2419 switch (ctrl_all_time) {
2420 case 0xffff:
2421 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2422 B_BE_FORCE_PWR_BY_RATE_EN, 0x0);
2423 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2424 B_BE_FORCE_PWR_BY_RATE_VAL, 0x0);
2425 break;
2426 default:
2427 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2428 B_BE_FORCE_PWR_BY_RATE_VAL, ctrl_all_time);
2429 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2430 B_BE_FORCE_PWR_BY_RATE_EN, 0x1);
2431 break;
2432 }
2433
2434 switch (ctrl_gnt_bt) {
2435 case 0xffff:
2436 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL,
2437 B_BE_PWR_BT_EN, 0x0);
2438 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL,
2439 B_BE_PWR_BT_VAL, 0x0);
2440 break;
2441 default:
2442 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL,
2443 B_BE_PWR_BT_VAL, ctrl_gnt_bt);
2444 rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL,
2445 B_BE_PWR_BT_EN, 0x1);
2446 break;
2447 }
2448 }
2449
2450 static
2451 s8 rtw8922a_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
2452 {
2453 return clamp_t(s8, val, -100, 0) + 100;
2454 }
2455
2456 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_ul[] = {
2457 {255, 0, 0, 7}, /* 0 -> original */
2458 {255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
2459 {255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2460 {255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2461 {255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2462 {255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */
2463 {6, 1, 0, 7},
2464 {13, 1, 0, 7},
2465 {13, 1, 0, 7}
2466 };
2467
2468 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_dl[] = {
2469 {255, 0, 0, 7}, /* 0 -> original */
2470 {255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
2471 {255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2472 {255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2473 {255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2474 {255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */
2475 {255, 1, 0, 7},
2476 {255, 1, 0, 7},
2477 {255, 1, 0, 7}
2478 };
2479
2480 static const u8 rtw89_btc_8922a_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30};
2481 static const u8 rtw89_btc_8922a_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {50, 40, 30, 20};
2482
2483 static const struct rtw89_btc_fbtc_mreg rtw89_btc_8922a_mon_reg[] = {
2484 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe300),
2485 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe320),
2486 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe324),
2487 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe328),
2488 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe32c),
2489 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe330),
2490 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe334),
2491 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe338),
2492 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe344),
2493 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe348),
2494 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe34c),
2495 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe350),
2496 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a2c),
2497 RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a50),
2498 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
2499 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x660),
2500 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x1660),
2501 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x418c),
2502 RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x518c),
2503 };
2504
2505 static
2506 void rtw8922a_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
2507 {
2508 /* Feature move to firmware */
2509 }
2510
2511 static
2512 void rtw8922a_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
2513 {
2514 if (!state) {
2515 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2516 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2517 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110);
2518 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x01018);
2519 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000);
2520
2521 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000);
2522 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1);
2523 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110);
2524 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x01018);
2525 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000);
2526 } else {
2527 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2528 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2529 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110);
2530 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x09018);
2531 rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000);
2532
2533 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000);
2534 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1);
2535 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110);
2536 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x09018);
2537 rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000);
2538 }
2539 }
2540
2541 static void rtw8922a_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
2542 {
2543 }
2544
2545 static void rtw8922a_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
2546 struct rtw89_rx_phy_ppdu *phy_ppdu,
2547 struct ieee80211_rx_status *status)
2548 {
2549 u8 chan_idx = phy_ppdu->chan_idx;
2550 enum nl80211_band band;
2551 u8 ch;
2552
2553 if (chan_idx == 0)
2554 return;
2555
2556 rtw89_decode_chan_idx(rtwdev, chan_idx, &ch, &band);
2557 status->freq = ieee80211_channel_to_frequency(ch, band);
2558 status->band = band;
2559 }
2560
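/* Fill mac80211 RX status from the PPDU status: overall signal is the
 * stronger of the two paths, per-chain signals cover both paths, and
 * frequency/band are decoded from the channel index when valid.
 */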
2561 static void rtw8922a_query_ppdu(struct rtw89_dev *rtwdev,
2562 struct rtw89_rx_phy_ppdu *phy_ppdu,
2563 struct ieee80211_rx_status *status)
2564 {
2565 u8 path;
2566 u8 *rx_power = phy_ppdu->rssi;
2567
2568 status->signal =
2569 RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A], rx_power[RF_PATH_B]));
2570 for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
2571 status->chains |= BIT(path);
2572 status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
2573 }
2574 if (phy_ppdu->valid)
2575 rtw8922a_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
2576 }
2577
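/* Derive per-path RSSI from the reported RX power level (RPL): unused
 * paths are zeroed, OFDM rates take the frequency-domain RPL plus a
 * bandwidth-dependent compensation, and the average RSSI follows the
 * average RPL.
 */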
2578 static void rtw8922a_convert_rpl_to_rssi(struct rtw89_dev *rtwdev,
2579 struct rtw89_rx_phy_ppdu *phy_ppdu)
2580 {
2581 /* Mapping to BW: 5, 10, 20, 40, 80, 160, 80_80 */
2582 static const u8 bw_compensate[] = {0, 0, 0, 6, 12, 18, 0};
2583 u8 *rssi = phy_ppdu->rssi;
2584 u8 compensate = 0;
2585 u16 rpl_tmp;
2586 u8 i;
2587
2588 if (phy_ppdu->bw_idx < ARRAY_SIZE(bw_compensate))
2589 compensate = bw_compensate[phy_ppdu->bw_idx];
2590
2591 for (i = 0; i < RF_PATH_NUM_8922A; i++) {
2592 if (!(phy_ppdu->rx_path_en & BIT(i))) {
2593 rssi[i] = 0;
2594 phy_ppdu->rpl_path[i] = 0;
2595 phy_ppdu->rpl_fd[i] = 0;
2596 }
2597 if (phy_ppdu->rate >= RTW89_HW_RATE_OFDM6) {
2598 rpl_tmp = phy_ppdu->rpl_fd[i];
2599 if (rpl_tmp)
2600 rpl_tmp += compensate;
2601
2602 phy_ppdu->rpl_path[i] = rpl_tmp;
2603 }
2604 rssi[i] = phy_ppdu->rpl_path[i];
2605 }
2606
2607 phy_ppdu->rssi_avg = phy_ppdu->rpl_avg;
2608 }
2609
2610 static int rtw8922a_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
2611 {
2612 rtw89_write8_set(rtwdev, R_BE_FEN_RST_ENABLE,
2613 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
2614 rtw89_write32(rtwdev, R_BE_DMAC_SYS_CR32B, 0x7FF97FF9);
2615
2616 return 0;
2617 }
2618
2619 static int rtw8922a_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
2620 {
2621 rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE,
2622 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
2623
2624 return 0;
2625 }
2626
2627 #ifdef CONFIG_PM
2628 static const struct wiphy_wowlan_support rtw_wowlan_stub_8922a = {
2629 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT |
2630 WIPHY_WOWLAN_NET_DETECT,
2631 .n_patterns = RTW89_MAX_PATTERN_NUM,
2632 .pattern_max_len = RTW89_MAX_PATTERN_SIZE,
2633 .pattern_min_len = 1,
2634 .max_nd_match_sets = RTW89_SCANOFLD_MAX_SSID,
2635 };
2636 #endif
2637
2638 static const struct rtw89_chip_ops rtw8922a_chip_ops = {
2639 .enable_bb_rf = rtw8922a_mac_enable_bb_rf,
2640 .disable_bb_rf = rtw8922a_mac_disable_bb_rf,
2641 .bb_preinit = rtw8922a_bb_preinit,
2642 .bb_postinit = rtw8922a_bb_postinit,
2643 .bb_reset = rtw8922a_bb_reset,
2644 .bb_sethw = rtw8922a_bb_sethw,
2645 .read_rf = rtw89_phy_read_rf_v2,
2646 .write_rf = rtw89_phy_write_rf_v2,
2647 .set_channel = rtw8922a_set_channel,
2648 .set_channel_help = rtw8922a_set_channel_help,
2649 .read_efuse = rtw8922a_read_efuse,
2650 .read_phycap = rtw8922a_read_phycap,
2651 .fem_setup = NULL,
2652 .rfe_gpio = NULL,
2653 .rfk_hw_init = rtw8922a_rfk_hw_init,
2654 .rfk_init = rtw8922a_rfk_init,
2655 .rfk_init_late = rtw8922a_rfk_init_late,
2656 .rfk_channel = rtw8922a_rfk_channel,
2657 .rfk_band_changed = rtw8922a_rfk_band_changed,
2658 .rfk_scan = rtw8922a_rfk_scan,
2659 .rfk_track = rtw8922a_rfk_track,
2660 .power_trim = rtw8922a_power_trim,
2661 .set_txpwr = rtw8922a_set_txpwr,
2662 .set_txpwr_ctrl = rtw8922a_set_txpwr_ctrl,
2663 .init_txpwr_unit = NULL,
2664 .get_thermal = rtw8922a_get_thermal,
2665 .ctrl_btg_bt_rx = rtw8922a_ctrl_btg_bt_rx,
2666 .query_ppdu = rtw8922a_query_ppdu,
2667 .convert_rpl_to_rssi = rtw8922a_convert_rpl_to_rssi,
2668 .ctrl_nbtg_bt_tx = rtw8922a_ctrl_nbtg_bt_tx,
2669 .cfg_txrx_path = rtw8922a_bb_cfg_txrx_path,
2670 .set_txpwr_ul_tb_offset = NULL,
2671 .digital_pwr_comp = rtw8922a_digital_pwr_comp,
2672 .pwr_on_func = rtw8922a_pwr_on_func,
2673 .pwr_off_func = rtw8922a_pwr_off_func,
2674 .query_rxdesc = rtw89_core_query_rxdesc_v2,
2675 .fill_txdesc = rtw89_core_fill_txdesc_v2,
2676 .fill_txdesc_fwcmd = rtw89_core_fill_txdesc_fwcmd_v2,
2677 .cfg_ctrl_path = rtw89_mac_cfg_ctrl_path_v2,
2678 .mac_cfg_gnt = rtw89_mac_cfg_gnt_v2,
2679 .stop_sch_tx = rtw89_mac_stop_sch_tx_v2,
2680 .resume_sch_tx = rtw89_mac_resume_sch_tx_v2,
2681 .h2c_dctl_sec_cam = rtw89_fw_h2c_dctl_sec_cam_v2,
2682 .h2c_default_cmac_tbl = rtw89_fw_h2c_default_cmac_tbl_g7,
2683 .h2c_assoc_cmac_tbl = rtw89_fw_h2c_assoc_cmac_tbl_g7,
2684 .h2c_ampdu_cmac_tbl = rtw89_fw_h2c_ampdu_cmac_tbl_g7,
2685 .h2c_default_dmac_tbl = rtw89_fw_h2c_default_dmac_tbl_v2,
2686 .h2c_update_beacon = rtw89_fw_h2c_update_beacon_be,
2687 .h2c_ba_cam = rtw89_fw_h2c_ba_cam_v1,
2688
2689 .btc_set_rfe = rtw8922a_btc_set_rfe,
2690 .btc_init_cfg = rtw8922a_btc_init_cfg,
2691 .btc_set_wl_pri = NULL,
2692 .btc_set_wl_txpwr_ctrl = rtw8922a_btc_set_wl_txpwr_ctrl,
2693 .btc_get_bt_rssi = rtw8922a_btc_get_bt_rssi,
2694 .btc_update_bt_cnt = rtw8922a_btc_update_bt_cnt,
2695 .btc_wl_s1_standby = rtw8922a_btc_wl_s1_standby,
2696 .btc_set_wl_rx_gain = rtw8922a_btc_set_wl_rx_gain,
2697 .btc_set_policy = rtw89_btc_set_policy_v1,
2698 };
2699
2700 const struct rtw89_chip_info rtw8922a_chip_info = {
2701 .chip_id = RTL8922A,
2702 .chip_gen = RTW89_CHIP_BE,
2703 .ops = &rtw8922a_chip_ops,
2704 .mac_def = &rtw89_mac_gen_be,
2705 .phy_def = &rtw89_phy_gen_be,
2706 .fw_basename = RTW8922A_FW_BASENAME,
2707 .fw_format_max = RTW8922A_FW_FORMAT_MAX,
2708 .try_ce_fw = false,
2709 .bbmcu_nr = 1,
2710 .needed_fw_elms = RTW89_BE_GEN_DEF_NEEDED_FW_ELEMENTS,
2711 .fifo_size = 589824,
2712 .small_fifo_size = false,
2713 .dle_scc_rsvd_size = 0,
2714 .max_amsdu_limit = 8000,
2715 .dis_2g_40m_ul_ofdma = false,
2716 .rsvd_ple_ofst = 0x8f800,
2717 .hfc_param_ini = rtw8922a_hfc_param_ini_pcie,
2718 .dle_mem = rtw8922a_dle_mem_pcie,
2719 .wde_qempty_acq_grpnum = 4,
2720 .wde_qempty_mgq_grpsel = 4,
2721 .rf_base_addr = {0xe000, 0xf000},
2722 .thermal_th = {0xad, 0xb4},
2723 .pwr_on_seq = NULL,
2724 .pwr_off_seq = NULL,
2725 .bb_table = NULL,
2726 .bb_gain_table = NULL,
2727 .rf_table = {},
2728 .nctl_table = NULL,
2729 .nctl_post_table = NULL,
2730 .dflt_parms = NULL, /* load parm from fw */
2731 .rfe_parms_conf = NULL, /* load parm from fw */
2732 .txpwr_factor_rf = 2,
2733 .txpwr_factor_mac = 1,
2734 .dig_table = NULL,
2735 .dig_regs = &rtw8922a_dig_regs,
2736 .tssi_dbw_table = NULL,
2737 .support_macid_num = 32,
2738 .support_link_num = 2,
2739 .support_chanctx_num = 2,
2740 .support_rnr = true,
2741 .support_bands = BIT(NL80211_BAND_2GHZ) |
2742 BIT(NL80211_BAND_5GHZ) |
2743 BIT(NL80211_BAND_6GHZ),
2744 .support_bandwidths = BIT(NL80211_CHAN_WIDTH_20) |
2745 BIT(NL80211_CHAN_WIDTH_40) |
2746 BIT(NL80211_CHAN_WIDTH_80) |
2747 BIT(NL80211_CHAN_WIDTH_160),
2748 .support_unii4 = true,
2749 .ul_tb_waveform_ctrl = false,
2750 .ul_tb_pwr_diff = false,
2751 .hw_sec_hdr = true,
2752 .hw_mgmt_tx_encrypt = true,
2753 .rf_path_num = 2,
2754 .tx_nss = 2,
2755 .rx_nss = 2,
2756 .acam_num = 128,
2757 .bcam_num = 20,
2758 .scam_num = 32,
2759 .bacam_num = 24,
2760 .bacam_dynamic_num = 8,
2761 .bacam_ver = RTW89_BACAM_V1,
2762 .ppdu_max_usr = 16,
2763 .sec_ctrl_efuse_size = 4,
2764 .physical_efuse_size = 0x1300,
2765 .logical_efuse_size = 0x70000,
2766 .limit_efuse_size = 0x40000,
2767 .dav_phy_efuse_size = 0,
2768 .dav_log_efuse_size = 0,
2769 .efuse_blocks = rtw8922a_efuse_blocks,
2770 .phycap_addr = 0x1700,
2771 .phycap_size = 0x38,
2772 .para_ver = 0xf,
2773 .wlcx_desired = 0x07110000,
2774 .btcx_desired = 0x7,
2775 .scbd = 0x1,
2776 .mailbox = 0x1,
2777
2778 .afh_guard_ch = 6,
2779 .wl_rssi_thres = rtw89_btc_8922a_wl_rssi_thres,
2780 .bt_rssi_thres = rtw89_btc_8922a_bt_rssi_thres,
2781 .rssi_tol = 2,
2782 .mon_reg_num = ARRAY_SIZE(rtw89_btc_8922a_mon_reg),
2783 .mon_reg = rtw89_btc_8922a_mon_reg,
2784 .rf_para_ulink_num = ARRAY_SIZE(rtw89_btc_8922a_rf_ul),
2785 .rf_para_ulink = rtw89_btc_8922a_rf_ul,
2786 .rf_para_dlink_num = ARRAY_SIZE(rtw89_btc_8922a_rf_dl),
2787 .rf_para_dlink = rtw89_btc_8922a_rf_dl,
2788 .ps_mode_supported = BIT(RTW89_PS_MODE_RFOFF) |
2789 BIT(RTW89_PS_MODE_CLK_GATED) |
2790 BIT(RTW89_PS_MODE_PWR_GATED),
2791 .low_power_hci_modes = 0,
2792 .h2c_cctl_func_id = H2C_FUNC_MAC_CCTLINFO_UD_G7,
2793 .hci_func_en_addr = R_BE_HCI_FUNC_EN,
2794 .h2c_desc_size = sizeof(struct rtw89_rxdesc_short_v2),
2795 .txwd_body_size = sizeof(struct rtw89_txwd_body_v2),
2796 .txwd_info_size = sizeof(struct rtw89_txwd_info_v2),
2797 .h2c_ctrl_reg = R_BE_H2CREG_CTRL,
2798 .h2c_counter_reg = {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8},
2799 .h2c_regs = rtw8922a_h2c_regs,
2800 .c2h_ctrl_reg = R_BE_C2HREG_CTRL,
2801 .c2h_counter_reg = {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8},
2802 .c2h_regs = rtw8922a_c2h_regs,
2803 .page_regs = &rtw8922a_page_regs,
2804 .wow_reason_reg = rtw8922a_wow_wakeup_regs,
2805 .cfo_src_fd = true,
2806 .cfo_hw_comp = true,
2807 .dcfo_comp = NULL,
2808 .dcfo_comp_sft = 0,
2809 .imr_info = NULL,
2810 .imr_dmac_table = &rtw8922a_imr_dmac_table,
2811 .imr_cmac_table = &rtw8922a_imr_cmac_table,
2812 .rrsr_cfgs = &rtw8922a_rrsr_cfgs,
2813 .bss_clr_vld = {R_BSS_CLR_VLD_V2, B_BSS_CLR_VLD0_V2},
2814 .bss_clr_map_reg = R_BSS_CLR_MAP_V2,
2815 .rfkill_init = &rtw8922a_rfkill_regs,
2816 .rfkill_get = {R_BE_GPIO_EXT_CTRL, B_BE_GPIO_IN_9},
2817 .dma_ch_mask = 0,
2818 .edcca_regs = &rtw8922a_edcca_regs,
2819 #ifdef CONFIG_PM
2820 .wowlan_stub = &rtw_wowlan_stub_8922a,
2821 #endif
2822 .xtal_info = NULL,
2823 };
2824 EXPORT_SYMBOL(rtw8922a_chip_info);
2825
2826 MODULE_FIRMWARE(RTW8922A_MODULE_FIRMWARE);
2827 MODULE_AUTHOR("Realtek Corporation");
2828 MODULE_DESCRIPTION("Realtek 802.11be wireless 8922A driver");
2829 MODULE_LICENSE("Dual BSD/GPL");
2830