xref: /linux/drivers/net/wireless/realtek/rtw89/rtw8922a.c (revision 8be4d31cb8aaeea27bde4b7ddb26e28a89062ebf)
1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2023  Realtek Corporation
3  */
4 
5 #include "chan.h"
6 #include "coex.h"
7 #include "debug.h"
8 #include "efuse.h"
9 #include "fw.h"
10 #include "mac.h"
11 #include "phy.h"
12 #include "reg.h"
13 #include "rtw8922a.h"
14 #include "rtw8922a_rfk.h"
15 #include "sar.h"
16 #include "util.h"
17 
18 #define RTW8922A_FW_FORMAT_MAX 4
19 #define RTW8922A_FW_BASENAME "rtw89/rtw8922a_fw"
20 #define RTW8922A_MODULE_FIRMWARE \
21 	RTW8922A_FW_BASENAME "-" __stringify(RTW8922A_FW_FORMAT_MAX) ".bin"
22 
23 #define HE_N_USER_MAX_8922A 4
24 
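/* Per-DMA-channel HCI flow-control page quotas; the last field selects the public page group. */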
25 static const struct rtw89_hfc_ch_cfg rtw8922a_hfc_chcfg_pcie[] = {
26 	{2, 1641, grp_0}, /* ACH 0 */
27 	{2, 1641, grp_0}, /* ACH 1 */
28 	{2, 1641, grp_0}, /* ACH 2 */
29 	{2, 1641, grp_0}, /* ACH 3 */
30 	{2, 1641, grp_1}, /* ACH 4 */
31 	{2, 1641, grp_1}, /* ACH 5 */
32 	{2, 1641, grp_1}, /* ACH 6 */
33 	{2, 1641, grp_1}, /* ACH 7 */
34 	{2, 1641, grp_0}, /* B0MGQ */
35 	{2, 1641, grp_0}, /* B0HIQ */
36 	{2, 1641, grp_1}, /* B1MGQ */
37 	{2, 1641, grp_1}, /* B1HIQ */
38 	{0, 0, 0}, /* FWCMDQ */
39 	{0, 0, 0}, /* BMC */
40 	{0, 0, 0}, /* H2D */
41 };
42 
43 static const struct rtw89_hfc_pub_cfg rtw8922a_hfc_pubcfg_pcie = {
44 	1651, /* Group 0 */
45 	1651, /* Group 1 */
46 	3302, /* Public Max */
47 	0, /* WP threshold */
48 };
49 
50 static const struct rtw89_hfc_param_ini rtw8922a_hfc_param_ini_pcie[] = {
51 	[RTW89_QTA_SCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
52 			   &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
53 	[RTW89_QTA_DBCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
54 			   &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
55 	[RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_prec_cfg_c2,
56 			    RTW89_HCIFC_POH},
57 	[RTW89_QTA_INVALID] = {NULL},
58 };
59 
60 static const struct rtw89_dle_mem rtw8922a_dle_mem_pcie[] = {
61 	[RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size0_v1,
62 			   &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
63 			   &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
64 			   &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
65 			   &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
66 	[RTW89_QTA_DBCC] = {RTW89_QTA_DBCC, &rtw89_mac_size.wde_size0_v1,
67 			   &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
68 			   &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
69 			   &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
70 			   &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
71 	[RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size4_v1,
72 			    &rtw89_mac_size.ple_size3_v1, &rtw89_mac_size.wde_qt4,
73 			    &rtw89_mac_size.wde_qt4, &rtw89_mac_size.ple_qt9,
74 			    &rtw89_mac_size.ple_qt9, &rtw89_mac_size.ple_rsvd_qt1,
75 			    &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
76 	[RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
77 			       NULL},
78 };
79 
80 static const u32 rtw8922a_h2c_regs[RTW89_H2CREG_MAX] = {
81 	R_BE_H2CREG_DATA0, R_BE_H2CREG_DATA1, R_BE_H2CREG_DATA2,
82 	R_BE_H2CREG_DATA3
83 };
84 
85 static const u32 rtw8922a_c2h_regs[RTW89_C2HREG_MAX] = {
86 	R_BE_C2HREG_DATA0, R_BE_C2HREG_DATA1, R_BE_C2HREG_DATA2,
87 	R_BE_C2HREG_DATA3
88 };
89 
90 static const u32 rtw8922a_wow_wakeup_regs[RTW89_WOW_REASON_NUM] = {
91 	R_AX_C2HREG_DATA3_V1 + 3, R_BE_DBG_WOW,
92 };
93 
94 static const struct rtw89_page_regs rtw8922a_page_regs = {
95 	.hci_fc_ctrl	= R_BE_HCI_FC_CTRL,
96 	.ch_page_ctrl	= R_BE_CH_PAGE_CTRL,
97 	.ach_page_ctrl	= R_BE_CH0_PAGE_CTRL,
98 	.ach_page_info	= R_BE_CH0_PAGE_INFO,
99 	.pub_page_info3	= R_BE_PUB_PAGE_INFO3,
100 	.pub_page_ctrl1	= R_BE_PUB_PAGE_CTRL1,
101 	.pub_page_ctrl2	= R_BE_PUB_PAGE_CTRL2,
102 	.pub_page_info1	= R_BE_PUB_PAGE_INFO1,
103 	.pub_page_info2 = R_BE_PUB_PAGE_INFO2,
104 	.wp_page_ctrl1	= R_BE_WP_PAGE_CTRL1,
105 	.wp_page_ctrl2	= R_BE_WP_PAGE_CTRL2,
106 	.wp_page_info1	= R_BE_WP_PAGE_INFO1,
107 };
108 
109 static const struct rtw89_reg_imr rtw8922a_imr_dmac_regs[] = {
110 	{R_BE_DISP_HOST_IMR, B_BE_DISP_HOST_IMR_CLR, B_BE_DISP_HOST_IMR_SET},
111 	{R_BE_DISP_CPU_IMR, B_BE_DISP_CPU_IMR_CLR, B_BE_DISP_CPU_IMR_SET},
112 	{R_BE_DISP_OTHER_IMR, B_BE_DISP_OTHER_IMR_CLR, B_BE_DISP_OTHER_IMR_SET},
113 	{R_BE_PKTIN_ERR_IMR, B_BE_PKTIN_ERR_IMR_CLR, B_BE_PKTIN_ERR_IMR_SET},
114 	{R_BE_INTERRUPT_MASK_REG, B_BE_INTERRUPT_MASK_REG_CLR, B_BE_INTERRUPT_MASK_REG_SET},
115 	{R_BE_MLO_ERR_IDCT_IMR, B_BE_MLO_ERR_IDCT_IMR_CLR, B_BE_MLO_ERR_IDCT_IMR_SET},
116 	{R_BE_MPDU_TX_ERR_IMR, B_BE_MPDU_TX_ERR_IMR_CLR, B_BE_MPDU_TX_ERR_IMR_SET},
117 	{R_BE_MPDU_RX_ERR_IMR, B_BE_MPDU_RX_ERR_IMR_CLR, B_BE_MPDU_RX_ERR_IMR_SET},
118 	{R_BE_SEC_ERROR_IMR, B_BE_SEC_ERROR_IMR_CLR, B_BE_SEC_ERROR_IMR_SET},
119 	{R_BE_CPUIO_ERR_IMR, B_BE_CPUIO_ERR_IMR_CLR, B_BE_CPUIO_ERR_IMR_SET},
120 	{R_BE_WDE_ERR_IMR, B_BE_WDE_ERR_IMR_CLR, B_BE_WDE_ERR_IMR_SET},
121 	{R_BE_WDE_ERR1_IMR, B_BE_WDE_ERR1_IMR_CLR, B_BE_WDE_ERR1_IMR_SET},
122 	{R_BE_PLE_ERR_IMR, B_BE_PLE_ERR_IMR_CLR, B_BE_PLE_ERR_IMR_SET},
123 	{R_BE_PLE_ERRFLAG1_IMR, B_BE_PLE_ERRFLAG1_IMR_CLR, B_BE_PLE_ERRFLAG1_IMR_SET},
124 	{R_BE_WDRLS_ERR_IMR, B_BE_WDRLS_ERR_IMR_CLR, B_BE_WDRLS_ERR_IMR_SET},
125 	{R_BE_TXPKTCTL_B0_ERRFLAG_IMR, B_BE_TXPKTCTL_B0_ERRFLAG_IMR_CLR,
126 	 B_BE_TXPKTCTL_B0_ERRFLAG_IMR_SET},
127 	{R_BE_TXPKTCTL_B1_ERRFLAG_IMR, B_BE_TXPKTCTL_B1_ERRFLAG_IMR_CLR,
128 	 B_BE_TXPKTCTL_B1_ERRFLAG_IMR_SET},
129 	{R_BE_BBRPT_COM_ERR_IMR, B_BE_BBRPT_COM_ERR_IMR_CLR, B_BE_BBRPT_COM_ERR_IMR_SET},
130 	{R_BE_BBRPT_CHINFO_ERR_IMR, B_BE_BBRPT_CHINFO_ERR_IMR_CLR,
131 	 B_BE_BBRPT_CHINFO_ERR_IMR_SET},
132 	{R_BE_BBRPT_DFS_ERR_IMR, B_BE_BBRPT_DFS_ERR_IMR_CLR, B_BE_BBRPT_DFS_ERR_IMR_SET},
133 	{R_BE_LA_ERRFLAG_IMR, B_BE_LA_ERRFLAG_IMR_CLR, B_BE_LA_ERRFLAG_IMR_SET},
134 	{R_BE_CH_INFO_DBGFLAG_IMR, B_BE_CH_INFO_DBGFLAG_IMR_CLR, B_BE_CH_INFO_DBGFLAG_IMR_SET},
135 	{R_BE_PLRLS_ERR_IMR, B_BE_PLRLS_ERR_IMR_CLR, B_BE_PLRLS_ERR_IMR_SET},
136 	{R_BE_HAXI_IDCT_MSK, B_BE_HAXI_IDCT_MSK_CLR, B_BE_HAXI_IDCT_MSK_SET},
137 };
138 
139 static const struct rtw89_imr_table rtw8922a_imr_dmac_table = {
140 	.regs = rtw8922a_imr_dmac_regs,
141 	.n_regs = ARRAY_SIZE(rtw8922a_imr_dmac_regs),
142 };
143 
144 static const struct rtw89_reg_imr rtw8922a_imr_cmac_regs[] = {
145 	{R_BE_RESP_IMR, B_BE_RESP_IMR_CLR, B_BE_RESP_IMR_SET},
146 	{R_BE_RX_ERROR_FLAG_IMR, B_BE_RX_ERROR_FLAG_IMR_CLR, B_BE_RX_ERROR_FLAG_IMR_SET},
147 	{R_BE_TX_ERROR_FLAG_IMR, B_BE_TX_ERROR_FLAG_IMR_CLR, B_BE_TX_ERROR_FLAG_IMR_SET},
148 	{R_BE_RX_ERROR_FLAG_IMR_1, B_BE_TX_ERROR_FLAG_IMR_1_CLR, B_BE_TX_ERROR_FLAG_IMR_1_SET},
149 	{R_BE_PTCL_IMR1, B_BE_PTCL_IMR1_CLR, B_BE_PTCL_IMR1_SET},
150 	{R_BE_PTCL_IMR0, B_BE_PTCL_IMR0_CLR, B_BE_PTCL_IMR0_SET},
151 	{R_BE_PTCL_IMR_2, B_BE_PTCL_IMR_2_CLR, B_BE_PTCL_IMR_2_SET},
152 	{R_BE_SCHEDULE_ERR_IMR, B_BE_SCHEDULE_ERR_IMR_CLR, B_BE_SCHEDULE_ERR_IMR_SET},
153 	{R_BE_C0_TXPWR_IMR, B_BE_C0_TXPWR_IMR_CLR, B_BE_C0_TXPWR_IMR_SET},
154 	{R_BE_TRXPTCL_ERROR_INDICA_MASK, B_BE_TRXPTCL_ERROR_INDICA_MASK_CLR,
155 	 B_BE_TRXPTCL_ERROR_INDICA_MASK_SET},
156 	{R_BE_RX_ERR_IMR, B_BE_RX_ERR_IMR_CLR, B_BE_RX_ERR_IMR_SET},
157 	{R_BE_PHYINFO_ERR_IMR_V1, B_BE_PHYINFO_ERR_IMR_V1_CLR, B_BE_PHYINFO_ERR_IMR_V1_SET},
158 };
159 
160 static const struct rtw89_imr_table rtw8922a_imr_cmac_table = {
161 	.regs = rtw8922a_imr_cmac_regs,
162 	.n_regs = ARRAY_SIZE(rtw8922a_imr_cmac_regs),
163 };
164 
165 static const struct rtw89_rrsr_cfgs rtw8922a_rrsr_cfgs = {
166 	.ref_rate = {R_BE_TRXPTCL_RESP_1, B_BE_WMAC_RESP_REF_RATE_SEL, 0},
167 	.rsc = {R_BE_PTCL_RRSR1, B_BE_RSC_MASK, 2},
168 };
169 
170 static const struct rtw89_rfkill_regs rtw8922a_rfkill_regs = {
171 	.pinmux = {R_BE_GPIO8_15_FUNC_SEL,
172 		   B_BE_PINMUX_GPIO9_FUNC_SEL_MASK,
173 		   0xf},
174 	.mode = {R_BE_GPIO_EXT_CTRL + 2,
175 		 (B_BE_GPIO_MOD_9 | B_BE_GPIO_IO_SEL_9) >> 16,
176 		 0x0},
177 };
178 
179 static const struct rtw89_dig_regs rtw8922a_dig_regs = {
180 	.seg0_pd_reg = R_SEG0R_PD_V2,
181 	.pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
182 	.pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1,
183 	.bmode_pd_reg = R_BMODE_PDTH_EN_V2,
184 	.bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
185 	.bmode_pd_lower_bound_reg = R_BMODE_PDTH_V2,
186 	.bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
187 	.p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
188 	.p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
189 	.p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
190 	.p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
191 	.p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
192 	.p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
193 	.p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V3,
194 			      B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
195 	.p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V3,
196 			      B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
197 	.p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V3,
198 			      B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
199 	.p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V3,
200 			      B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
201 };
202 
203 static const struct rtw89_edcca_regs rtw8922a_edcca_regs = {
204 	.edcca_level			= R_SEG0R_EDCCA_LVL_BE,
205 	.edcca_mask			= B_EDCCA_LVL_MSK0,
206 	.edcca_p_mask			= B_EDCCA_LVL_MSK1,
207 	.ppdu_level			= R_SEG0R_PPDU_LVL_BE,
208 	.ppdu_mask			= B_EDCCA_LVL_MSK1,
209 	.p = {{
210 		.rpt_a			= R_EDCCA_RPT_A_BE,
211 		.rpt_b			= R_EDCCA_RPT_B_BE,
212 		.rpt_sel		= R_EDCCA_RPT_SEL_BE,
213 		.rpt_sel_mask		= B_EDCCA_RPT_SEL_MSK,
214 	}, {
215 		.rpt_a			= R_EDCCA_RPT_P1_A_BE,
216 		.rpt_b			= R_EDCCA_RPT_P1_B_BE,
217 		.rpt_sel		= R_EDCCA_RPT_SEL_BE,
218 		.rpt_sel_mask		= B_EDCCA_RPT_SEL_P1_MSK,
219 	}},
220 	.rpt_sel_be			= R_EDCCA_RPTREG_SEL_BE,
221 	.rpt_sel_be_mask		= B_EDCCA_RPTREG_SEL_BE_MSK,
222 	.tx_collision_t2r_st		= R_TX_COLLISION_T2R_ST_BE,
223 	.tx_collision_t2r_st_mask	= B_TX_COLLISION_T2R_ST_BE_M,
224 };
225 
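/* Logical efuse map: each block occupies its own 0x10000-aligned window of the listed size. */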
226 static const struct rtw89_efuse_block_cfg rtw8922a_efuse_blocks[] = {
227 	[RTW89_EFUSE_BLOCK_SYS]			= {.offset = 0x00000, .size = 0x310},
228 	[RTW89_EFUSE_BLOCK_RF]			= {.offset = 0x10000, .size = 0x240},
229 	[RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO]	= {.offset = 0x20000, .size = 0x4800},
230 	[RTW89_EFUSE_BLOCK_HCI_DIG_USB]		= {.offset = 0x30000, .size = 0x890},
231 	[RTW89_EFUSE_BLOCK_HCI_PHY_PCIE]	= {.offset = 0x40000, .size = 0x200},
232 	[RTW89_EFUSE_BLOCK_HCI_PHY_USB3]	= {.offset = 0x50000, .size = 0x80},
233 	[RTW89_EFUSE_BLOCK_HCI_PHY_USB2]	= {.offset = 0x60000, .size = 0x0},
234 	[RTW89_EFUSE_BLOCK_ADIE]		= {.offset = 0x70000, .size = 0x10},
235 };
236 
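/* Toggle the BTG (shared BT/WLAN RX) front-end settings used for coexistence. */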
237 static void rtw8922a_ctrl_btg_bt_rx(struct rtw89_dev *rtwdev, bool en,
238 				    enum rtw89_phy_idx phy_idx)
239 {
240 	if (en) {
241 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x1, phy_idx);
242 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
243 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x1, phy_idx);
244 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x1, phy_idx);
245 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
246 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x30, phy_idx);
247 		rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0, phy_idx);
248 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x1, phy_idx);
249 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x2, phy_idx);
250 		rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
251 				      0x1, phy_idx);
252 	} else {
253 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x0, phy_idx);
254 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
255 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x0, phy_idx);
256 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x0, phy_idx);
257 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x1a, phy_idx);
258 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x2a, phy_idx);
259 		rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xc, phy_idx);
260 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x0, phy_idx);
261 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x0, phy_idx);
262 		rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
263 				      0x0, phy_idx);
264 	}
265 }
266 
267 static int rtw8922a_pwr_on_func(struct rtw89_dev *rtwdev)
268 {
269 	struct rtw89_hal *hal = &rtwdev->hal;
270 	u32 val32;
271 	int ret;
272 
273 	rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_AFSM_WLSUS_EN |
274 						    B_BE_AFSM_PCIE_SUS_EN);
275 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_DIS_WLBT_PDNSUSEN_SOPC);
276 	rtw89_write32_set(rtwdev, R_BE_WLLPS_CTRL, B_BE_DIS_WLBT_LPSEN_LOPC);
277 	rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APDM_HPDN);
278 	rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
279 
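	/* All polls below sample every 1 ms and time out after 3 s (3000000 us). */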
280 	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_RDY_SYSPWR,
281 				1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
282 	if (ret)
283 		return ret;
284 
285 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
286 	rtw89_write32_set(rtwdev, R_BE_WLRESUME_CTRL, B_BE_LPSROP_CMAC0 |
287 						      B_BE_LPSROP_CMAC1);
288 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFN_ONMAC);
289 
290 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFN_ONMAC),
291 				1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
292 	if (ret)
293 		return ret;
294 
295 	rtw89_write32_clr(rtwdev, R_BE_AFE_ON_CTRL1, B_BE_REG_CK_MON_CK960M_EN);
296 	rtw89_write8_set(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
297 						      B_BE_POW_PC_LDO_PORT1);
298 	rtw89_write32_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
299 						       B_BE_R_SYM_ISO_ADDA_P12PP);
300 	rtw89_write8_set(rtwdev, R_BE_PLATFORM_ENABLE, B_BE_PLATFORM_EN);
301 	rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
302 
303 	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HAXIDMA_IO_ST,
304 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
305 	if (ret)
306 		return ret;
307 
308 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
309 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
310 	if (ret)
311 		return ret;
312 
313 	rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
314 
315 	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HCI_WLAN_IO_ST,
316 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
317 	if (ret)
318 		return ret;
319 
320 	rtw89_write32_clr(rtwdev, R_BE_SYS_SDIO_CTRL, B_BE_PCIE_FORCE_IBX_EN);
321 
322 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0x02);
323 	if (ret)
324 		return ret;
325 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x01, 0x01);
326 	if (ret)
327 		return ret;
328 
329 	rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
330 
331 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x40, 0x40);
332 	if (ret)
333 		return ret;
334 
335 	rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
336 
337 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x20, 0x20);
338 	if (ret)
339 		return ret;
340 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x04, 0x04);
341 	if (ret)
342 		return ret;
343 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x08, 0x08);
344 	if (ret)
345 		return ret;
346 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x10);
347 	if (ret)
348 		return ret;
349 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xEB, 0xFF);
350 	if (ret)
351 		return ret;
352 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xEB, 0xFF);
353 	if (ret)
354 		return ret;
355 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x01, 0x01);
356 	if (ret)
357 		return ret;
358 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x02, 0x02);
359 	if (ret)
360 		return ret;
361 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x80);
362 	if (ret)
363 		return ret;
364 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF1, 0, 0x40);
365 	if (ret)
366 		return ret;
367 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF2, 0, 0x40);
368 	if (ret)
369 		return ret;
370 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL_1, 0x40, 0x60);
371 	if (ret)
372 		return ret;
373 
374 	if (hal->cv != CHIP_CAV) {
375 		rtw89_write32_set(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
376 		rtw89_write32_set(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_ISO_EB2CORE);
377 		rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_B);
378 
379 		mdelay(1);
380 
381 		rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_S);
382 		rtw89_write32_clr(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
383 	}
384 
385 	rtw89_write32_set(rtwdev, R_BE_DMAC_FUNC_EN,
386 			  B_BE_MAC_FUNC_EN | B_BE_DMAC_FUNC_EN | B_BE_MPDU_PROC_EN |
387 			  B_BE_WD_RLS_EN | B_BE_DLE_WDE_EN | B_BE_TXPKT_CTRL_EN |
388 			  B_BE_STA_SCH_EN | B_BE_DLE_PLE_EN | B_BE_PKT_BUF_EN |
389 			  B_BE_DMAC_TBL_EN | B_BE_PKT_IN_EN | B_BE_DLE_CPUIO_EN |
390 			  B_BE_DISPATCHER_EN | B_BE_BBRPT_EN | B_BE_MAC_SEC_EN |
391 			  B_BE_H_AXIDMA_EN | B_BE_DMAC_MLO_EN | B_BE_PLRLS_EN |
392 			  B_BE_P_AXIDMA_EN | B_BE_DLE_DATACPUIO_EN | B_BE_LTR_CTL_EN);
393 
394 	set_bit(RTW89_FLAG_DMAC_FUNC, rtwdev->flags);
395 
396 	rtw89_write32_set(rtwdev, R_BE_CMAC_SHARE_FUNC_EN,
397 			  B_BE_CMAC_SHARE_EN | B_BE_RESPBA_EN | B_BE_ADDRSRCH_EN |
398 			  B_BE_BTCOEX_EN);
399 	rtw89_write32_set(rtwdev, R_BE_CMAC_FUNC_EN,
400 			  B_BE_CMAC_EN | B_BE_CMAC_TXEN |  B_BE_CMAC_RXEN |
401 			  B_BE_SIGB_EN | B_BE_PHYINTF_EN | B_BE_CMAC_DMA_EN |
402 			  B_BE_PTCLTOP_EN | B_BE_SCHEDULER_EN | B_BE_TMAC_EN |
403 			  B_BE_RMAC_EN | B_BE_TXTIME_EN | B_BE_RESP_PKTCTL_EN);
404 
405 	set_bit(RTW89_FLAG_CMAC0_FUNC, rtwdev->flags);
406 
407 	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
408 						       B_BE_FEN_BBPLAT_RSTB);
409 
410 	return 0;
411 }
412 
413 static int rtw8922a_pwr_off_func(struct rtw89_dev *rtwdev)
414 {
415 	u32 val32;
416 	int ret;
417 
418 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x10, 0x10);
419 	if (ret)
420 		return ret;
421 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x08);
422 	if (ret)
423 		return ret;
424 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x04);
425 	if (ret)
426 		return ret;
427 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xC6, 0xFF);
428 	if (ret)
429 		return ret;
430 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xC6, 0xFF);
431 	if (ret)
432 		return ret;
433 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x80, 0x80);
434 	if (ret)
435 		return ret;
436 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x02);
437 	if (ret)
438 		return ret;
439 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x01);
440 	if (ret)
441 		return ret;
442 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0xFF);
443 	if (ret)
444 		return ret;
445 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x00, 0xFF);
446 	if (ret)
447 		return ret;
448 
449 	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
450 						       B_BE_R_SYM_ISO_ADDA_P12PP);
451 	rtw89_write8_clr(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
452 						      B_BE_POW_PC_LDO_PORT1);
453 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
454 	rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
455 						      B_BE_FEN_BBPLAT_RSTB);
456 	rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
457 
458 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x20);
459 	if (ret)
460 		return ret;
461 
462 	rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
463 
464 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x40);
465 	if (ret)
466 		return ret;
467 
468 	rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
469 
470 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_IO_ST),
471 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
472 	if (ret)
473 		return ret;
474 
475 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
476 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
477 	if (ret)
478 		return ret;
479 
480 	rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
481 
482 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HCI_WLAN_IO_ST),
483 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
484 	if (ret)
485 		return ret;
486 
487 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_OFFMAC);
488 
489 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFM_OFFMAC),
490 				1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
491 	if (ret)
492 		return ret;
493 
494 	rtw89_write32(rtwdev, R_BE_WLLPS_CTRL, 0x0000A1B2);
495 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_XTAL_OFF_A_DIE);
496 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
497 	rtw89_write32(rtwdev, R_BE_UDM1, 0);
498 
499 	return 0;
500 }
501 
502 static void rtw8922a_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
503 					struct rtw8922a_efuse *map)
504 {
505 	struct rtw8922a_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
506 	u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
507 	struct rtw89_tssi_info *tssi = &rtwdev->tssi;
508 	u8 i, j;
509 
510 	tssi->thermal[RF_PATH_A] = map->path_a_therm;
511 	tssi->thermal[RF_PATH_B] = map->path_b_therm;
512 
513 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
514 		memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
515 		       sizeof(ofst[i]->cck_tssi));
516 
517 		for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
518 			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
519 				    "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
520 				    i, j, tssi->tssi_cck[i][j]);
521 
522 		memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
523 		       sizeof(ofst[i]->bw40_tssi));
524 		memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
525 		       ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
526 		memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
527 		       sizeof(tssi->tssi_6g_mcs[i]));
528 
529 		for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
530 			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
531 				    "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
532 				    i, j, tssi->tssi_mcs[i][j]);
533 	}
534 }
535 
536 static void rtw8922a_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
537 					       struct rtw8922a_efuse *map)
538 {
539 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
540 	bool all_0xff = true, all_0x00 = true;
541 	int i, j;
542 	u8 t;
543 
544 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_a._2g_cck;
545 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_b._2g_cck;
546 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_a._2g_ofdm;
547 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_b._2g_ofdm;
548 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_a._5g_low;
549 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_b._5g_low;
550 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_a._5g_mid;
551 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_b._5g_mid;
552 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_a._5g_high;
553 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_b._5g_high;
554 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_a._6g_l0;
555 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_b._6g_l0;
556 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_a._6g_l1;
557 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_b._6g_l1;
558 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_a._6g_m0;
559 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_b._6g_m0;
560 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_a._6g_m1;
561 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_b._6g_m1;
562 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_a._6g_h0;
563 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_b._6g_h0;
564 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_a._6g_h1;
565 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_b._6g_h1;
566 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_a._6g_uh0;
567 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_b._6g_uh0;
568 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_a._6g_uh1;
569 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_b._6g_uh1;
570 
571 	for (i = RF_PATH_A; i <= RF_PATH_B; i++)
572 		for (j = 0; j < RTW89_GAIN_OFFSET_NR; j++) {
573 			t = gain->offset[i][j];
574 			if (t != 0xff)
575 				all_0xff = false;
576 			if (t != 0x0)
577 				all_0x00 = false;
578 
579 			/* transform: sign-bit + U(7,2) to S(8,2) */
580 			if (t & 0x80)
581 				gain->offset[i][j] = (t ^ 0x7f) + 1;
582 		}
583 
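	/* A table that is all 0x00 or all 0xff is treated as not programmed. */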
584 	gain->offset_valid = !all_0xff && !all_0x00;
585 }
586 
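/* The MAC address sits in efuse as three 16-bit words, low byte first. */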
587 static void rtw8922a_read_efuse_mac_addr(struct rtw89_dev *rtwdev, u32 addr)
588 {
589 	struct rtw89_efuse *efuse = &rtwdev->efuse;
590 	u16 val;
591 	int i;
592 
593 	for (i = 0; i < ETH_ALEN; i += 2, addr += 2) {
594 		val = rtw89_read16(rtwdev, addr);
595 		efuse->addr[i] = val & 0xff;
596 		efuse->addr[i + 1] = val >> 8;
597 	}
598 }
599 
600 static int rtw8922a_read_efuse_pci_sdio(struct rtw89_dev *rtwdev, u8 *log_map)
601 {
602 	struct rtw89_efuse *efuse = &rtwdev->efuse;
603 
604 	if (rtwdev->hci.type == RTW89_HCI_TYPE_PCIE)
605 		rtw8922a_read_efuse_mac_addr(rtwdev, 0x3104);
606 	else
607 		ether_addr_copy(efuse->addr, log_map + 0x001A);
608 
609 	return 0;
610 }
611 
612 static int rtw8922a_read_efuse_usb(struct rtw89_dev *rtwdev, u8 *log_map)
613 {
614 	rtw8922a_read_efuse_mac_addr(rtwdev, 0x4078);
615 
616 	return 0;
617 }
618 
619 static int rtw8922a_read_efuse_rf(struct rtw89_dev *rtwdev, u8 *log_map)
620 {
621 	struct rtw8922a_efuse *map = (struct rtw8922a_efuse *)log_map;
622 	struct rtw89_efuse *efuse = &rtwdev->efuse;
623 
624 	efuse->rfe_type = map->rfe_type;
625 	efuse->xtal_cap = map->xtal_k;
626 	efuse->country_code[0] = map->country_code[0];
627 	efuse->country_code[1] = map->country_code[1];
628 	rtw8922a_efuse_parsing_tssi(rtwdev, map);
629 	rtw8922a_efuse_parsing_gain_offset(rtwdev, map);
630 
631 	rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
632 
633 	return 0;
634 }
635 
636 static int rtw8922a_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map,
637 			       enum rtw89_efuse_block block)
638 {
639 	switch (block) {
640 	case RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO:
641 		return rtw8922a_read_efuse_pci_sdio(rtwdev, log_map);
642 	case RTW89_EFUSE_BLOCK_HCI_DIG_USB:
643 		return rtw8922a_read_efuse_usb(rtwdev, log_map);
644 	case RTW89_EFUSE_BLOCK_RF:
645 		return rtw8922a_read_efuse_rf(rtwdev, log_map);
646 	default:
647 		return 0;
648 	}
649 }
650 
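/*
 * Thermal trim is programmed in efuse as sign-magnitude: bit 6 set means a
 * positive value, bits 5:0 carry the magnitude, and a raw 0xff means the
 * field was left unprogrammed.
 */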
651 #define THM_TRIM_POSITIVE_MASK BIT(6)
652 #define THM_TRIM_MAGNITUDE_MASK GENMASK(5, 0)
653 
654 static void rtw8922a_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
655 						 u8 *phycap_map)
656 {
657 	static const u32 thm_trim_addr[RF_PATH_NUM_8922A] = {0x1706, 0x1733};
658 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
659 	u32 addr = rtwdev->chip->phycap_addr;
660 	bool pg = true;
661 	u8 pg_th;
662 	s8 val;
663 	u8 i;
664 
665 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
666 		pg_th = phycap_map[thm_trim_addr[i] - addr];
667 		if (pg_th == 0xff) {
668 			info->thermal_trim[i] = 0;
669 			pg = false;
670 			break;
671 		}
672 
673 		val = u8_get_bits(pg_th, THM_TRIM_MAGNITUDE_MASK);
674 
675 		if (!(pg_th & THM_TRIM_POSITIVE_MASK))
676 			val *= -1;
677 
678 		info->thermal_trim[i] = val;
679 
680 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
681 			    "[THERMAL][TRIM] path=%d thermal_trim=0x%x (%d)\n",
682 			    i, pg_th, val);
683 	}
684 
685 	info->pg_thermal_trim = pg;
686 }
687 
688 static void rtw8922a_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
689 						 u8 *phycap_map)
690 {
691 	static const u32 pabias_trim_addr[RF_PATH_NUM_8922A] = {0x1707, 0x1734};
692 	static const u32 check_pa_pad_trim_addr = 0x1700;
693 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
694 	u32 addr = rtwdev->chip->phycap_addr;
695 	u8 val;
696 	u8 i;
697 
698 	val = phycap_map[check_pa_pad_trim_addr - addr];
699 	if (val != 0xff)
700 		info->pg_pa_bias_trim = true;
701 
702 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
703 		info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
704 
705 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
706 			    "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
707 			    i, info->pa_bias_trim[i]);
708 	}
709 }
710 
711 static void rtw8922a_pa_bias_trim(struct rtw89_dev *rtwdev)
712 {
713 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
714 	u8 pabias_2g, pabias_5g;
715 	u8 i;
716 
717 	if (!info->pg_pa_bias_trim) {
718 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
719 			    "[PA_BIAS][TRIM] no PG, do nothing\n");
720 
721 		return;
722 	}
723 
724 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
725 		pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
726 		pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
727 
728 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
729 			    "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
730 			    i, pabias_2g, pabias_5g);
731 
732 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG_V1, pabias_2g);
733 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA_V1, pabias_5g);
734 	}
735 }
736 
737 static void rtw8922a_phycap_parsing_pad_bias_trim(struct rtw89_dev *rtwdev,
738 						  u8 *phycap_map)
739 {
740 	static const u32 pad_bias_trim_addr[RF_PATH_NUM_8922A] = {0x1708, 0x1735};
741 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
742 	u32 addr = rtwdev->chip->phycap_addr;
743 	u8 i;
744 
745 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
746 		info->pad_bias_trim[i] = phycap_map[pad_bias_trim_addr[i] - addr];
747 
748 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
749 			    "[PAD_BIAS][TRIM] path=%d pad_bias_trim=0x%x\n",
750 			    i, info->pad_bias_trim[i]);
751 	}
752 }
753 
754 static void rtw8922a_pad_bias_trim(struct rtw89_dev *rtwdev)
755 {
756 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
757 	u8 pad_bias_2g, pad_bias_5g;
758 	u8 i;
759 
760 	if (!info->pg_pa_bias_trim) {
761 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
762 			    "[PAD_BIAS][TRIM] no PG, do nothing\n");
763 		return;
764 	}
765 
766 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
767 		pad_bias_2g = u8_get_bits(info->pad_bias_trim[i], GENMASK(3, 0));
768 		pad_bias_5g = u8_get_bits(info->pad_bias_trim[i], GENMASK(7, 4));
769 
770 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
771 			    "[PAD_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
772 			    i, pad_bias_2g, pad_bias_5g);
773 
774 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXG_V1, pad_bias_2g);
775 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXA_V1, pad_bias_5g);
776 	}
777 }
778 
779 static int rtw8922a_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
780 {
781 	rtw8922a_phycap_parsing_thermal_trim(rtwdev, phycap_map);
782 	rtw8922a_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
783 	rtw8922a_phycap_parsing_pad_bias_trim(rtwdev, phycap_map);
784 
785 	return 0;
786 }
787 
788 static void rtw8922a_power_trim(struct rtw89_dev *rtwdev)
789 {
790 	rtw8922a_pa_bias_trim(rtwdev);
791 	rtw8922a_pad_bias_trim(rtwdev);
792 }
793 
794 static void rtw8922a_set_channel_mac(struct rtw89_dev *rtwdev,
795 				     const struct rtw89_chan *chan,
796 				     u8 mac_idx)
797 {
798 	u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_BE_TX_SUB_BAND_VALUE, mac_idx);
799 	u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_BE_TXRATE_CHK, mac_idx);
800 	u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_BE_WMAC_RFMOD, mac_idx);
801 	u8 txsb20 = 0, txsb40 = 0, txsb80 = 0;
802 	u8 rf_mod_val, chk_rate_mask;
803 	u32 txsb;
804 	u32 reg;
805 
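	/*
	 * Work out the primary sub-band index at each level; the fallthroughs
	 * let a wide channel collect its 80M, 40M and 20M positions in turn.
	 */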
806 	switch (chan->band_width) {
807 	case RTW89_CHANNEL_WIDTH_160:
808 		txsb80 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_80);
809 		fallthrough;
810 	case RTW89_CHANNEL_WIDTH_80:
811 		txsb40 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_40);
812 		fallthrough;
813 	case RTW89_CHANNEL_WIDTH_40:
814 		txsb20 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_20);
815 		break;
816 	default:
817 		break;
818 	}
819 
820 	switch (chan->band_width) {
821 	case RTW89_CHANNEL_WIDTH_160:
822 		rf_mod_val = BE_WMAC_RFMOD_160M;
823 		txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
824 		       u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK) |
825 		       u32_encode_bits(txsb80, B_BE_TXSB_80M_MASK);
826 		break;
827 	case RTW89_CHANNEL_WIDTH_80:
828 		rf_mod_val = BE_WMAC_RFMOD_80M;
829 		txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
830 		       u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK);
831 		break;
832 	case RTW89_CHANNEL_WIDTH_40:
833 		rf_mod_val = BE_WMAC_RFMOD_40M;
834 		txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK);
835 		break;
836 	case RTW89_CHANNEL_WIDTH_20:
837 	default:
838 		rf_mod_val = BE_WMAC_RFMOD_20M;
839 		txsb = 0;
840 		break;
841 	}
842 
843 	if (txsb20 <= BE_PRI20_BITMAP_MAX)
844 		txsb |= u32_encode_bits(BIT(txsb20), B_BE_PRI20_BITMAP_MASK);
845 
846 	rtw89_write8_mask(rtwdev, rf_mod, B_BE_WMAC_RFMOD_MASK, rf_mod_val);
847 	rtw89_write32(rtwdev, sub_carr, txsb);
848 
849 	switch (chan->band_type) {
850 	case RTW89_BAND_2G:
851 		chk_rate_mask = B_BE_BAND_MODE;
852 		break;
853 	case RTW89_BAND_5G:
854 	case RTW89_BAND_6G:
855 		chk_rate_mask = B_BE_CHECK_CCK_EN | B_BE_RTS_LIMIT_IN_OFDM6;
856 		break;
857 	default:
858 		rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
859 		return;
860 	}
861 
862 	rtw89_write8_clr(rtwdev, chk_rate, B_BE_BAND_MODE | B_BE_CHECK_CCK_EN |
863 					   B_BE_RTS_LIMIT_IN_OFDM6);
864 	rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);
865 
866 	switch (chan->band_width) {
867 	case RTW89_CHANNEL_WIDTH_320:
868 	case RTW89_CHANNEL_WIDTH_160:
869 	case RTW89_CHANNEL_WIDTH_80:
870 	case RTW89_CHANNEL_WIDTH_40:
871 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
872 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x41);
873 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
874 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x41);
875 		break;
876 	default:
877 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
878 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x3f);
879 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
880 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x3e);
881 		break;
882 	}
883 }
884 
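/* SCO thresholds for 2.4 GHz channels 1-14, indexed by (channel - 1). */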
885 static const u32 rtw8922a_sco_barker_threshold[14] = {
886 	0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
887 	0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
888 };
889 
890 static const u32 rtw8922a_sco_cck_threshold[14] = {
891 	0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
892 	0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
893 };
894 
895 static int rtw8922a_ctrl_sco_cck(struct rtw89_dev *rtwdev,
896 				 u8 primary_ch, enum rtw89_bandwidth bw,
897 				 enum rtw89_phy_idx phy_idx)
898 {
899 	u8 ch_element;
900 
901 	if (primary_ch >= 14)
902 		return -EINVAL;
903 
904 	ch_element = primary_ch - 1;
905 
906 	rtw89_phy_write32_idx(rtwdev, R_BK_FC0INV, B_BK_FC0INV,
907 			      rtw8922a_sco_barker_threshold[ch_element],
908 			      phy_idx);
909 	rtw89_phy_write32_idx(rtwdev, R_CCK_FC0INV, B_CCK_FC0INV,
910 			      rtw8922a_sco_cck_threshold[ch_element],
911 			      phy_idx);
912 
913 	return 0;
914 }
915 
916 struct rtw8922a_bb_gain {
917 	u32 gain_g[BB_PATH_NUM_8922A];
918 	u32 gain_a[BB_PATH_NUM_8922A];
919 	u32 gain_g_mask;
920 	u32 gain_a_mask;
921 };
922 
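/*
 * RPL compensation per 20 MHz sub-channel: one byte-wide field per
 * sub-channel packed into 0x41E8-0x41F4 for path A; path B adds 0x400.
 */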
923 static const struct rtw89_reg_def rpl_comp_bw160[RTW89_BW20_SC_160M] = {
924 	{ .addr = 0x41E8, .mask = 0xFF00},
925 	{ .addr = 0x41E8, .mask = 0xFF0000},
926 	{ .addr = 0x41E8, .mask = 0xFF000000},
927 	{ .addr = 0x41EC, .mask = 0xFF},
928 	{ .addr = 0x41EC, .mask = 0xFF00},
929 	{ .addr = 0x41EC, .mask = 0xFF0000},
930 	{ .addr = 0x41EC, .mask = 0xFF000000},
931 	{ .addr = 0x41F0, .mask = 0xFF}
932 };
933 
934 static const struct rtw89_reg_def rpl_comp_bw80[RTW89_BW20_SC_80M] = {
935 	{ .addr = 0x41F4, .mask = 0xFF},
936 	{ .addr = 0x41F4, .mask = 0xFF00},
937 	{ .addr = 0x41F4, .mask = 0xFF0000},
938 	{ .addr = 0x41F4, .mask = 0xFF000000}
939 };
940 
941 static const struct rtw89_reg_def rpl_comp_bw40[RTW89_BW20_SC_40M] = {
942 	{ .addr = 0x41F0, .mask = 0xFF0000},
943 	{ .addr = 0x41F0, .mask = 0xFF000000}
944 };
945 
946 static const struct rtw89_reg_def rpl_comp_bw20[RTW89_BW20_SC_20M] = {
947 	{ .addr = 0x41F0, .mask = 0xFF00}
948 };
949 
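/*
 * BB gain tables: .gain_g/.gain_g_mask are the 2 GHz fields,
 * .gain_a/.gain_a_mask the 5/6 GHz ones, indexed by RF path.
 */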
950 static const struct rtw8922a_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
951 	{ .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
952 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
953 	{ .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
954 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
955 	{ .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
956 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
957 	{ .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
958 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
959 	{ .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
960 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
961 	{ .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
962 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
963 	{ .gain_g = {0x40a8, 0x44a8}, .gain_a = {0x4078, 0x4478},
964 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
965 };
966 
967 static const struct rtw8922a_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
968 	{ .gain_g = {0x4054, 0x4454}, .gain_a = {0x4054, 0x4454},
969 	  .gain_g_mask = 0x7FC0000, .gain_a_mask = 0x1FF},
970 	{ .gain_g = {0x4058, 0x4458}, .gain_a = {0x4054, 0x4454},
971 	  .gain_g_mask = 0x1FF, .gain_a_mask = 0x3FE00 },
972 };
973 
974 static const struct rtw8922a_bb_gain bb_op1db_lna[LNA_GAIN_NUM] = {
975 	{ .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x4078, 0x4478},
976 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF000000},
977 	{ .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x407c, 0x447c},
978 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF},
979 	{ .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x407c, 0x447c},
980 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF00},
981 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x407c, 0x447c},
982 	  .gain_g_mask = 0xFF, .gain_a_mask = 0xFF0000},
983 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x407c, 0x447c},
984 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF000000},
985 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x4080, 0x4480},
986 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF},
987 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x4080, 0x4480},
988 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF00},
989 };
990 
991 static const struct rtw8922a_bb_gain bb_op1db_tia_lna[TIA_LNA_OP1DB_NUM] = {
992 	{ .gain_g = {0x40b4, 0x44b4}, .gain_a = {0x4080, 0x4480},
993 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF000000},
994 	{ .gain_g = {0x40b4, 0x44b4}, .gain_a = {0x4084, 0x4484},
995 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF},
996 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
997 	  .gain_g_mask = 0xFF, .gain_a_mask = 0xFF00},
998 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
999 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF0000},
1000 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
1001 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF000000},
1002 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4088, 0x4488},
1003 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF},
1004 	{ .gain_g = {0x40bc, 0x44bc}, .gain_a = {0x4088, 0x4488},
1005 	  .gain_g_mask = 0xFF, .gain_a_mask = 0xFF00},
1006 	{ .gain_g = {0x40bc, 0x44bc}, .gain_a = {0x4088, 0x4488},
1007 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF0000},
1008 };
1009 
1010 struct rtw8922a_bb_gain_bypass {
1011 	u32 gain_g[BB_PATH_NUM_8922A];
1012 	u32 gain_a[BB_PATH_NUM_8922A];
1013 	u32 gain_mask_g;
1014 	u32 gain_mask_a;
1015 };
1016 
1017 static void rtw8922a_set_rpl_gain(struct rtw89_dev *rtwdev,
1018 				  const struct rtw89_chan *chan,
1019 				  enum rtw89_rf_path path,
1020 				  enum rtw89_phy_idx phy_idx)
1021 {
1022 	const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1023 	u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
1024 	u32 reg_path_ofst = 0;
1025 	u32 mask;
1026 	s32 val;
1027 	u32 reg;
1028 	int i;
1029 
1030 	if (path == RF_PATH_B)
1031 		reg_path_ofst = 0x400;
1032 
1033 	for (i = 0; i < RTW89_BW20_SC_160M; i++) {
1034 		reg = rpl_comp_bw160[i].addr | reg_path_ofst;
1035 		mask = rpl_comp_bw160[i].mask;
1036 		val = gain->rpl_ofst_160[gain_band][path][i];
1037 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1038 	}
1039 
1040 	for (i = 0; i < RTW89_BW20_SC_80M; i++) {
1041 		reg = rpl_comp_bw80[i].addr | reg_path_ofst;
1042 		mask = rpl_comp_bw80[i].mask;
1043 		val = gain->rpl_ofst_80[gain_band][path][i];
1044 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1045 	}
1046 
1047 	for (i = 0; i < RTW89_BW20_SC_40M; i++) {
1048 		reg = rpl_comp_bw40[i].addr | reg_path_ofst;
1049 		mask = rpl_comp_bw40[i].mask;
1050 		val = gain->rpl_ofst_40[gain_band][path][i];
1051 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1052 	}
1053 
1054 	for (i = 0; i < RTW89_BW20_SC_20M; i++) {
1055 		reg = rpl_comp_bw20[i].addr | reg_path_ofst;
1056 		mask = rpl_comp_bw20[i].mask;
1057 		val = gain->rpl_ofst_20[gain_band][path][i];
1058 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1059 	}
1060 }
1061 
1062 static void rtw8922a_set_lna_tia_gain(struct rtw89_dev *rtwdev,
1063 				      const struct rtw89_chan *chan,
1064 				      enum rtw89_rf_path path,
1065 				      enum rtw89_phy_idx phy_idx)
1066 {
1067 	const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1068 	u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
1069 	enum rtw89_phy_bb_bw_be bw_type;
1070 	s32 val;
1071 	u32 reg;
1072 	u32 mask;
1073 	int i;
1074 
1075 	bw_type = chan->band_width <= RTW89_CHANNEL_WIDTH_40 ?
1076 		  RTW89_BB_BW_20_40 : RTW89_BB_BW_80_160_320;
1077 
1078 	for (i = 0; i < LNA_GAIN_NUM; i++) {
1079 		if (chan->band_type == RTW89_BAND_2G) {
1080 			reg = bb_gain_lna[i].gain_g[path];
1081 			mask = bb_gain_lna[i].gain_g_mask;
1082 		} else {
1083 			reg = bb_gain_lna[i].gain_a[path];
1084 			mask = bb_gain_lna[i].gain_a_mask;
1085 		}
1086 		val = gain->lna_gain[gain_band][bw_type][path][i];
1087 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1088 	}
1089 
1090 	for (i = 0; i < TIA_GAIN_NUM; i++) {
1091 		if (chan->band_type == RTW89_BAND_2G) {
1092 			reg = bb_gain_tia[i].gain_g[path];
1093 			mask = bb_gain_tia[i].gain_g_mask;
1094 		} else {
1095 			reg = bb_gain_tia[i].gain_a[path];
1096 			mask = bb_gain_tia[i].gain_a_mask;
1097 		}
1098 		val = gain->tia_gain[gain_band][bw_type][path][i];
1099 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1100 	}
1101 
1102 	for (i = 0; i < LNA_GAIN_NUM; i++) {
1103 		if (chan->band_type == RTW89_BAND_2G) {
1104 			reg = bb_op1db_lna[i].gain_g[path];
1105 			mask = bb_op1db_lna[i].gain_g_mask;
1106 		} else {
1107 			reg = bb_op1db_lna[i].gain_a[path];
1108 			mask = bb_op1db_lna[i].gain_a_mask;
1109 		}
1110 		val = gain->lna_op1db[gain_band][bw_type][path][i];
1111 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1112 	}
1113 
1114 	for (i = 0; i < TIA_LNA_OP1DB_NUM; i++) {
1115 		if (chan->band_type == RTW89_BAND_2G) {
1116 			reg = bb_op1db_tia_lna[i].gain_g[path];
1117 			mask = bb_op1db_tia_lna[i].gain_g_mask;
1118 		} else {
1119 			reg = bb_op1db_tia_lna[i].gain_a[path];
1120 			mask = bb_op1db_tia_lna[i].gain_a_mask;
1121 		}
1122 		val = gain->tia_lna_op1db[gain_band][bw_type][path][i];
1123 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1124 	}
1125 }
1126 
1127 static void rtw8922a_set_gain(struct rtw89_dev *rtwdev,
1128 			      const struct rtw89_chan *chan,
1129 			      enum rtw89_rf_path path,
1130 			      enum rtw89_phy_idx phy_idx)
1131 {
1132 	rtw8922a_set_lna_tia_gain(rtwdev, chan, path, phy_idx);
1133 	rtw8922a_set_rpl_gain(rtwdev, chan, path, phy_idx);
1134 }
1135 
1136 static void rtw8922a_set_rx_gain_normal_cck(struct rtw89_dev *rtwdev,
1137 					    const struct rtw89_chan *chan,
1138 					    enum rtw89_rf_path path)
1139 {
1140 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1141 	s8 value = -gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK]; /* S(8,2) */
1142 	u8 fraction = value & 0x3;
1143 
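	/*
	 * Split the S(8,2) offset: the two fractional bits go into the MGAIN
	 * bias fields, the integer part adjusts the CCK RPL offset around 0xdc.
	 */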
1144 	if (fraction) {
1145 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20,
1146 				       (0x4 - fraction) << 1);
1147 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40,
1148 				       (0x4 - fraction) << 1);
1149 
1150 		value >>= 2;
1151 		rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1152 				       value + 1 + 0xdc);
1153 	} else {
1154 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, 0);
1155 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, 0);
1156 
1157 		value >>= 2;
1158 		rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1159 				       value + 0xdc);
1160 	}
1161 }
1162 
1163 static void rtw8922a_set_rx_gain_normal_ofdm(struct rtw89_dev *rtwdev,
1164 					     const struct rtw89_chan *chan,
1165 					     enum rtw89_rf_path path)
1166 {
1167 	static const u32 rssi_tb_bias_comp[2] = {0x41f8, 0x45f8};
1168 	static const u32 rssi_tb_ext_comp[2] = {0x4208, 0x4608};
1169 	static const u32 rssi_ofst_addr[2] = {0x40c8, 0x44c8};
1170 	static const u32 rpl_bias_comp[2] = {0x41e8, 0x45e8};
1171 	static const u32 rpl_ext_comp[2] = {0x41f8, 0x45f8};
1172 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1173 	enum rtw89_gain_offset gain_band;
1174 	s8 v1, v2, v3;
1175 	s32 value;
1176 
1177 	gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(chan->subband_type);
1178 	value = gain->offset[path][gain_band];
1179 	rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], 0xff000000, value + 0xF8);
1180 
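	/*
	 * Negate and scale the offset, then spread it over up to three s8
	 * compensation fields, clamping each piece to the s8 range.
	 */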
1181 	value *= -4;
1182 	v1 = clamp_t(s32, value, S8_MIN, S8_MAX);
1183 	value -= v1;
1184 	v2 = clamp_t(s32, value, S8_MIN, S8_MAX);
1185 	value -= v2;
1186 	v3 = clamp_t(s32, value, S8_MIN, S8_MAX);
1187 
1188 	rtw89_phy_write32_mask(rtwdev, rpl_bias_comp[path], 0xff, v1);
1189 	rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff, v2);
1190 	rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff00, v3);
1191 
1192 	rtw89_phy_write32_mask(rtwdev, rssi_tb_bias_comp[path], 0xff0000, v1);
1193 	rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff0000, v2);
1194 	rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff000000, v3);
1195 }
1196 
1197 static void rtw8922a_set_rx_gain_normal(struct rtw89_dev *rtwdev,
1198 					const struct rtw89_chan *chan,
1199 					enum rtw89_rf_path path)
1200 {
1201 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1202 
1203 	if (!gain->offset_valid)
1204 		return;
1205 
1206 	if (chan->band_type == RTW89_BAND_2G)
1207 		rtw8922a_set_rx_gain_normal_cck(rtwdev, chan, path);
1208 
1209 	rtw8922a_set_rx_gain_normal_ofdm(rtwdev, chan, path);
1210 }
1211 
1212 static void rtw8922a_set_cck_parameters(struct rtw89_dev *rtwdev, u8 central_ch,
1213 					enum rtw89_phy_idx phy_idx)
1214 {
1215 	if (central_ch == 14) {
1216 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3b13ff, phy_idx);
1217 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x1c42de, phy_idx);
1218 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0xfdb0ad, phy_idx);
1219 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0xf60f6e, phy_idx);
1220 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfd8f92, phy_idx);
1221 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0x02d011, phy_idx);
1222 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0x01c02c, phy_idx);
1223 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xfff00a, phy_idx);
1224 	} else {
1225 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3a63ca, phy_idx);
1226 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x2a833f, phy_idx);
1227 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0x1491f8, phy_idx);
1228 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0x03c0b0, phy_idx);
1229 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfccff1, phy_idx);
1230 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0xfccfc3, phy_idx);
1231 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0xfebfdc, phy_idx);
1232 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xffdff7, phy_idx);
1233 	}
1234 }
1235 
1236 static void rtw8922a_ctrl_ch(struct rtw89_dev *rtwdev,
1237 			     const struct rtw89_chan *chan,
1238 			     enum rtw89_phy_idx phy_idx)
1239 {
1240 	static const u32 band_sel[2] = {0x4160, 0x4560};
1241 	u16 central_freq = chan->freq;
1242 	u8 central_ch = chan->channel;
1243 	u8 band = chan->band_type;
1244 	bool is_2g = band == RTW89_BAND_2G;
1245 	u8 chan_idx;
1246 	u8 path;
1247 	u8 sco;
1248 
1249 	if (!central_freq) {
1250 		rtw89_warn(rtwdev, "Invalid central_freq\n");
1251 		return;
1252 	}
1253 
1254 	rtw8922a_set_gain(rtwdev, chan, RF_PATH_A, phy_idx);
1255 	rtw8922a_set_gain(rtwdev, chan, RF_PATH_B, phy_idx);
1256 
1257 	for (path = RF_PATH_A; path < BB_PATH_NUM_8922A; path++)
1258 		rtw89_phy_write32_idx(rtwdev, band_sel[path], BIT((26)), is_2g, phy_idx);
1259 
1260 	rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_A);
1261 	rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_B);
1262 
1263 	rtw89_phy_write32_idx(rtwdev, R_FC0, B_FC0, central_freq, phy_idx);
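	/* B_FC0_INV holds 2^18 / fc, the center-frequency inverse in Q18 fixed point. */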
1264 	sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
1265 	rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_FC0_INV, sco, phy_idx);
1266 
1267 	if (band == RTW89_BAND_2G)
1268 		rtw8922a_set_cck_parameters(rtwdev, central_ch, phy_idx);
1269 
1270 	chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band);
1271 	rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
1272 }
1273 
1274 static void
1275 rtw8922a_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_sb, u8 bw,
1276 		 enum rtw89_phy_idx phy_idx)
1277 {
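	/*
	 * B_CHBW_BW encodes 0/1/2/3 for 20/40/80/160 MHz; 5 MHz and 10 MHz
	 * reuse the 20 MHz setting with B_SMALLBW set to 1 or 2 respectively.
	 */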
1278 	switch (bw) {
1279 	case RTW89_CHANNEL_WIDTH_5:
1280 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1281 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x1, phy_idx);
1282 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1283 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1284 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1285 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1286 		break;
1287 	case RTW89_CHANNEL_WIDTH_10:
1288 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1289 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x2, phy_idx);
1290 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1291 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1292 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1293 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1294 		break;
1295 	case RTW89_CHANNEL_WIDTH_20:
1296 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1297 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1298 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1299 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1300 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1301 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1302 		break;
1303 	case RTW89_CHANNEL_WIDTH_40:
1304 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x1, phy_idx);
1305 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1306 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1307 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1308 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1309 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1310 		break;
1311 	case RTW89_CHANNEL_WIDTH_80:
1312 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x2, phy_idx);
1313 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1314 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1315 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1316 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
1317 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
1318 		break;
1319 	case RTW89_CHANNEL_WIDTH_160:
1320 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x3, phy_idx);
1321 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1322 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1323 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1324 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
1325 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
1326 		break;
1327 	default:
1328 		rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri_sb:%d)\n", bw,
1329 			   pri_sb);
1330 		break;
1331 	}
1332 
1333 	if (bw == RTW89_CHANNEL_WIDTH_40)
1334 		rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 1, phy_idx);
1335 	else
1336 		rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 0, phy_idx);
1337 }
1338 
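/* Stub: no spur frequency is reported for the 8922A, so the CSI weighting
 * and NBI notch filters configured below are left disabled.
 */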
1339 static u32 rtw8922a_spur_freq(struct rtw89_dev *rtwdev,
1340 			      const struct rtw89_chan *chan)
1341 {
1342 	return 0;
1343 }
1344 
1345 #define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
1346 #define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
1347 #define MAX_TONE_NUM 2048
1348 
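/* Translate the spur offset from the channel center into a CSI weighting
 * tone index (78.125 kHz subcarrier spacing, wrapped to MAX_TONE_NUM).
 */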
1349 static void rtw8922a_set_csi_tone_idx(struct rtw89_dev *rtwdev,
1350 				      const struct rtw89_chan *chan,
1351 				      enum rtw89_phy_idx phy_idx)
1352 {
1353 	s32 freq_diff, csi_idx, csi_tone_idx;
1354 	u32 spur_freq;
1355 
1356 	spur_freq = rtw8922a_spur_freq(rtwdev, chan);
1357 	if (spur_freq == 0) {
1358 		rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN,
1359 				      0, phy_idx);
1360 		return;
1361 	}
1362 
1363 	freq_diff = (spur_freq - chan->freq) * 1000000;
1364 	csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
1365 	s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);
1366 
1367 	rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_TONE_IDX,
1368 			      csi_tone_idx, phy_idx);
1369 	rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, 1, phy_idx);
1370 }
1371 
1372 static const struct rtw89_nbi_reg_def rtw8922a_nbi_reg_def[] = {
1373 	[RF_PATH_A] = {
1374 		.notch1_idx = {0x41a0, 0xFF},
1375 		.notch1_frac_idx = {0x41a0, 0xC00},
1376 		.notch1_en = {0x41a0, 0x1000},
1377 		.notch2_idx = {0x41ac, 0xFF},
1378 		.notch2_frac_idx = {0x41ac, 0xC00},
1379 		.notch2_en = {0x41ac, 0x1000},
1380 	},
1381 	[RF_PATH_B] = {
1382 		.notch1_idx = {0x45a0, 0xFF},
1383 		.notch1_frac_idx = {0x45a0, 0xC00},
1384 		.notch1_en = {0x45a0, 0x1000},
1385 		.notch2_idx = {0x45ac, 0xFF},
1386 		.notch2_frac_idx = {0x45ac, 0xC00},
1387 		.notch2_en = {0x45ac, 0x1000},
1388 	},
1389 };
1390 
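/* Program the per-path narrow-band interference (NBI) notch filter from the
 * spur offset; on 160 MHz channels the second notch may be used instead,
 * depending on where the spur and the primary channel sit within the span.
 */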
1391 static void rtw8922a_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
1392 				      const struct rtw89_chan *chan,
1393 				      enum rtw89_rf_path path,
1394 				      enum rtw89_phy_idx phy_idx)
1395 {
1396 	const struct rtw89_nbi_reg_def *nbi = &rtw8922a_nbi_reg_def[path];
1397 	s32 nbi_frac_idx, nbi_frac_tone_idx;
1398 	s32 nbi_idx, nbi_tone_idx;
1399 	bool notch2_chk = false;
1400 	u32 spur_freq, fc;
1401 	s32 freq_diff;
1402 
1403 	spur_freq = rtw8922a_spur_freq(rtwdev, chan);
1404 	if (spur_freq == 0) {
1405 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1406 				      nbi->notch1_en.mask, 0, phy_idx);
1407 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1408 				      nbi->notch2_en.mask, 0, phy_idx);
1409 		return;
1410 	}
1411 
1412 	fc = chan->freq;
1413 	if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
1414 		fc = (spur_freq > fc) ? fc + 40 : fc - 40;
1415 		if ((fc > spur_freq &&
1416 		     chan->channel < chan->primary_channel) ||
1417 		    (fc < spur_freq &&
1418 		     chan->channel > chan->primary_channel))
1419 			notch2_chk = true;
1420 	}
1421 
1422 	freq_diff = (spur_freq - fc) * 1000000;
1423 	nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5,
1424 					 &nbi_frac_idx);
1425 
1426 	if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
1427 		s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
1428 	} else {
1429 		u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
1430 				128 : 256;
1431 
1432 		s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
1433 	}
1434 	nbi_frac_tone_idx =
1435 		s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125);
1436 
1437 	if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
1438 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_idx.addr,
1439 				      nbi->notch2_idx.mask, nbi_tone_idx, phy_idx);
1440 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_frac_idx.addr,
1441 				      nbi->notch2_frac_idx.mask, nbi_frac_tone_idx,
1442 				      phy_idx);
1443 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1444 				      nbi->notch2_en.mask, 0, phy_idx);
1445 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1446 				      nbi->notch2_en.mask, 1, phy_idx);
1447 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1448 				      nbi->notch1_en.mask, 0, phy_idx);
1449 	} else {
1450 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_idx.addr,
1451 				      nbi->notch1_idx.mask, nbi_tone_idx, phy_idx);
1452 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_frac_idx.addr,
1453 				      nbi->notch1_frac_idx.mask, nbi_frac_tone_idx,
1454 				      phy_idx);
1455 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1456 				      nbi->notch1_en.mask, 0, phy_idx);
1457 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1458 				      nbi->notch1_en.mask, 1, phy_idx);
1459 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1460 				      nbi->notch2_en.mask, 0, phy_idx);
1461 	}
1462 }
1463 
1464 static void rtw8922a_spur_elimination(struct rtw89_dev *rtwdev,
1465 				      const struct rtw89_chan *chan,
1466 				      enum rtw89_phy_idx phy_idx)
1467 {
1468 	rtw8922a_set_csi_tone_idx(rtwdev, chan, phy_idx);
1469 	rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A, phy_idx);
1470 	rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B, phy_idx);
1471 }
1472 
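/* Select the AFE DAC setting for the configured bandwidth; only 160 MHz
 * needs the alternate values. Path B registers are offset by 0x100.
 */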
1473 static void rtw8922a_ctrl_afe_dac(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw,
1474 				  enum rtw89_rf_path path)
1475 {
1476 	u32 cr_ofst = 0x0;
1477 
1478 	if (path == RF_PATH_B)
1479 		cr_ofst = 0x100;
1480 
1481 	switch (bw) {
1482 	case RTW89_CHANNEL_WIDTH_5:
1483 	case RTW89_CHANNEL_WIDTH_10:
1484 	case RTW89_CHANNEL_WIDTH_20:
1485 	case RTW89_CHANNEL_WIDTH_40:
1486 	case RTW89_CHANNEL_WIDTH_80:
1487 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xE);
1488 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x7);
1489 		break;
1490 	case RTW89_CHANNEL_WIDTH_160:
1491 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xD);
1492 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x6);
1493 		break;
1494 	default:
1495 		break;
1496 	}
1497 }
1498 
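/* Register/value pairs loaded into the BB MCU by rtw8922a_bbmcu_cr_init();
 * PHY0 and PHY1 currently use identical sequences.
 */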
1499 static const struct rtw89_reg2_def bb_mcu0_init_reg[] = {
1500 	{0x6990, 0x00000000},
1501 	{0x6994, 0x00000000},
1502 	{0x6998, 0x00000000},
1503 	{0x6820, 0xFFFFFFFE},
1504 	{0x6800, 0xC0000FFE},
1505 	{0x6808, 0x76543210},
1506 	{0x6814, 0xBFBFB000},
1507 	{0x6818, 0x0478C009},
1508 	{0x6800, 0xC0000FFF},
1509 	{0x6820, 0xFFFFFFFF},
1510 };
1511 
1512 static const struct rtw89_reg2_def bb_mcu1_init_reg[] = {
1513 	{0x6990, 0x00000000},
1514 	{0x6994, 0x00000000},
1515 	{0x6998, 0x00000000},
1516 	{0x6820, 0xFFFFFFFE},
1517 	{0x6800, 0xC0000FFE},
1518 	{0x6808, 0x76543210},
1519 	{0x6814, 0xBFBFB000},
1520 	{0x6818, 0x0478C009},
1521 	{0x6800, 0xC0000FFF},
1522 	{0x6820, 0xFFFFFFFF},
1523 };
1524 
1525 static void rtw8922a_bbmcu_cr_init(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1526 {
1527 	const struct rtw89_reg2_def *reg;
1528 	int size;
1529 	int i;
1530 
1531 	if (phy_idx == RTW89_PHY_0) {
1532 		reg = bb_mcu0_init_reg;
1533 		size = ARRAY_SIZE(bb_mcu0_init_reg);
1534 	} else {
1535 		reg = bb_mcu1_init_reg;
1536 		size = ARRAY_SIZE(bb_mcu1_init_reg);
1537 	}
1538 
1539 	for (i = 0; i < size; i++, reg++)
1540 		rtw89_bbmcu_write32(rtwdev, reg->addr, reg->data, phy_idx);
1541 }
1542 
1543 static const u32 dmac_sys_mask[2] = {B_BE_DMAC_BB_PHY0_MASK, B_BE_DMAC_BB_PHY1_MASK};
1544 static const u32 bbrst_mask[2] = {B_BE_FEN_BBPLAT_RSTB, B_BE_FEN_BB1PLAT_RSTB};
1545 static const u32 glbrst_mask[2] = {B_BE_FEN_BB_IP_RSTN, B_BE_FEN_BB1_IP_RSTN};
1546 static const u32 mcu_bootrdy_mask[2] = {B_BE_BOOT_RDY0, B_BE_BOOT_RDY1};
1547 
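/* Per-PHY BB pre-init: cycle the BB global reset, clear the BB MCU memory
 * deep-sleep bit and load the BB MCU init table. Boot-ready is raised here
 * only for PHY1; PHY0's is raised later in rtw8922a_bb_postinit().
 */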
1548 static void rtw8922a_bb_preinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1549 {
1550 	u32 rdy = 0;
1551 
1552 	if (phy_idx == RTW89_PHY_1)
1553 		rdy = 1;
1554 
1555 	rtw89_write32_mask(rtwdev, R_BE_DMAC_SYS_CR32B, dmac_sys_mask[phy_idx], 0x7FF9);
1556 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x0);
1557 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx], 0x0);
1558 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x1);
1559 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx], rdy);
1560 	rtw89_write32_mask(rtwdev, R_BE_MEM_PWR_CTRL, B_BE_MEM_BBMCU0_DS_V1, 0);
1561 
1562 	fsleep(1);
1563 	rtw8922a_bbmcu_cr_init(rtwdev, phy_idx);
1564 }
1565 
1566 static void rtw8922a_bb_postinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1567 {
1568 	if (phy_idx == RTW89_PHY_0)
1569 		rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx]);
1570 	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx]);
1571 
1572 	rtw89_phy_write32_set(rtwdev, R_BBCLK, B_CLK_640M);
1573 	rtw89_phy_write32_clr(rtwdev, R_TXSCALE, B_TXFCTR_EN);
1574 	rtw89_phy_set_phy_regs(rtwdev, R_TXFCTR, B_TXFCTR_THD, 0x200);
1575 	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_EHT_RATE_TH, 0xA);
1576 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_HE_RATE_TH, 0xA);
1577 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_HT_VHT_TH, 0xAAA);
1578 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_EHT_MCS14, 0x1);
1579 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_EHT_MCS15, 0x1);
1580 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_EHTTB_EN, 0x0);
1581 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEERSU_EN, 0x0);
1582 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEMU_EN, 0x0);
1583 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_TB_EN, 0x0);
1584 	rtw89_phy_set_phy_regs(rtwdev, R_SU_PUNC, B_SU_PUNC_EN, 0x1);
1585 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_HWGEN_EN, 0x1);
1586 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_PWROFST_COMP, 0x1);
1587 	rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_BY_SLOPE, 0x1);
1588 	rtw89_phy_set_phy_regs(rtwdev, R_MAG_A, B_MGA_AEND, 0xe0);
1589 	rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_MAG_AB, 0xe0c000);
1590 	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_A, 0x3FE0);
1591 	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_B, 0x3FE0);
1592 	rtw89_phy_set_phy_regs(rtwdev, R_SC_CORNER, B_SC_CORNER, 0x200);
1593 	rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x0, phy_idx);
1594 	rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x1, phy_idx);
1595 }
1596 
1597 static void rtw8922a_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
1598 				 bool en, enum rtw89_phy_idx phy_idx)
1599 {
1600 	if (en) {
1601 		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
1602 		if (band == RTW89_BAND_2G)
1603 			rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1,
1604 					      B_RXCCA_BE1_DIS, 0x0, phy_idx);
1605 		rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0, phy_idx);
1606 	} else {
1607 		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0x1, phy_idx);
1608 		rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1, phy_idx);
1609 		fsleep(1);
1610 		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx);
1611 	}
1612 }
1613 
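/* The path_com_cr defaults below correspond to the 2-path (RF_PATH_AB) TX
 * setting; single-path selections overwrite the relevant entries.
 */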
1614 static int rtw8922a_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev,
1615 				      enum rtw89_rf_path tx_path,
1616 				      enum rtw89_phy_idx phy_idx)
1617 {
1618 	struct rtw89_reg2_def path_com_cr[] = {
1619 		{0x11A00, 0x21C86900},
1620 		{0x11A04, 0x00E4E433},
1621 		{0x11A08, 0x39390CC9},
1622 		{0x11A0C, 0x4E433240},
1623 		{0x11A10, 0x90CC900E},
1624 		{0x11A14, 0x00240393},
1625 		{0x11A18, 0x201C8600},
1626 	};
1627 	int ret = 0;
1628 	u32 reg;
1629 	int i;
1630 
1631 	rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL, 0x0, phy_idx);
1632 
1633 	if (phy_idx == RTW89_PHY_1 && !rtwdev->dbcc_en)
1634 		return 0;
1635 
1636 	if (tx_path == RF_PATH_A) {
1637 		path_com_cr[0].data = 0x21C82900;
1638 		path_com_cr[1].data = 0x00E4E431;
1639 		path_com_cr[2].data = 0x39390C49;
1640 		path_com_cr[3].data = 0x4E431240;
1641 		path_com_cr[4].data = 0x90C4900E;
1642 		path_com_cr[6].data = 0x201C8200;
1643 	} else if (tx_path == RF_PATH_B) {
1644 		path_com_cr[0].data = 0x21C04900;
1645 		path_com_cr[1].data = 0x00E4E032;
1646 		path_com_cr[2].data = 0x39380C89;
1647 		path_com_cr[3].data = 0x4E032240;
1648 		path_com_cr[4].data = 0x80C8900E;
1649 		path_com_cr[6].data = 0x201C0400;
1650 	} else if (tx_path == RF_PATH_AB) {
1651 		path_com_cr[0].data = 0x21C86900;
1652 		path_com_cr[1].data = 0x00E4E433;
1653 		path_com_cr[2].data = 0x39390CC9;
1654 		path_com_cr[3].data = 0x4E433240;
1655 		path_com_cr[4].data = 0x90CC900E;
1656 		path_com_cr[6].data = 0x201C8600;
1657 	} else {
1658 		ret = -EINVAL;
1659 	}
1660 
1661 	for (i = 0; i < ARRAY_SIZE(path_com_cr); i++) {
1662 		reg = rtw89_mac_reg_by_idx(rtwdev, path_com_cr[i].addr, phy_idx);
1663 		rtw89_write32(rtwdev, reg, path_com_cr[i].data);
1664 	}
1665 
1666 	return ret;
1667 }
1668 
1669 static void rtw8922a_bb_reset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1670 {
1671 }
1672 
1673 static int rtw8922a_cfg_rx_nss_limit(struct rtw89_dev *rtwdev, u8 rx_nss,
1674 				     enum rtw89_phy_idx phy_idx)
1675 {
1676 	if (rx_nss == 1) {
1677 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 0, phy_idx);
1678 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 0, phy_idx);
1679 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1680 				      HE_N_USER_MAX_8922A, phy_idx);
1681 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 0, phy_idx);
1682 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 0, phy_idx);
1683 		rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 0, phy_idx);
1684 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 0,
1685 				      phy_idx);
1686 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1687 				      HE_N_USER_MAX_8922A, phy_idx);
1688 	} else if (rx_nss == 2) {
1689 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 1, phy_idx);
1690 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 1, phy_idx);
1691 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1692 				      HE_N_USER_MAX_8922A, phy_idx);
1693 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 1, phy_idx);
1694 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 1, phy_idx);
1695 		rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 1, phy_idx);
1696 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 1,
1697 				      phy_idx);
1698 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1699 				      HE_N_USER_MAX_8922A, phy_idx);
1700 	} else {
1701 		return -EINVAL;
1702 	}
1703 
1704 	return 0;
1705 }
1706 
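/* Reset the TX power accumulators: in MLO_1_PLUS_1_1RF each PHY owns a
 * single RF path, so only that path is toggled; otherwise both paths are
 * reset.
 */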
1707 static void rtw8922a_tssi_reset(struct rtw89_dev *rtwdev,
1708 				enum rtw89_rf_path path,
1709 				enum rtw89_phy_idx phy_idx)
1710 {
1711 	if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1712 		if (phy_idx == RTW89_PHY_0) {
1713 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0);
1714 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1);
1715 		} else {
1716 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0);
1717 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1);
1718 		}
1719 	} else {
1720 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0);
1721 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1);
1722 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0);
1723 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1);
1724 	}
1725 }
1726 
1727 static int rtw8922a_ctrl_rx_path_tmac(struct rtw89_dev *rtwdev,
1728 				      enum rtw89_rf_path rx_path,
1729 				      enum rtw89_phy_idx phy_idx)
1730 {
1731 	u8 rx_nss = (rx_path == RF_PATH_AB) ? 2 : 1;
1732 
1733 	/* Set to 0 first to avoid abnormal EDCCA report */
1734 	rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x0, phy_idx);
1735 
1736 	if (rx_path == RF_PATH_A) {
1737 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x1, phy_idx);
1738 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 1, phy_idx);
1739 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1740 		rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1741 	} else if (rx_path == RF_PATH_B) {
1742 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x2, phy_idx);
1743 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 2, phy_idx);
1744 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1745 		rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1746 	} else if (rx_path == RF_PATH_AB) {
1747 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x3, phy_idx);
1748 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 3, phy_idx);
1749 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1750 		rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1751 	} else {
1752 		return -EINVAL;
1753 	}
1754 
1755 	return 0;
1756 }
1757 
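/* Digital power compensation tables written to the per-path LTPC registers:
 * index 0 is used for 1-SS operation, index 1 for 2-SS.
 */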
1758 #define DIGITAL_PWR_COMP_REG_NUM 22
1759 static const u32 rtw8922a_digital_pwr_comp_val[][DIGITAL_PWR_COMP_REG_NUM] = {
1760 	{0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A,
1761 	 0x0BB80708, 0x17701194, 0x02020100, 0x03030303, 0x01000303,
1762 	 0x05030302, 0x06060605, 0x06050300, 0x0A090807, 0x02000B0B,
1763 	 0x09080604, 0x0D0D0C0B, 0x08060400, 0x110F0C0B, 0x05001111,
1764 	 0x0D0C0907, 0x12121210},
1765 	{0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A,
1766 	 0x0BB80708, 0x17701194, 0x04030201, 0x05050505, 0x01000505,
1767 	 0x07060504, 0x09090908, 0x09070400, 0x0E0D0C0B, 0x03000E0E,
1768 	 0x0D0B0907, 0x1010100F, 0x0B080500, 0x1512100D, 0x05001515,
1769 	 0x100D0B08, 0x15151512},
1770 };
1771 
1772 static void rtw8922a_set_digital_pwr_comp(struct rtw89_dev *rtwdev,
1773 					  bool enable, u8 nss,
1774 					  enum rtw89_rf_path path)
1775 {
1776 	static const u32 ltpc_t0[2] = {R_BE_LTPC_T0_PATH0, R_BE_LTPC_T0_PATH1};
1777 	const u32 *digital_pwr_comp;
1778 	u32 addr, val;
1779 	u32 i;
1780 
1781 	if (nss == 1)
1782 		digital_pwr_comp = rtw8922a_digital_pwr_comp_val[0];
1783 	else
1784 		digital_pwr_comp = rtw8922a_digital_pwr_comp_val[1];
1785 
1786 	addr = ltpc_t0[path];
1787 	for (i = 0; i < DIGITAL_PWR_COMP_REG_NUM; i++, addr += 4) {
1788 		val = enable ? digital_pwr_comp[i] : 0;
1789 		rtw89_phy_write32(rtwdev, addr, val);
1790 	}
1791 }
1792 
1793 static void rtw8922a_digital_pwr_comp(struct rtw89_dev *rtwdev,
1794 				      enum rtw89_phy_idx phy_idx)
1795 {
1796 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
1797 	bool enable = chan->band_type != RTW89_BAND_2G;
1798 	u8 path;
1799 
1800 	if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1801 		if (phy_idx == RTW89_PHY_0)
1802 			path = RF_PATH_A;
1803 		else
1804 			path = RF_PATH_B;
1805 		rtw8922a_set_digital_pwr_comp(rtwdev, enable, 1, path);
1806 	} else {
1807 		rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_A);
1808 		rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_B);
1809 	}
1810 }
1811 
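/* Switch the BB between DBCC and non-DBCC operation for the requested MLO
 * mode, reconfigure each path's AFE DAC for the bandwidth it will serve,
 * and step B_EMLSR_PARM through the mode-specific sequence.
 */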
1812 static int rtw8922a_ctrl_mlo(struct rtw89_dev *rtwdev, enum rtw89_mlo_dbcc_mode mode)
1813 {
1814 	const struct rtw89_chan *chan0, *chan1;
1815 
1816 	if (mode == MLO_1_PLUS_1_1RF || mode == DBCC_LEGACY) {
1817 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x1);
1818 		rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x0);
1819 	} else if (mode == MLO_2_PLUS_0_1RF || mode == MLO_0_PLUS_2_1RF ||
1820 		   mode == MLO_DBCC_NOT_SUPPORT) {
1821 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1822 		rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x1);
1823 	} else {
1824 		return -EOPNOTSUPP;
1825 	}
1826 
1827 	if (mode == MLO_1_PLUS_1_1RF) {
1828 		chan0 = rtw89_mgnt_chan_get(rtwdev, 0);
1829 		chan1 = rtw89_mgnt_chan_get(rtwdev, 1);
1830 	} else if (mode == MLO_0_PLUS_2_1RF) {
1831 		chan1 = rtw89_mgnt_chan_get(rtwdev, 1);
1832 		chan0 = chan1;
1833 	} else {
1834 		chan0 = rtw89_mgnt_chan_get(rtwdev, 0);
1835 		chan1 = chan0;
1836 	}
1837 
1838 	rtw8922a_ctrl_afe_dac(rtwdev, chan0->band_width, RF_PATH_A);
1839 	rtw8922a_ctrl_afe_dac(rtwdev, chan1->band_width, RF_PATH_B);
1840 
1841 	rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
1842 
1843 	if (mode == MLO_2_PLUS_0_1RF) {
1844 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1845 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
1846 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
1847 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
1848 	} else if (mode == MLO_0_PLUS_2_1RF) {
1849 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1850 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
1851 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
1852 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
1853 	} else if ((mode == MLO_1_PLUS_1_1RF) || (mode == DBCC_LEGACY)) {
1854 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x7BAB);
1855 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3BAB);
1856 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3AAB);
1857 	} else {
1858 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x180);
1859 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x0);
1860 	}
1861 
1862 	return 0;
1863 }
1864 
1865 static void rtw8922a_bb_sethw(struct rtw89_dev *rtwdev)
1866 {
1867 	u32 reg;
1868 
1869 	rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP, B_EN_SND_WO_NDP);
1870 	rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP_C1, B_EN_SND_WO_NDP);
1871 
1872 	rtw89_write32_mask(rtwdev, R_BE_PWR_BOOST, B_BE_PWR_CTRL_SEL, 0);
1873 	if (rtwdev->dbcc_en) {
1874 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, RTW89_MAC_1);
1875 		rtw89_write32_mask(rtwdev, reg, B_BE_PWR_CTRL_SEL, 0);
1876 	}
1877 
1878 	rtw8922a_ctrl_mlo(rtwdev, rtwdev->mlo_dbcc_mode);
1879 }
1880 
1881 static void rtw8922a_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en,
1882 				 enum rtw89_phy_idx phy_idx)
1883 {
1884 	if (cck_en) {
1885 		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0, phy_idx);
1886 		rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1, phy_idx);
1887 		rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1888 				      0, phy_idx);
1889 	} else {
1890 		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 1, phy_idx);
1891 		rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0, phy_idx);
1892 		rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1893 				      1, phy_idx);
1894 	}
1895 }
1896 
1897 static void rtw8922a_set_channel_bb(struct rtw89_dev *rtwdev,
1898 				    const struct rtw89_chan *chan,
1899 				    enum rtw89_phy_idx phy_idx)
1900 {
1901 	bool cck_en = chan->band_type == RTW89_BAND_2G;
1902 	u8 pri_sb = chan->pri_sb_idx;
1903 
1904 	if (cck_en)
1905 		rtw8922a_ctrl_sco_cck(rtwdev, chan->primary_channel,
1906 				      chan->band_width, phy_idx);
1907 
1908 	rtw8922a_ctrl_ch(rtwdev, chan, phy_idx);
1909 	rtw8922a_ctrl_bw(rtwdev, pri_sb, chan->band_width, phy_idx);
1910 	rtw8922a_ctrl_cck_en(rtwdev, cck_en, phy_idx);
1911 	rtw8922a_spur_elimination(rtwdev, chan, phy_idx);
1912 
1913 	rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
1914 	rtw8922a_tssi_reset(rtwdev, RF_PATH_AB, phy_idx);
1915 }
1916 
1917 static void rtw8922a_pre_set_channel_bb(struct rtw89_dev *rtwdev,
1918 					enum rtw89_phy_idx phy_idx)
1919 {
1920 	if (!rtwdev->dbcc_en)
1921 		return;
1922 
1923 	if (phy_idx == RTW89_PHY_0) {
1924 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1925 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
1926 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1927 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
1928 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
1929 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
1930 	} else {
1931 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1932 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1933 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
1934 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
1935 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
1936 	}
1937 }
1938 
1939 static void rtw8922a_post_set_channel_bb(struct rtw89_dev *rtwdev,
1940 					 enum rtw89_mlo_dbcc_mode mode,
1941 					 enum rtw89_phy_idx phy_idx)
1942 {
1943 	if (!rtwdev->dbcc_en)
1944 		return;
1945 
1946 	rtw8922a_digital_pwr_comp(rtwdev, phy_idx);
1947 	rtw8922a_ctrl_mlo(rtwdev, mode);
1948 }
1949 
1950 static void rtw8922a_set_channel(struct rtw89_dev *rtwdev,
1951 				 const struct rtw89_chan *chan,
1952 				 enum rtw89_mac_idx mac_idx,
1953 				 enum rtw89_phy_idx phy_idx)
1954 {
1955 	rtw8922a_set_channel_mac(rtwdev, chan, mac_idx);
1956 	rtw8922a_set_channel_bb(rtwdev, chan, phy_idx);
1957 	rtw8922a_set_channel_rf(rtwdev, chan, phy_idx);
1958 }
1959 
1960 static void rtw8922a_dfs_en_idx(struct rtw89_dev *rtwdev,
1961 				enum rtw89_phy_idx phy_idx, enum rtw89_rf_path path,
1962 				bool en)
1963 {
1964 	u32 path_ofst = (path == RF_PATH_B) ? 0x100 : 0x0;
1965 
1966 	if (en)
1967 		rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 1,
1968 				      phy_idx);
1969 	else
1970 		rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 0,
1971 				      phy_idx);
1972 }
1973 
1974 static void rtw8922a_dfs_en(struct rtw89_dev *rtwdev, bool en,
1975 			    enum rtw89_phy_idx phy_idx)
1976 {
1977 	rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_A, en);
1978 	rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_B, en);
1979 }
1980 
1981 static void rtw8922a_adc_en_path(struct rtw89_dev *rtwdev,
1982 				 enum rtw89_rf_path path, bool en)
1983 {
1984 	u32 val;
1985 
1986 	val = rtw89_phy_read32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1);
1987 
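	/* The per-path bits in B_ADC_FIFO_EN_V1 gate the ADC FIFO: clear a
	 * path's bit to enable it, set the bit to disable it.
	 */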
1988 	if (en) {
1989 		if (path == RF_PATH_A)
1990 			val &= ~0x1;
1991 		else
1992 			val &= ~0x2;
1993 	} else {
1994 		if (path == RF_PATH_A)
1995 			val |= 0x1;
1996 		else
1997 			val |= 0x2;
1998 	}
1999 
2000 	rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1, val);
2001 }
2002 
2003 static void rtw8922a_adc_en(struct rtw89_dev *rtwdev, bool en, u8 phy_idx)
2004 {
2005 	if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
2006 		if (phy_idx == RTW89_PHY_0)
2007 			rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en);
2008 		else
2009 			rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en);
2010 	} else {
2011 		rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en);
2012 		rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en);
2013 	}
2014 }
2015 
2016 static
2017 void rtw8922a_hal_reset(struct rtw89_dev *rtwdev,
2018 			enum rtw89_phy_idx phy_idx, enum rtw89_mac_idx mac_idx,
2019 			enum rtw89_band band, u32 *tx_en, bool enter)
2020 {
2021 	if (enter) {
2022 		rtw89_chip_stop_sch_tx(rtwdev, mac_idx, tx_en, RTW89_SCH_TX_SEL_ALL);
2023 		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
2024 		rtw8922a_dfs_en(rtwdev, false, phy_idx);
2025 		rtw8922a_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
2026 		rtw8922a_adc_en(rtwdev, false, phy_idx);
2027 		fsleep(40);
2028 		rtw8922a_bb_reset_en(rtwdev, band, false, phy_idx);
2029 	} else {
2030 		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
2031 		rtw8922a_adc_en(rtwdev, true, phy_idx);
2032 		rtw8922a_dfs_en(rtwdev, true, phy_idx);
2033 		rtw8922a_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
2034 		rtw8922a_bb_reset_en(rtwdev, band, true, phy_idx);
2035 		rtw89_chip_resume_sch_tx(rtwdev, mac_idx, *tx_en);
2036 	}
2037 }
2038 
2039 static void rtw8922a_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
2040 				      struct rtw89_channel_help_params *p,
2041 				      const struct rtw89_chan *chan,
2042 				      enum rtw89_mac_idx mac_idx,
2043 				      enum rtw89_phy_idx phy_idx)
2044 {
2045 	if (enter) {
2046 		rtw8922a_pre_set_channel_bb(rtwdev, phy_idx);
2047 		rtw8922a_pre_set_channel_rf(rtwdev, phy_idx);
2048 	}
2049 
2050 	rtw8922a_hal_reset(rtwdev, phy_idx, mac_idx, chan->band_type, &p->tx_en, enter);
2051 
2052 	if (!enter) {
2053 		rtw8922a_post_set_channel_bb(rtwdev, rtwdev->mlo_dbcc_mode, phy_idx);
2054 		rtw8922a_post_set_channel_rf(rtwdev, phy_idx);
2055 	}
2056 }
2057 
2058 static void rtw8922a_rfk_init(struct rtw89_dev *rtwdev)
2059 {
2060 	struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc;
2061 
2062 	rtwdev->is_tssi_mode[RF_PATH_A] = false;
2063 	rtwdev->is_tssi_mode[RF_PATH_B] = false;
2064 	memset(rfk_mcc, 0, sizeof(*rfk_mcc));
2065 }
2066 
2067 static void __rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev,
2068 				     enum rtw89_phy_idx phy_idx,
2069 				     const struct rtw89_chan *chan)
2070 {
2071 	rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
2072 
2073 	rtw89_phy_rfk_dack_and_wait(rtwdev, phy_idx, chan, 58);
2074 	if (!test_bit(RTW89_FLAG_SER_HANDLING, rtwdev->flags))
2075 		rtw89_phy_rfk_rxdck_and_wait(rtwdev, phy_idx, chan, false, 128);
2076 }
2077 
2078 static void rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev)
2079 {
2080 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
2081 
2082 	__rtw8922a_rfk_init_late(rtwdev, RTW89_PHY_0, chan);
2083 	if (rtwdev->dbcc_en)
2084 		__rtw8922a_rfk_init_late(rtwdev, RTW89_PHY_1, chan);
2085 }
2086 
2087 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath)
2088 {
2089 	u32 rf_mode;
2090 	u8 path;
2091 	int ret;
2092 
2093 	for (path = 0; path < RF_PATH_NUM_8922A; path++) {
2094 		if (!(kpath & BIT(path)))
2095 			continue;
2096 
2097 		ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2,
2098 					       2, 5000, false, rtwdev, path, 0x00,
2099 					       RR_MOD_MASK);
2100 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
2101 			    "[RFK] Wait S%d to Rx mode!! (ret = %d)\n",
2102 			    path, ret);
2103 	}
2104 }
2105 
2106 static void rtw8922a_rfk_channel(struct rtw89_dev *rtwdev,
2107 				 struct rtw89_vif_link *rtwvif_link)
2108 {
2109 	enum rtw89_chanctx_idx chanctx_idx = rtwvif_link->chanctx_idx;
2110 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
2111 	enum rtw89_phy_idx phy_idx = rtwvif_link->phy_idx;
2112 	u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB, chanctx_idx);
2113 	u32 tx_en;
2114 
2115 	rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_START);
2116 	rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
2117 	_wait_rx_mode(rtwdev, RF_AB);
2118 
2119 	rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
2120 	rtw89_phy_rfk_txgapk_and_wait(rtwdev, phy_idx, chan, 54);
2121 	rtw89_phy_rfk_iqk_and_wait(rtwdev, phy_idx, chan, 84);
2122 	rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_NORMAL, 20);
2123 	rtw89_phy_rfk_dpk_and_wait(rtwdev, phy_idx, chan, 34);
2124 	rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, chan, true, 32);
2125 
2126 	rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
2127 	rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_STOP);
2128 }
2129 
2130 static void rtw8922a_rfk_band_changed(struct rtw89_dev *rtwdev,
2131 				      enum rtw89_phy_idx phy_idx,
2132 				      const struct rtw89_chan *chan)
2133 {
2134 	rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_SCAN, 6);
2135 }
2136 
2137 static void rtw8922a_rfk_scan(struct rtw89_dev *rtwdev,
2138 			      struct rtw89_vif_link *rtwvif_link,
2139 			      bool start)
2140 {
2141 }
2142 
2143 static void rtw8922a_rfk_track(struct rtw89_dev *rtwdev)
2144 {
2145 }
2146 
2147 static void rtw8922a_set_txpwr_ref(struct rtw89_dev *rtwdev,
2148 				   enum rtw89_phy_idx phy_idx)
2149 {
2150 	s16 ref_ofdm = 0;
2151 	s16 ref_cck = 0;
2152 
2153 	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");
2154 
2155 	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
2156 				     B_BE_PWR_REF_CTRL_OFDM, ref_ofdm);
2157 	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
2158 				     B_BE_PWR_REF_CTRL_CCK, ref_cck);
2159 }
2160 
2161 static const struct rtw89_reg_def rtw8922a_txpwr_ref[][3] = {
2162 	{{ .addr = R_TXAGC_REF_DBM_P0, .mask = B_TXAGC_OFDM_REF_DBM_P0},
2163 	 { .addr = R_TXAGC_REF_DBM_P0, .mask = B_TXAGC_CCK_REF_DBM_P0},
2164 	 { .addr = R_TSSI_K_P0, .mask = B_TSSI_K_OFDM_P0}
2165 	},
2166 	{{ .addr = R_TXAGC_REF_DBM_RF1_P0, .mask = B_TXAGC_OFDM_REF_DBM_RF1_P0},
2167 	 { .addr = R_TXAGC_REF_DBM_RF1_P0, .mask = B_TXAGC_CCK_REF_DBM_RF1_P0},
2168 	 { .addr = R_TSSI_K_RF1_P0, .mask = B_TSSI_K_OFDM_RF1_P0}
2169 	},
2170 };
2171 
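/* Split the antenna gain power offset between the two paths: the favoured
 * path keeps the plain reference and base TSSI K, while the other path gets
 * its reference reduced and its TSSI K raised accordingly.
 */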
2172 static void rtw8922a_set_txpwr_diff(struct rtw89_dev *rtwdev,
2173 				    const struct rtw89_chan *chan,
2174 				    enum rtw89_phy_idx phy_idx)
2175 {
2176 	s16 pwr_ofst = rtw89_phy_ant_gain_pwr_offset(rtwdev, chan);
2177 	const struct rtw89_chip_info *chip = rtwdev->chip;
2178 	static const u32 path_ofst[] = {0x0, 0x100};
2179 	const struct rtw89_reg_def *txpwr_ref;
2180 	static const s16 tssi_k_base = 0x12;
2181 	s16 tssi_k_ofst = abs(pwr_ofst) + tssi_k_base;
2182 	s16 ofst_dec[RF_PATH_NUM_8922A];
2183 	s16 tssi_k[RF_PATH_NUM_8922A];
2184 	s16 pwr_ref_ofst;
2185 	s16 pwr_ref = 0;
2186 	u8 i;
2187 
2188 	if (rtwdev->hal.cv == CHIP_CAV)
2189 		pwr_ref = 16;
2190 
2191 	pwr_ref <<= chip->txpwr_factor_rf;
2192 	pwr_ref_ofst = pwr_ref - rtw89_phy_txpwr_bb_to_rf(rtwdev, abs(pwr_ofst));
2193 
2194 	ofst_dec[RF_PATH_A] = pwr_ofst > 0 ? pwr_ref : pwr_ref_ofst;
2195 	ofst_dec[RF_PATH_B] = pwr_ofst > 0 ? pwr_ref_ofst : pwr_ref;
2196 	tssi_k[RF_PATH_A] = pwr_ofst > 0 ? tssi_k_base : tssi_k_ofst;
2197 	tssi_k[RF_PATH_B] = pwr_ofst > 0 ? tssi_k_ofst : tssi_k_base;
2198 
2199 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
2200 		txpwr_ref = rtw8922a_txpwr_ref[phy_idx];
2201 
2202 		rtw89_phy_write32_mask(rtwdev, txpwr_ref[0].addr + path_ofst[i],
2203 				       txpwr_ref[0].mask, ofst_dec[i]);
2204 		rtw89_phy_write32_mask(rtwdev, txpwr_ref[1].addr + path_ofst[i],
2205 				       txpwr_ref[1].mask, ofst_dec[i]);
2206 		rtw89_phy_write32_mask(rtwdev, txpwr_ref[2].addr + path_ofst[i],
2207 				       txpwr_ref[2].mask, tssi_k[i]);
2208 	}
2209 }
2210 
2211 static void rtw8922a_bb_tx_triangular(struct rtw89_dev *rtwdev, bool en,
2212 				      enum rtw89_phy_idx phy_idx)
2213 {
2214 	u8 ctrl = en ? 0x1 : 0x0;
2215 
2216 	rtw89_phy_write32_idx(rtwdev, R_BEDGE3, B_BEDGE_CFG, ctrl, phy_idx);
2217 }
2218 
2219 static void rtw8922a_set_tx_shape(struct rtw89_dev *rtwdev,
2220 				  const struct rtw89_chan *chan,
2221 				  enum rtw89_phy_idx phy_idx)
2222 {
2223 	const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
2224 	const struct rtw89_tx_shape *tx_shape = &rfe_parms->tx_shape;
2225 	u8 tx_shape_idx;
2226 	u8 band, regd;
2227 
2228 	band = chan->band_type;
2229 	regd = rtw89_regd_get(rtwdev, band);
2230 	tx_shape_idx = (*tx_shape->lmt)[band][RTW89_RS_OFDM][regd];
2231 
2232 	if (tx_shape_idx == 0)
2233 		rtw8922a_bb_tx_triangular(rtwdev, false, phy_idx);
2234 	else
2235 		rtw8922a_bb_tx_triangular(rtwdev, true, phy_idx);
2236 }
2237 
2238 static void rtw8922a_set_txpwr_sar_diff(struct rtw89_dev *rtwdev,
2239 					const struct rtw89_chan *chan,
2240 					enum rtw89_phy_idx phy_idx)
2241 {
2242 	struct rtw89_sar_parm sar_parm = {
2243 		.center_freq = chan->freq,
2244 		.force_path = true,
2245 	};
2246 	s16 sar_rf;
2247 	s8 sar_mac;
2248 
2249 	if (phy_idx != RTW89_PHY_0)
2250 		return;
2251 
2252 	sar_parm.path = RF_PATH_A;
2253 	sar_mac = rtw89_query_sar(rtwdev, &sar_parm);
2254 	sar_rf = rtw89_phy_txpwr_mac_to_rf(rtwdev, sar_mac);
2255 	rtw89_phy_write32_mask(rtwdev, R_P0_TXPWRB_BE, B_TXPWRB_MAX_BE, sar_rf);
2256 
2257 	sar_parm.path = RF_PATH_B;
2258 	sar_mac = rtw89_query_sar(rtwdev, &sar_parm);
2259 	sar_rf = rtw89_phy_txpwr_mac_to_rf(rtwdev, sar_mac);
2260 	rtw89_phy_write32_mask(rtwdev, R_P1_TXPWRB_BE, B_TXPWRB_MAX_BE, sar_rf);
2261 }
2262 
2263 static void rtw8922a_set_txpwr(struct rtw89_dev *rtwdev,
2264 			       const struct rtw89_chan *chan,
2265 			       enum rtw89_phy_idx phy_idx)
2266 {
2267 	rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx);
2268 	rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx);
2269 	rtw8922a_set_tx_shape(rtwdev, chan, phy_idx);
2270 	rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx);
2271 	rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
2272 	rtw8922a_set_txpwr_diff(rtwdev, chan, phy_idx);
2273 	rtw8922a_set_txpwr_ref(rtwdev, phy_idx);
2274 	rtw8922a_set_txpwr_sar_diff(rtwdev, chan, phy_idx);
2275 }
2276 
2277 static void rtw8922a_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
2278 				    enum rtw89_phy_idx phy_idx)
2279 {
2280 	rtw8922a_set_txpwr_ref(rtwdev, phy_idx);
2281 }
2282 
2283 static void rtw8922a_ctrl_trx_path(struct rtw89_dev *rtwdev,
2284 				   enum rtw89_rf_path tx_path, u8 tx_nss,
2285 				   enum rtw89_rf_path rx_path, u8 rx_nss)
2286 {
2287 	enum rtw89_phy_idx phy_idx;
2288 
2289 	for (phy_idx = RTW89_PHY_0; phy_idx <= RTW89_PHY_1; phy_idx++) {
2290 		rtw8922a_ctrl_tx_path_tmac(rtwdev, tx_path, phy_idx);
2291 		rtw8922a_ctrl_rx_path_tmac(rtwdev, rx_path, phy_idx);
2292 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
2293 	}
2294 }
2295 
2296 static void rtw8922a_ctrl_nbtg_bt_tx(struct rtw89_dev *rtwdev, bool en,
2297 				     enum rtw89_phy_idx phy_idx)
2298 {
2299 	if (en) {
2300 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x3, phy_idx);
2301 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
2302 				      0xf, phy_idx);
2303 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
2304 				      0x0, phy_idx);
2305 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x0, phy_idx);
2306 		rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x80, phy_idx);
2307 		rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x8080, phy_idx);
2308 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x34, phy_idx);
2309 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x34, phy_idx);
2310 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x3, phy_idx);
2311 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
2312 				      0xf, phy_idx);
2313 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
2314 				      0x0, phy_idx);
2315 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x0, phy_idx);
2316 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x80, phy_idx);
2317 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x8080, phy_idx);
2318 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x34, phy_idx);
2319 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x34, phy_idx);
2320 	} else {
2321 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x0, phy_idx);
2322 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
2323 				      0x0, phy_idx);
2324 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
2325 				      0x1, phy_idx);
2326 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x1, phy_idx);
2327 		rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x1a, phy_idx);
2328 		rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x2a2a, phy_idx);
2329 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x7a6, phy_idx);
2330 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x26, phy_idx);
2331 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x0, phy_idx);
2332 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
2333 				      0x0, phy_idx);
2334 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
2335 				      0x1, phy_idx);
2336 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x1, phy_idx);
2337 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
2338 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x2a30, phy_idx);
2339 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x7a6, phy_idx);
2340 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x26, phy_idx);
2341 	}
2342 }
2343 
2344 static void rtw8922a_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
2345 {
2346 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
2347 	enum rtw89_band band = chan->band_type;
2348 	struct rtw89_hal *hal = &rtwdev->hal;
2349 	u8 ntx_path = RF_PATH_AB;
2350 	u32 tx_en0, tx_en1;
2351 
2352 	if (hal->antenna_tx == RF_A)
2353 		ntx_path = RF_PATH_A;
2354 	else if (hal->antenna_tx == RF_B)
2355 		ntx_path = RF_PATH_B;
2356 
2357 	rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, true);
2358 	if (rtwdev->dbcc_en)
2359 		rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band,
2360 				   &tx_en1, true);
2361 
2362 	rtw8922a_ctrl_trx_path(rtwdev, ntx_path, 2, RF_PATH_AB, 2);
2363 
2364 	rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, false);
2365 	if (rtwdev->dbcc_en)
2366 		rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band,
2367 				   &tx_en1, false);
2368 }
2369 
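/* Trigger a fresh thermal measurement by pulsing RR_TM_TRI, then apply the
 * per-path thermal trim. A fixed value is returned when neither debugging
 * nor thermal protection needs the reading.
 */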
2370 static u8 rtw8922a_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
2371 {
2372 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
2373 	struct rtw89_hal *hal = &rtwdev->hal;
2374 	int th;
2375 
2376 	/* read thermal only if debugging or thermal protection enabled */
2377 	if (!rtw89_debug_is_enabled(rtwdev, RTW89_DBG_CFO | RTW89_DBG_RFK_TRACK) &&
2378 	    !hal->thermal_prot_th)
2379 		return 80;
2380 
2381 	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2382 	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
2383 	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2384 
2385 	fsleep(200);
2386 
2387 	th = rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL_V1);
2388 	th += (s8)info->thermal_trim[rf_path];
2389 
2390 	return clamp_t(int, th, 0, U8_MAX);
2391 }
2392 
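/* Encode channel number, band and bandwidth into the RF channel
 * configuration word (RR_CFGCH, i.e. RF register 0x18).
 */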
2393 static u32 rtw8922a_chan_to_rf18_val(struct rtw89_dev *rtwdev,
2394 				     const struct rtw89_chan *chan)
2395 {
2396 	u32 val = u32_encode_bits(chan->channel, RR_CFGCH_CH);
2397 
2398 	switch (chan->band_type) {
2399 	case RTW89_BAND_2G:
2400 	default:
2401 		break;
2402 	case RTW89_BAND_5G:
2403 		val |= u32_encode_bits(CFGCH_BAND1_5G, RR_CFGCH_BAND1) |
2404 		       u32_encode_bits(CFGCH_BAND0_5G, RR_CFGCH_BAND0);
2405 		break;
2406 	case RTW89_BAND_6G:
2407 		val |= u32_encode_bits(CFGCH_BAND1_6G, RR_CFGCH_BAND1) |
2408 		       u32_encode_bits(CFGCH_BAND0_6G, RR_CFGCH_BAND0);
2409 		break;
2410 	}
2411 
2412 	switch (chan->band_width) {
2413 	case RTW89_CHANNEL_WIDTH_5:
2414 	case RTW89_CHANNEL_WIDTH_10:
2415 	case RTW89_CHANNEL_WIDTH_20:
2416 	default:
2417 		break;
2418 	case RTW89_CHANNEL_WIDTH_40:
2419 		val |= u32_encode_bits(CFGCH_BW_V2_40M, RR_CFGCH_BW_V2);
2420 		break;
2421 	case RTW89_CHANNEL_WIDTH_80:
2422 		val |= u32_encode_bits(CFGCH_BW_V2_80M, RR_CFGCH_BW_V2);
2423 		break;
2424 	case RTW89_CHANNEL_WIDTH_160:
2425 		val |= u32_encode_bits(CFGCH_BW_V2_160M, RR_CFGCH_BW_V2);
2426 		break;
2427 	case RTW89_CHANNEL_WIDTH_320:
2428 		val |= u32_encode_bits(CFGCH_BW_V2_320M, RR_CFGCH_BW_V2);
2429 		break;
2430 	}
2431 
2432 	return val;
2433 }
2434 
2435 static void rtw8922a_btc_set_rfe(struct rtw89_dev *rtwdev)
2436 {
2437 	union rtw89_btc_module_info *md = &rtwdev->btc.mdinfo;
2438 	struct rtw89_btc_module_v7 *module = &md->md_v7;
2439 
2440 	module->rfe_type = rtwdev->efuse.rfe_type;
2441 	module->kt_ver = rtwdev->hal.cv;
2442 	module->bt_solo = 0;
2443 	module->switch_type = BTC_SWITCH_INTERNAL;
2444 	module->wa_type = 0;
2445 
2446 	module->ant.type = BTC_ANT_SHARED;
2447 	module->ant.num = 2;
2448 	module->ant.isolation = 10;
2449 	module->ant.diversity = 0;
2450 	module->ant.single_pos = RF_PATH_A;
2451 	module->ant.btg_pos = RF_PATH_B;
2452 
2453 	if (module->kt_ver <= 1)
2454 		module->wa_type |= BTC_WA_HFP_ZB;
2455 
2456 	rtwdev->btc.cx.other.type = BTC_3CX_NONE;
2457 
2458 	if (module->rfe_type == 0) {
2459 		rtwdev->btc.dm.error.map.rfe_type0 = true;
2460 		return;
2461 	}
2462 
2463 	module->ant.num = (module->rfe_type % 2) ?  2 : 3;
2464 
2465 	if (module->kt_ver == 0)
2466 		module->ant.num = 2;
2467 
2468 	if (module->ant.num == 3) {
2469 		module->ant.type = BTC_ANT_DEDICATED;
2470 		module->bt_pos = BTC_BT_ALONE;
2471 	} else {
2472 		module->ant.type = BTC_ANT_SHARED;
2473 		module->bt_pos = BTC_BT_BTG;
2474 	}
2475 	rtwdev->btc.btg_pos = module->ant.btg_pos;
2476 	rtwdev->btc.ant_type = module->ant.type;
2477 }
2478 
2479 static
2480 void rtw8922a_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
2481 {
2482 	rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
2483 	rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
2484 }
2485 
2486 static void rtw8922a_btc_init_cfg(struct rtw89_dev *rtwdev)
2487 {
2488 	struct rtw89_btc *btc = &rtwdev->btc;
2489 	struct rtw89_btc_ant_info_v7 *ant = &btc->mdinfo.md_v7.ant;
2490 	u32 wl_pri, path_min, path_max;
2491 	u8 path;
2492 
2493 	/* for 1-Ant && 1-ss case: only 1-path */
2494 	if (ant->num == 1) {
2495 		path_min = ant->single_pos;
2496 		path_max = path_min;
2497 	} else {
2498 		path_min = RF_PATH_A;
2499 		path_max = RF_PATH_B;
2500 	}
2501 
2502 	path = path_min;
2503 
2504 	for (path = path_min; path <= path_max; path++) {
2505 		/* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */
2506 		rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, BIT(17));
2507 
2508 		/* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU  */
2509 		rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_SS_GROUP, 0x5ff);
2510 
2511 		/* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */
2512 		rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_RX_GROUP, 0x5df);
2513 
2514 		/* if GNT_WL = 0 && BT = Tx_group -->
2515 		 * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff)
2516 		 */
2517 		if (btc->ant_type == BTC_ANT_SHARED && btc->btg_pos == path)
2518 			rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x55f);
2519 		else
2520 			rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x5ff);
2521 
2522 		rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0);
2523 	}
2524 
2525 	/* set WL PTA Hi-Pri: Ack-Tx, Beacon-Tx, Trig-frame-Tx, Null-Tx */
2526 	wl_pri = B_BTC_RSP_ACK_HI | B_BTC_TX_BCN_HI | B_BTC_TX_TRI_HI |
2527 		 B_BTC_TX_NULL_HI;
2528 	rtw89_write32(rtwdev, R_BTC_COEX_WL_REQ_BE, wl_pri);
2529 
2530 	/* set PTA break table */
2531 	rtw89_write32(rtwdev, R_BE_BT_BREAK_TABLE, BTC_BREAK_PARAM);
2532 
2533 	/* ZB coex table init for HFP PTA req-cmd bit-4 define issue COEX-900 */
2534 	rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_0, 0xda5a5a5a);
2535 
2536 	rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_1, 0xda5a5a5a);
2537 
2538 	rtw89_write32(rtwdev, R_BTC_ZB_BREAK_TBL, 0xf0ffffff);
2539 	btc->cx.wl.status.map.init_ok = true;
2540 }
2541 
2542 static void
2543 rtw8922a_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
2544 {
2545 	u16 ctrl_all_time = u32_get_bits(txpwr_val, GENMASK(15, 0));
2546 	u16 ctrl_gnt_bt = u32_get_bits(txpwr_val, GENMASK(31, 16));
2547 
2548 	switch (ctrl_all_time) {
2549 	case 0xffff:
2550 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2551 					     B_BE_FORCE_PWR_BY_RATE_EN, 0x0);
2552 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2553 					     B_BE_FORCE_PWR_BY_RATE_VAL, 0x0);
2554 		break;
2555 	default:
2556 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2557 					     B_BE_FORCE_PWR_BY_RATE_VAL, ctrl_all_time);
2558 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2559 					     B_BE_FORCE_PWR_BY_RATE_EN, 0x1);
2560 		break;
2561 	}
2562 
2563 	switch (ctrl_gnt_bt) {
2564 	case 0xffff:
2565 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL,
2566 					     B_BE_PWR_BT_EN, 0x0);
2567 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL,
2568 					     B_BE_PWR_BT_VAL, 0x0);
2569 		break;
2570 	default:
2571 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL,
2572 					     B_BE_PWR_BT_VAL, ctrl_gnt_bt);
2573 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL,
2574 					     B_BE_PWR_BT_EN, 0x1);
2575 		break;
2576 	}
2577 }
2578 
2579 static
2580 s8 rtw8922a_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
2581 {
2582 	return clamp_t(s8, val, -100, 0) + 100;
2583 }
2584 
2585 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_ul[] = {
2586 	{255, 0, 0, 7}, /* 0 -> original */
2587 	{255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
2588 	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2589 	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2590 	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2591 	{255, 1, 0, 7}, /* the IDs below are for non-shared-antenna free-run */
2592 	{6, 1, 0, 7},
2593 	{13, 1, 0, 7},
2594 	{13, 1, 0, 7}
2595 };
2596 
2597 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_dl[] = {
2598 	{255, 0, 0, 7}, /* 0 -> original */
2599 	{255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
2600 	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2601 	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2602 	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2603 	{255, 1, 0, 7}, /* the IDs below are for non-shared-antenna free-run */
2604 	{255, 1, 0, 7},
2605 	{255, 1, 0, 7},
2606 	{255, 1, 0, 7}
2607 };
2608 
2609 static const u8 rtw89_btc_8922a_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30};
2610 static const u8 rtw89_btc_8922a_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {50, 40, 30, 20};
2611 
2612 static const struct rtw89_btc_fbtc_mreg rtw89_btc_8922a_mon_reg[] = {
2613 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe300),
2614 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe320),
2615 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe324),
2616 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe328),
2617 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe32c),
2618 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe330),
2619 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe334),
2620 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe338),
2621 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe344),
2622 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe348),
2623 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe34c),
2624 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe350),
2625 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a2c),
2626 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a50),
2627 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
2628 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x660),
2629 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x1660),
2630 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x418c),
2631 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x518c),
2632 };
2633 
2634 static
2635 void rtw8922a_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
2636 {
2637 	/* Feature move to firmware */
2638 }
2639 
2640 static
2641 void rtw8922a_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
2642 {
2643 	if (!state) {
2644 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2645 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2646 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110);
2647 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x01018);
2648 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000);
2649 
2650 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000);
2651 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1);
2652 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110);
2653 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x01018);
2654 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000);
2655 	} else {
2656 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2657 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2658 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110);
2659 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x09018);
2660 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000);
2661 
2662 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000);
2663 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1);
2664 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110);
2665 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x09018);
2666 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000);
2667 	}
2668 }
2669 
2670 static void rtw8922a_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
2671 {
2672 }
2673 
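/* Derive the mac80211 rx_status frequency and band from the hardware
 * channel index carried in the PHY PPDU report. A chan_idx of 0 is treated
 * as "no channel reported" and leaves the status untouched.
 */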
2674 static void rtw8922a_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
2675 					 struct rtw89_rx_phy_ppdu *phy_ppdu,
2676 					 struct ieee80211_rx_status *status)
2677 {
2678 	u8 chan_idx = phy_ppdu->chan_idx;
2679 	enum nl80211_band band;
2680 	u8 ch;
2681 
2682 	if (chan_idx == 0)
2683 		return;
2684 
2685 	rtw89_decode_chan_idx(rtwdev, chan_idx, &ch, &band);
2686 	status->freq = ieee80211_channel_to_frequency(ch, band);
2687 	status->band = band;
2688 }
2689 
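/* Fill rx_status signal information from the PHY PPDU report: the overall
 * signal is the stronger of the two paths converted from raw RSSI to dBm,
 * and per-chain signal is reported for every RF path. The frequency is
 * only filled in when the PPDU report is valid.
 */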
2690 static void rtw8922a_query_ppdu(struct rtw89_dev *rtwdev,
2691 				struct rtw89_rx_phy_ppdu *phy_ppdu,
2692 				struct ieee80211_rx_status *status)
2693 {
2694 	u8 path;
2695 	u8 *rx_power = phy_ppdu->rssi;
2696 
2697 	if (!status->signal)
2698 		status->signal = RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A],
2699 							   rx_power[RF_PATH_B]));
2700 
2701 	for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
2702 		status->chains |= BIT(path);
2703 		status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
2704 	}
2705 	if (phy_ppdu->valid)
2706 		rtw8922a_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
2707 }
2708 
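/* Replace the raw RSSI with the reported RPL (received power level).
 * For non-CCK rates (OFDM6 and above) the frequency-domain RPL is used and
 * compensated per bandwidth; the compensation values look like
 * 10*log10(BW/20MHz) expressed in half-dB steps (e.g. +12 -> +6 dB for
 * 80 MHz). Paths not present in rx_path_en are zeroed out.
 */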
2709 static void rtw8922a_convert_rpl_to_rssi(struct rtw89_dev *rtwdev,
2710 					 struct rtw89_rx_phy_ppdu *phy_ppdu)
2711 {
2712 	/* Mapping to BW: 5, 10, 20, 40, 80, 160, 80_80 */
2713 	static const u8 bw_compensate[] = {0, 0, 0, 6, 12, 18, 0};
2714 	u8 *rssi = phy_ppdu->rssi;
2715 	u8 compensate = 0;
2716 	u16 rpl_tmp;
2717 	u8 i;
2718 
2719 	if (phy_ppdu->bw_idx < ARRAY_SIZE(bw_compensate))
2720 		compensate = bw_compensate[phy_ppdu->bw_idx];
2721 
2722 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
2723 		if (!(phy_ppdu->rx_path_en & BIT(i))) {
2724 			rssi[i] = 0;
2725 			phy_ppdu->rpl_path[i] = 0;
2726 			phy_ppdu->rpl_fd[i] = 0;
2727 		}
2728 		if (phy_ppdu->rate >= RTW89_HW_RATE_OFDM6) {
2729 			rpl_tmp = phy_ppdu->rpl_fd[i];
2730 			if (rpl_tmp)
2731 				rpl_tmp += compensate;
2732 
2733 			phy_ppdu->rpl_path[i] = rpl_tmp;
2734 		}
2735 		rssi[i] = phy_ppdu->rpl_path[i];
2736 	}
2737 
2738 	phy_ppdu->rssi_avg = phy_ppdu->rpl_avg;
2739 }
2740 
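/* Convert the RSSI field of the RX descriptor into a dBm signal value.
 * The field appears to be in quarter-dB units; values of 0/1, or anything
 * above MAX_RSSI after scaling, are treated as invalid and ignored.
 */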
2741 static void rtw8922a_phy_rpt_to_rssi(struct rtw89_dev *rtwdev,
2742 				     struct rtw89_rx_desc_info *desc_info,
2743 				     struct ieee80211_rx_status *rx_status)
2744 {
2745 	if (desc_info->rssi <= 0x1 || (desc_info->rssi >> 2) > MAX_RSSI)
2746 		return;
2747 
2748 	rx_status->signal = (desc_info->rssi >> 2) - MAX_RSSI;
2749 }
2750 
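/* Bring the baseband out of reset: set the BB platform/IP reset-release
 * bits and program R_BE_DMAC_SYS_CR32B with a vendor-provided magic value.
 * The disable counterpart below clears the same reset bits.
 */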
2751 static int rtw8922a_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
2752 {
2753 	rtw89_write8_set(rtwdev, R_BE_FEN_RST_ENABLE,
2754 			 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
2755 	rtw89_write32(rtwdev, R_BE_DMAC_SYS_CR32B, 0x7FF97FF9);
2756 
2757 	return 0;
2758 }
2759 
2760 static int rtw8922a_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
2761 {
2762 	rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE,
2763 			 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
2764 
2765 	return 0;
2766 }
2767 
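/* WoWLAN triggers advertised to cfg80211 when PM support is built in:
 * magic packet, disconnect and net-detect, plus pattern matching within
 * the limits below.
 */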
2768 #ifdef CONFIG_PM
2769 static const struct wiphy_wowlan_support rtw_wowlan_stub_8922a = {
2770 	.flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT |
2771 		 WIPHY_WOWLAN_NET_DETECT,
2772 	.n_patterns = RTW89_MAX_PATTERN_NUM,
2773 	.pattern_max_len = RTW89_MAX_PATTERN_SIZE,
2774 	.pattern_min_len = 1,
2775 	.max_nd_match_sets = RTW89_SCANOFLD_MAX_SSID,
2776 };
2777 #endif
2778 
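/* 8922A chip_ops: chip-specific hooks live in this file and
 * rtw8922a_rfk.c, while the _v2/_g7/_be suffixed callbacks are helpers
 * shared across the BE (802.11be) generation. NULL entries indicate hooks
 * that are not required on this chip.
 */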
2779 static const struct rtw89_chip_ops rtw8922a_chip_ops = {
2780 	.enable_bb_rf		= rtw8922a_mac_enable_bb_rf,
2781 	.disable_bb_rf		= rtw8922a_mac_disable_bb_rf,
2782 	.bb_preinit		= rtw8922a_bb_preinit,
2783 	.bb_postinit		= rtw8922a_bb_postinit,
2784 	.bb_reset		= rtw8922a_bb_reset,
2785 	.bb_sethw		= rtw8922a_bb_sethw,
2786 	.read_rf		= rtw89_phy_read_rf_v2,
2787 	.write_rf		= rtw89_phy_write_rf_v2,
2788 	.set_channel		= rtw8922a_set_channel,
2789 	.set_channel_help	= rtw8922a_set_channel_help,
2790 	.read_efuse		= rtw8922a_read_efuse,
2791 	.read_phycap		= rtw8922a_read_phycap,
2792 	.fem_setup		= NULL,
2793 	.rfe_gpio		= NULL,
2794 	.rfk_hw_init		= rtw8922a_rfk_hw_init,
2795 	.rfk_init		= rtw8922a_rfk_init,
2796 	.rfk_init_late		= rtw8922a_rfk_init_late,
2797 	.rfk_channel		= rtw8922a_rfk_channel,
2798 	.rfk_band_changed	= rtw8922a_rfk_band_changed,
2799 	.rfk_scan		= rtw8922a_rfk_scan,
2800 	.rfk_track		= rtw8922a_rfk_track,
2801 	.power_trim		= rtw8922a_power_trim,
2802 	.set_txpwr		= rtw8922a_set_txpwr,
2803 	.set_txpwr_ctrl		= rtw8922a_set_txpwr_ctrl,
2804 	.init_txpwr_unit	= NULL,
2805 	.get_thermal		= rtw8922a_get_thermal,
2806 	.chan_to_rf18_val	= rtw8922a_chan_to_rf18_val,
2807 	.ctrl_btg_bt_rx		= rtw8922a_ctrl_btg_bt_rx,
2808 	.query_ppdu		= rtw8922a_query_ppdu,
2809 	.convert_rpl_to_rssi	= rtw8922a_convert_rpl_to_rssi,
2810 	.phy_rpt_to_rssi	= rtw8922a_phy_rpt_to_rssi,
2811 	.ctrl_nbtg_bt_tx	= rtw8922a_ctrl_nbtg_bt_tx,
2812 	.cfg_txrx_path		= rtw8922a_bb_cfg_txrx_path,
2813 	.set_txpwr_ul_tb_offset	= NULL,
2814 	.digital_pwr_comp	= rtw8922a_digital_pwr_comp,
2815 	.pwr_on_func		= rtw8922a_pwr_on_func,
2816 	.pwr_off_func		= rtw8922a_pwr_off_func,
2817 	.query_rxdesc		= rtw89_core_query_rxdesc_v2,
2818 	.fill_txdesc		= rtw89_core_fill_txdesc_v2,
2819 	.fill_txdesc_fwcmd	= rtw89_core_fill_txdesc_fwcmd_v2,
2820 	.cfg_ctrl_path		= rtw89_mac_cfg_ctrl_path_v2,
2821 	.mac_cfg_gnt		= rtw89_mac_cfg_gnt_v2,
2822 	.stop_sch_tx		= rtw89_mac_stop_sch_tx_v2,
2823 	.resume_sch_tx		= rtw89_mac_resume_sch_tx_v2,
2824 	.h2c_dctl_sec_cam	= rtw89_fw_h2c_dctl_sec_cam_v2,
2825 	.h2c_default_cmac_tbl	= rtw89_fw_h2c_default_cmac_tbl_g7,
2826 	.h2c_assoc_cmac_tbl	= rtw89_fw_h2c_assoc_cmac_tbl_g7,
2827 	.h2c_ampdu_cmac_tbl	= rtw89_fw_h2c_ampdu_cmac_tbl_g7,
2828 	.h2c_txtime_cmac_tbl	= rtw89_fw_h2c_txtime_cmac_tbl_g7,
2829 	.h2c_punctured_cmac_tbl	= rtw89_fw_h2c_punctured_cmac_tbl_g7,
2830 	.h2c_default_dmac_tbl	= rtw89_fw_h2c_default_dmac_tbl_v2,
2831 	.h2c_update_beacon	= rtw89_fw_h2c_update_beacon_be,
2832 	.h2c_ba_cam		= rtw89_fw_h2c_ba_cam_v1,
2833 
2834 	.btc_set_rfe		= rtw8922a_btc_set_rfe,
2835 	.btc_init_cfg		= rtw8922a_btc_init_cfg,
2836 	.btc_set_wl_pri		= NULL,
2837 	.btc_set_wl_txpwr_ctrl	= rtw8922a_btc_set_wl_txpwr_ctrl,
2838 	.btc_get_bt_rssi	= rtw8922a_btc_get_bt_rssi,
2839 	.btc_update_bt_cnt	= rtw8922a_btc_update_bt_cnt,
2840 	.btc_wl_s1_standby	= rtw8922a_btc_wl_s1_standby,
2841 	.btc_set_wl_rx_gain	= rtw8922a_btc_set_wl_rx_gain,
2842 	.btc_set_policy		= rtw89_btc_set_policy_v1,
2843 };
2844 
2845 const struct rtw89_chip_info rtw8922a_chip_info = {
2846 	.chip_id		= RTL8922A,
2847 	.chip_gen		= RTW89_CHIP_BE,
2848 	.ops			= &rtw8922a_chip_ops,
2849 	.mac_def		= &rtw89_mac_gen_be,
2850 	.phy_def		= &rtw89_phy_gen_be,
2851 	.fw_basename		= RTW8922A_FW_BASENAME,
2852 	.fw_format_max		= RTW8922A_FW_FORMAT_MAX,
2853 	.try_ce_fw		= false,
2854 	.bbmcu_nr		= 1,
2855 	.needed_fw_elms		= RTW89_BE_GEN_DEF_NEEDED_FW_ELEMENTS,
2856 	.fw_blacklist		= &rtw89_fw_blacklist_default,
2857 	.fifo_size		= 589824,
2858 	.small_fifo_size	= false,
2859 	.dle_scc_rsvd_size	= 0,
2860 	.max_amsdu_limit	= 8000,
2861 	.dis_2g_40m_ul_ofdma	= false,
2862 	.rsvd_ple_ofst		= 0x8f800,
2863 	.hfc_param_ini		= {rtw8922a_hfc_param_ini_pcie, NULL, NULL},
2864 	.dle_mem		= {rtw8922a_dle_mem_pcie, NULL, NULL, NULL},
2865 	.wde_qempty_acq_grpnum	= 4,
2866 	.wde_qempty_mgq_grpsel	= 4,
2867 	.rf_base_addr		= {0xe000, 0xf000},
2868 	.thermal_th		= {0xad, 0xb4},
2869 	.pwr_on_seq		= NULL,
2870 	.pwr_off_seq		= NULL,
2871 	.bb_table		= NULL,
2872 	.bb_gain_table		= NULL,
2873 	.rf_table		= {},
2874 	.nctl_table		= NULL,
2875 	.nctl_post_table	= NULL,
2876 	.dflt_parms		= NULL, /* load parm from fw */
2877 	.rfe_parms_conf		= NULL, /* load parm from fw */
2878 	.txpwr_factor_bb	= 3,
2879 	.txpwr_factor_rf	= 2,
2880 	.txpwr_factor_mac	= 1,
2881 	.dig_table		= NULL,
2882 	.dig_regs		= &rtw8922a_dig_regs,
2883 	.tssi_dbw_table		= NULL,
2884 	.support_macid_num	= 32,
2885 	.support_link_num	= 2,
2886 	.support_chanctx_num	= 2,
2887 	.support_rnr		= true,
2888 	.support_bands		= BIT(NL80211_BAND_2GHZ) |
2889 				  BIT(NL80211_BAND_5GHZ) |
2890 				  BIT(NL80211_BAND_6GHZ),
2891 	.support_bandwidths	= BIT(NL80211_CHAN_WIDTH_20) |
2892 				  BIT(NL80211_CHAN_WIDTH_40) |
2893 				  BIT(NL80211_CHAN_WIDTH_80) |
2894 				  BIT(NL80211_CHAN_WIDTH_160),
2895 	.support_unii4		= true,
2896 	.support_ant_gain	= true,
2897 	.support_tas		= false,
2898 	.support_sar_by_ant	= true,
2899 	.ul_tb_waveform_ctrl	= false,
2900 	.ul_tb_pwr_diff		= false,
2901 	.rx_freq_frome_ie	= false,
2902 	.hw_sec_hdr		= true,
2903 	.hw_mgmt_tx_encrypt	= true,
2904 	.hw_tkip_crypto		= true,
2905 	.hw_mlo_bmc_crypto	= false,
2906 	.rf_path_num		= 2,
2907 	.tx_nss			= 2,
2908 	.rx_nss			= 2,
2909 	.acam_num		= 128,
2910 	.bcam_num		= 20,
2911 	.scam_num		= 32,
2912 	.bacam_num		= 24,
2913 	.bacam_dynamic_num	= 8,
2914 	.bacam_ver		= RTW89_BACAM_V1,
2915 	.ppdu_max_usr		= 16,
2916 	.sec_ctrl_efuse_size	= 4,
2917 	.physical_efuse_size	= 0x1300,
2918 	.logical_efuse_size	= 0x70000,
2919 	.limit_efuse_size	= 0x40000,
2920 	.dav_phy_efuse_size	= 0,
2921 	.dav_log_efuse_size	= 0,
2922 	.efuse_blocks		= rtw8922a_efuse_blocks,
2923 	.phycap_addr		= 0x1700,
2924 	.phycap_size		= 0x38,
2925 	.para_ver		= 0xf,
2926 	.wlcx_desired		= 0x07110000,
2927 	.scbd			= 0x1,
2928 	.mailbox		= 0x1,
2929 
2930 	.afh_guard_ch		= 6,
2931 	.wl_rssi_thres		= rtw89_btc_8922a_wl_rssi_thres,
2932 	.bt_rssi_thres		= rtw89_btc_8922a_bt_rssi_thres,
2933 	.rssi_tol		= 2,
2934 	.mon_reg_num		= ARRAY_SIZE(rtw89_btc_8922a_mon_reg),
2935 	.mon_reg		= rtw89_btc_8922a_mon_reg,
2936 	.rf_para_ulink_num	= ARRAY_SIZE(rtw89_btc_8922a_rf_ul),
2937 	.rf_para_ulink		= rtw89_btc_8922a_rf_ul,
2938 	.rf_para_dlink_num	= ARRAY_SIZE(rtw89_btc_8922a_rf_dl),
2939 	.rf_para_dlink		= rtw89_btc_8922a_rf_dl,
2940 	.ps_mode_supported	= BIT(RTW89_PS_MODE_RFOFF) |
2941 				  BIT(RTW89_PS_MODE_CLK_GATED) |
2942 				  BIT(RTW89_PS_MODE_PWR_GATED),
2943 	.low_power_hci_modes	= 0,
2944 	.h2c_cctl_func_id	= H2C_FUNC_MAC_CCTLINFO_UD_G7,
2945 	.hci_func_en_addr	= R_BE_HCI_FUNC_EN,
2946 	.h2c_desc_size		= sizeof(struct rtw89_rxdesc_short_v2),
2947 	.txwd_body_size		= sizeof(struct rtw89_txwd_body_v2),
2948 	.txwd_info_size		= sizeof(struct rtw89_txwd_info_v2),
2949 	.h2c_ctrl_reg		= R_BE_H2CREG_CTRL,
2950 	.h2c_counter_reg	= {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8},
2951 	.h2c_regs		= rtw8922a_h2c_regs,
2952 	.c2h_ctrl_reg		= R_BE_C2HREG_CTRL,
2953 	.c2h_counter_reg	= {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8},
2954 	.c2h_regs		= rtw8922a_c2h_regs,
2955 	.page_regs		= &rtw8922a_page_regs,
2956 	.wow_reason_reg		= rtw8922a_wow_wakeup_regs,
2957 	.cfo_src_fd		= true,
2958 	.cfo_hw_comp            = true,
2959 	.dcfo_comp		= NULL,
2960 	.dcfo_comp_sft		= 0,
2961 	.imr_info		= NULL,
2962 	.imr_dmac_table		= &rtw8922a_imr_dmac_table,
2963 	.imr_cmac_table		= &rtw8922a_imr_cmac_table,
2964 	.rrsr_cfgs		= &rtw8922a_rrsr_cfgs,
2965 	.bss_clr_vld		= {R_BSS_CLR_VLD_V2, B_BSS_CLR_VLD0_V2},
2966 	.bss_clr_map_reg	= R_BSS_CLR_MAP_V2,
2967 	.rfkill_init		= &rtw8922a_rfkill_regs,
2968 	.rfkill_get		= {R_BE_GPIO_EXT_CTRL, B_BE_GPIO_IN_9},
2969 	.dma_ch_mask		= 0,
2970 	.edcca_regs		= &rtw8922a_edcca_regs,
2971 #ifdef CONFIG_PM
2972 	.wowlan_stub		= &rtw_wowlan_stub_8922a,
2973 #endif
2974 	.xtal_info		= NULL,
2975 };
2976 EXPORT_SYMBOL(rtw8922a_chip_info);
2977 
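/* Variant info for the 8922AE-VS SKU: MCS 12/13 are not supported and a
 * minimum firmware version of 0.35.54.0 is required.
 */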
2978 const struct rtw89_chip_variant rtw8922ae_vs_variant = {
2979 	.no_mcs_12_13 = true,
2980 	.fw_min_ver_code = RTW89_FW_VER_CODE(0, 35, 54, 0),
2981 };
2982 EXPORT_SYMBOL(rtw8922ae_vs_variant);
2983 
2984 MODULE_FIRMWARE(RTW8922A_MODULE_FIRMWARE);
2985 MODULE_AUTHOR("Realtek Corporation");
2986 MODULE_DESCRIPTION("Realtek 802.11be wireless 8922A driver");
2987 MODULE_LICENSE("Dual BSD/GPL");
2988