1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2023  Realtek Corporation
3  */
4 
5 #include "chan.h"
6 #include "coex.h"
7 #include "debug.h"
8 #include "efuse.h"
9 #include "fw.h"
10 #include "mac.h"
11 #include "phy.h"
12 #include "reg.h"
13 #include "rtw8922a.h"
14 #include "rtw8922a_rfk.h"
15 #include "util.h"
16 
17 #define RTW8922A_FW_FORMAT_MAX 3
18 #define RTW8922A_FW_BASENAME "rtw89/rtw8922a_fw"
19 #define RTW8922A_MODULE_FIRMWARE \
20 	RTW8922A_FW_BASENAME "-" __stringify(RTW8922A_FW_FORMAT_MAX) ".bin"
21 
22 #define HE_N_USER_MAX_8922A 4
23 
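/* HCI flow-control page quotas per DMA channel:
 * {min pages, max pages, page group} (struct rtw89_hfc_ch_cfg).
 */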
24 static const struct rtw89_hfc_ch_cfg rtw8922a_hfc_chcfg_pcie[] = {
25 	{2, 1641, grp_0}, /* ACH 0 */
26 	{2, 1641, grp_0}, /* ACH 1 */
27 	{2, 1641, grp_0}, /* ACH 2 */
28 	{2, 1641, grp_0}, /* ACH 3 */
29 	{2, 1641, grp_1}, /* ACH 4 */
30 	{2, 1641, grp_1}, /* ACH 5 */
31 	{2, 1641, grp_1}, /* ACH 6 */
32 	{2, 1641, grp_1}, /* ACH 7 */
33 	{2, 1641, grp_0}, /* B0MGQ */
34 	{2, 1641, grp_0}, /* B0HIQ */
35 	{2, 1641, grp_1}, /* B1MGQ */
36 	{2, 1641, grp_1}, /* B1HIQ */
37 	{0, 0, 0}, /* FWCMDQ */
38 	{0, 0, 0}, /* BMC */
39 	{0, 0, 0}, /* H2D */
40 };
41 
42 static const struct rtw89_hfc_pub_cfg rtw8922a_hfc_pubcfg_pcie = {
43 	1651, /* Group 0 */
44 	1651, /* Group 1 */
45 	3302, /* Public Max */
46 	0, /* WP threshold */
47 };
48 
49 static const struct rtw89_hfc_param_ini rtw8922a_hfc_param_ini_pcie[] = {
50 	[RTW89_QTA_SCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
51 			   &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
52 	[RTW89_QTA_DBCC] = {rtw8922a_hfc_chcfg_pcie, &rtw8922a_hfc_pubcfg_pcie,
53 			   &rtw89_mac_size.hfc_prec_cfg_c0, RTW89_HCIFC_POH},
54 	[RTW89_QTA_DLFW] = {NULL, NULL, &rtw89_mac_size.hfc_prec_cfg_c2,
55 			    RTW89_HCIFC_POH},
56 	[RTW89_QTA_INVALID] = {NULL},
57 };
58 
59 static const struct rtw89_dle_mem rtw8922a_dle_mem_pcie[] = {
60 	[RTW89_QTA_SCC] = {RTW89_QTA_SCC, &rtw89_mac_size.wde_size0_v1,
61 			   &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
62 			   &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
63 			   &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
64 			   &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
65 	[RTW89_QTA_DBCC] = {RTW89_QTA_DBCC, &rtw89_mac_size.wde_size0_v1,
66 			   &rtw89_mac_size.ple_size0_v1, &rtw89_mac_size.wde_qt0_v1,
67 			   &rtw89_mac_size.wde_qt0_v1, &rtw89_mac_size.ple_qt0,
68 			   &rtw89_mac_size.ple_qt1, &rtw89_mac_size.ple_rsvd_qt0,
69 			   &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
70 	[RTW89_QTA_DLFW] = {RTW89_QTA_DLFW, &rtw89_mac_size.wde_size4_v1,
71 			    &rtw89_mac_size.ple_size3_v1, &rtw89_mac_size.wde_qt4,
72 			    &rtw89_mac_size.wde_qt4, &rtw89_mac_size.ple_qt9,
73 			    &rtw89_mac_size.ple_qt9, &rtw89_mac_size.ple_rsvd_qt1,
74 			    &rtw89_mac_size.rsvd0_size0, &rtw89_mac_size.rsvd1_size0},
75 	[RTW89_QTA_INVALID] = {RTW89_QTA_INVALID, NULL, NULL, NULL, NULL, NULL,
76 			       NULL},
77 };
78 
79 static const u32 rtw8922a_h2c_regs[RTW89_H2CREG_MAX] = {
80 	R_BE_H2CREG_DATA0, R_BE_H2CREG_DATA1, R_BE_H2CREG_DATA2,
81 	R_BE_H2CREG_DATA3
82 };
83 
84 static const u32 rtw8922a_c2h_regs[RTW89_H2CREG_MAX] = {
85 	R_BE_C2HREG_DATA0, R_BE_C2HREG_DATA1, R_BE_C2HREG_DATA2,
86 	R_BE_C2HREG_DATA3
87 };
88 
89 static const u32 rtw8922a_wow_wakeup_regs[RTW89_WOW_REASON_NUM] = {
90 	R_AX_C2HREG_DATA3_V1 + 3, R_BE_DBG_WOW,
91 };
92 
93 static const struct rtw89_page_regs rtw8922a_page_regs = {
94 	.hci_fc_ctrl	= R_BE_HCI_FC_CTRL,
95 	.ch_page_ctrl	= R_BE_CH_PAGE_CTRL,
96 	.ach_page_ctrl	= R_BE_CH0_PAGE_CTRL,
97 	.ach_page_info	= R_BE_CH0_PAGE_INFO,
98 	.pub_page_info3	= R_BE_PUB_PAGE_INFO3,
99 	.pub_page_ctrl1	= R_BE_PUB_PAGE_CTRL1,
100 	.pub_page_ctrl2	= R_BE_PUB_PAGE_CTRL2,
101 	.pub_page_info1	= R_BE_PUB_PAGE_INFO1,
102 	.pub_page_info2 = R_BE_PUB_PAGE_INFO2,
103 	.wp_page_ctrl1	= R_BE_WP_PAGE_CTRL1,
104 	.wp_page_ctrl2	= R_BE_WP_PAGE_CTRL2,
105 	.wp_page_info1	= R_BE_WP_PAGE_INFO1,
106 };
107 
108 static const struct rtw89_reg_imr rtw8922a_imr_dmac_regs[] = {
109 	{R_BE_DISP_HOST_IMR, B_BE_DISP_HOST_IMR_CLR, B_BE_DISP_HOST_IMR_SET},
110 	{R_BE_DISP_CPU_IMR, B_BE_DISP_CPU_IMR_CLR, B_BE_DISP_CPU_IMR_SET},
111 	{R_BE_DISP_OTHER_IMR, B_BE_DISP_OTHER_IMR_CLR, B_BE_DISP_OTHER_IMR_SET},
112 	{R_BE_PKTIN_ERR_IMR, B_BE_PKTIN_ERR_IMR_CLR, B_BE_PKTIN_ERR_IMR_SET},
113 	{R_BE_INTERRUPT_MASK_REG, B_BE_INTERRUPT_MASK_REG_CLR, B_BE_INTERRUPT_MASK_REG_SET},
114 	{R_BE_MLO_ERR_IDCT_IMR, B_BE_MLO_ERR_IDCT_IMR_CLR, B_BE_MLO_ERR_IDCT_IMR_SET},
115 	{R_BE_MPDU_TX_ERR_IMR, B_BE_MPDU_TX_ERR_IMR_CLR, B_BE_MPDU_TX_ERR_IMR_SET},
116 	{R_BE_MPDU_RX_ERR_IMR, B_BE_MPDU_RX_ERR_IMR_CLR, B_BE_MPDU_RX_ERR_IMR_SET},
117 	{R_BE_SEC_ERROR_IMR, B_BE_SEC_ERROR_IMR_CLR, B_BE_SEC_ERROR_IMR_SET},
118 	{R_BE_CPUIO_ERR_IMR, B_BE_CPUIO_ERR_IMR_CLR, B_BE_CPUIO_ERR_IMR_SET},
119 	{R_BE_WDE_ERR_IMR, B_BE_WDE_ERR_IMR_CLR, B_BE_WDE_ERR_IMR_SET},
120 	{R_BE_WDE_ERR1_IMR, B_BE_WDE_ERR1_IMR_CLR, B_BE_WDE_ERR1_IMR_SET},
121 	{R_BE_PLE_ERR_IMR, B_BE_PLE_ERR_IMR_CLR, B_BE_PLE_ERR_IMR_SET},
122 	{R_BE_PLE_ERRFLAG1_IMR, B_BE_PLE_ERRFLAG1_IMR_CLR, B_BE_PLE_ERRFLAG1_IMR_SET},
123 	{R_BE_WDRLS_ERR_IMR, B_BE_WDRLS_ERR_IMR_CLR, B_BE_WDRLS_ERR_IMR_SET},
124 	{R_BE_TXPKTCTL_B0_ERRFLAG_IMR, B_BE_TXPKTCTL_B0_ERRFLAG_IMR_CLR,
125 	 B_BE_TXPKTCTL_B0_ERRFLAG_IMR_SET},
126 	{R_BE_TXPKTCTL_B1_ERRFLAG_IMR, B_BE_TXPKTCTL_B1_ERRFLAG_IMR_CLR,
127 	 B_BE_TXPKTCTL_B1_ERRFLAG_IMR_SET},
128 	{R_BE_BBRPT_COM_ERR_IMR, B_BE_BBRPT_COM_ERR_IMR_CLR, B_BE_BBRPT_COM_ERR_IMR_SET},
129 	{R_BE_BBRPT_CHINFO_ERR_IMR, B_BE_BBRPT_CHINFO_ERR_IMR_CLR,
130 	 B_BE_BBRPT_CHINFO_ERR_IMR_SET},
131 	{R_BE_BBRPT_DFS_ERR_IMR, B_BE_BBRPT_DFS_ERR_IMR_CLR, B_BE_BBRPT_DFS_ERR_IMR_SET},
132 	{R_BE_LA_ERRFLAG_IMR, B_BE_LA_ERRFLAG_IMR_CLR, B_BE_LA_ERRFLAG_IMR_SET},
133 	{R_BE_CH_INFO_DBGFLAG_IMR, B_BE_CH_INFO_DBGFLAG_IMR_CLR, B_BE_CH_INFO_DBGFLAG_IMR_SET},
134 	{R_BE_PLRLS_ERR_IMR, B_BE_PLRLS_ERR_IMR_CLR, B_BE_PLRLS_ERR_IMR_SET},
135 	{R_BE_HAXI_IDCT_MSK, B_BE_HAXI_IDCT_MSK_CLR, B_BE_HAXI_IDCT_MSK_SET},
136 };
137 
138 static const struct rtw89_imr_table rtw8922a_imr_dmac_table = {
139 	.regs = rtw8922a_imr_dmac_regs,
140 	.n_regs = ARRAY_SIZE(rtw8922a_imr_dmac_regs),
141 };
142 
143 static const struct rtw89_reg_imr rtw8922a_imr_cmac_regs[] = {
144 	{R_BE_RESP_IMR, B_BE_RESP_IMR_CLR, B_BE_RESP_IMR_SET},
145 	{R_BE_RX_ERROR_FLAG_IMR, B_BE_RX_ERROR_FLAG_IMR_CLR, B_BE_RX_ERROR_FLAG_IMR_SET},
146 	{R_BE_TX_ERROR_FLAG_IMR, B_BE_TX_ERROR_FLAG_IMR_CLR, B_BE_TX_ERROR_FLAG_IMR_SET},
147 	{R_BE_RX_ERROR_FLAG_IMR_1, B_BE_TX_ERROR_FLAG_IMR_1_CLR, B_BE_TX_ERROR_FLAG_IMR_1_SET},
148 	{R_BE_PTCL_IMR1, B_BE_PTCL_IMR1_CLR, B_BE_PTCL_IMR1_SET},
149 	{R_BE_PTCL_IMR0, B_BE_PTCL_IMR0_CLR, B_BE_PTCL_IMR0_SET},
150 	{R_BE_PTCL_IMR_2, B_BE_PTCL_IMR_2_CLR, B_BE_PTCL_IMR_2_SET},
151 	{R_BE_SCHEDULE_ERR_IMR, B_BE_SCHEDULE_ERR_IMR_CLR, B_BE_SCHEDULE_ERR_IMR_SET},
152 	{R_BE_C0_TXPWR_IMR, B_BE_C0_TXPWR_IMR_CLR, B_BE_C0_TXPWR_IMR_SET},
153 	{R_BE_TRXPTCL_ERROR_INDICA_MASK, B_BE_TRXPTCL_ERROR_INDICA_MASK_CLR,
154 	 B_BE_TRXPTCL_ERROR_INDICA_MASK_SET},
155 	{R_BE_RX_ERR_IMR, B_BE_RX_ERR_IMR_CLR, B_BE_RX_ERR_IMR_SET},
156 	{R_BE_PHYINFO_ERR_IMR_V1, B_BE_PHYINFO_ERR_IMR_V1_CLR, B_BE_PHYINFO_ERR_IMR_V1_SET},
157 };
158 
159 static const struct rtw89_imr_table rtw8922a_imr_cmac_table = {
160 	.regs = rtw8922a_imr_cmac_regs,
161 	.n_regs = ARRAY_SIZE(rtw8922a_imr_cmac_regs),
162 };
163 
164 static const struct rtw89_rrsr_cfgs rtw8922a_rrsr_cfgs = {
165 	.ref_rate = {R_BE_TRXPTCL_RESP_1, B_BE_WMAC_RESP_REF_RATE_SEL, 0},
166 	.rsc = {R_BE_PTCL_RRSR1, B_BE_RSC_MASK, 2},
167 };
168 
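/* Hardware RF-kill pin: GPIO9 pinmux function select plus GPIO mode/direction
 * bits (B_BE_GPIO_MOD_9 / B_BE_GPIO_IO_SEL_9).
 */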
169 static const struct rtw89_rfkill_regs rtw8922a_rfkill_regs = {
170 	.pinmux = {R_BE_GPIO8_15_FUNC_SEL,
171 		   B_BE_PINMUX_GPIO9_FUNC_SEL_MASK,
172 		   0xf},
173 	.mode = {R_BE_GPIO_EXT_CTRL + 2,
174 		 (B_BE_GPIO_MOD_9 | B_BE_GPIO_IO_SEL_9) >> 16,
175 		 0x0},
176 };
177 
178 static const struct rtw89_dig_regs rtw8922a_dig_regs = {
179 	.seg0_pd_reg = R_SEG0R_PD_V2,
180 	.pd_lower_bound_mask = B_SEG0R_PD_LOWER_BOUND_MSK,
181 	.pd_spatial_reuse_en = B_SEG0R_PD_SPATIAL_REUSE_EN_MSK_V1,
182 	.bmode_pd_reg = R_BMODE_PDTH_EN_V2,
183 	.bmode_cca_rssi_limit_en = B_BMODE_PDTH_LIMIT_EN_MSK_V1,
184 	.bmode_pd_lower_bound_reg = R_BMODE_PDTH_V2,
185 	.bmode_rssi_nocca_low_th_mask = B_BMODE_PDTH_LOWER_BOUND_MSK_V1,
186 	.p0_lna_init = {R_PATH0_LNA_INIT_V1, B_PATH0_LNA_INIT_IDX_MSK},
187 	.p1_lna_init = {R_PATH1_LNA_INIT_V1, B_PATH1_LNA_INIT_IDX_MSK},
188 	.p0_tia_init = {R_PATH0_TIA_INIT_V1, B_PATH0_TIA_INIT_IDX_MSK_V1},
189 	.p1_tia_init = {R_PATH1_TIA_INIT_V1, B_PATH1_TIA_INIT_IDX_MSK_V1},
190 	.p0_rxb_init = {R_PATH0_RXB_INIT_V1, B_PATH0_RXB_INIT_IDX_MSK_V1},
191 	.p1_rxb_init = {R_PATH1_RXB_INIT_V1, B_PATH1_RXB_INIT_IDX_MSK_V1},
192 	.p0_p20_pagcugc_en = {R_PATH0_P20_FOLLOW_BY_PAGCUGC_V3,
193 			      B_PATH0_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
194 	.p0_s20_pagcugc_en = {R_PATH0_S20_FOLLOW_BY_PAGCUGC_V3,
195 			      B_PATH0_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
196 	.p1_p20_pagcugc_en = {R_PATH1_P20_FOLLOW_BY_PAGCUGC_V3,
197 			      B_PATH1_P20_FOLLOW_BY_PAGCUGC_EN_MSK},
198 	.p1_s20_pagcugc_en = {R_PATH1_S20_FOLLOW_BY_PAGCUGC_V3,
199 			      B_PATH1_S20_FOLLOW_BY_PAGCUGC_EN_MSK},
200 };
201 
202 static const struct rtw89_edcca_regs rtw8922a_edcca_regs = {
203 	.edcca_level			= R_SEG0R_EDCCA_LVL_BE,
204 	.edcca_mask			= B_EDCCA_LVL_MSK0,
205 	.edcca_p_mask			= B_EDCCA_LVL_MSK1,
206 	.ppdu_level			= R_SEG0R_PPDU_LVL_BE,
207 	.ppdu_mask			= B_EDCCA_LVL_MSK1,
208 	.p = {{
209 		.rpt_a			= R_EDCCA_RPT_A_BE,
210 		.rpt_b			= R_EDCCA_RPT_B_BE,
211 		.rpt_sel		= R_EDCCA_RPT_SEL_BE,
212 		.rpt_sel_mask		= B_EDCCA_RPT_SEL_MSK,
213 	}, {
214 		.rpt_a			= R_EDCCA_RPT_P1_A_BE,
215 		.rpt_b			= R_EDCCA_RPT_P1_B_BE,
216 		.rpt_sel		= R_EDCCA_RPT_SEL_BE,
217 		.rpt_sel_mask		= B_EDCCA_RPT_SEL_P1_MSK,
218 	}},
219 	.rpt_sel_be			= R_EDCCA_RPTREG_SEL_BE,
220 	.rpt_sel_be_mask		= B_EDCCA_RPTREG_SEL_BE_MSK,
221 	.tx_collision_t2r_st		= R_TX_COLLISION_T2R_ST_BE,
222 	.tx_collision_t2r_st_mask	= B_TX_COLLISION_T2R_ST_BE_M,
223 };
224 
225 static const struct rtw89_efuse_block_cfg rtw8922a_efuse_blocks[] = {
226 	[RTW89_EFUSE_BLOCK_SYS]			= {.offset = 0x00000, .size = 0x310},
227 	[RTW89_EFUSE_BLOCK_RF]			= {.offset = 0x10000, .size = 0x240},
228 	[RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO]	= {.offset = 0x20000, .size = 0x4800},
229 	[RTW89_EFUSE_BLOCK_HCI_DIG_USB]		= {.offset = 0x30000, .size = 0x890},
230 	[RTW89_EFUSE_BLOCK_HCI_PHY_PCIE]	= {.offset = 0x40000, .size = 0x200},
231 	[RTW89_EFUSE_BLOCK_HCI_PHY_USB3]	= {.offset = 0x50000, .size = 0x80},
232 	[RTW89_EFUSE_BLOCK_HCI_PHY_USB2]	= {.offset = 0x60000, .size = 0x0},
233 	[RTW89_EFUSE_BLOCK_ADIE]		= {.offset = 0x70000, .size = 0x10},
234 };
235 
236 static void rtw8922a_ctrl_btg_bt_rx(struct rtw89_dev *rtwdev, bool en,
237 				    enum rtw89_phy_idx phy_idx)
238 {
239 	if (en) {
240 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x1, phy_idx);
241 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
242 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x1, phy_idx);
243 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x1, phy_idx);
244 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
245 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x30, phy_idx);
246 		rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0x0, phy_idx);
247 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x1, phy_idx);
248 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x2, phy_idx);
249 		rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
250 				      0x1, phy_idx);
251 	} else {
252 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_SHARE_A, 0x0, phy_idx);
253 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BTG_PATH_A, 0x0, phy_idx);
254 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_SHARE_B, 0x0, phy_idx);
255 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BTG_PATH_B, 0x0, phy_idx);
256 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x1a, phy_idx);
257 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA0_B, 0x2a, phy_idx);
258 		rtw89_phy_write32_idx(rtwdev, R_PMAC_GNT, B_PMAC_GNT_P1, 0xc, phy_idx);
259 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_BT_SHARE, 0x0, phy_idx);
260 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_BT_SG0, 0x0, phy_idx);
261 		rtw89_phy_write32_idx(rtwdev, R_GNT_BT_WGT_EN, B_GNT_BT_WGT_EN,
262 				      0x0, phy_idx);
263 	}
264 }
265 
266 static int rtw8922a_pwr_on_func(struct rtw89_dev *rtwdev)
267 {
268 	struct rtw89_hal *hal = &rtwdev->hal;
269 	u32 val32;
270 	int ret;
271 
272 	rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_AFSM_WLSUS_EN |
273 						    B_BE_AFSM_PCIE_SUS_EN);
274 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_DIS_WLBT_PDNSUSEN_SOPC);
275 	rtw89_write32_set(rtwdev, R_BE_WLLPS_CTRL, B_BE_DIS_WLBT_LPSEN_LOPC);
276 	rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APDM_HPDN);
277 	rtw89_write32_clr(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
278 
279 	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_RDY_SYSPWR,
280 				1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
281 	if (ret)
282 		return ret;
283 
284 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
285 	rtw89_write32_set(rtwdev, R_BE_WLRESUME_CTRL, B_BE_LPSROP_CMAC0 |
286 						      B_BE_LPSROP_CMAC1);
287 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFN_ONMAC);
288 
289 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFN_ONMAC),
290 				1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
291 	if (ret)
292 		return ret;
293 
294 	rtw89_write32_clr(rtwdev, R_BE_AFE_ON_CTRL1, B_BE_REG_CK_MON_CK960M_EN);
295 	rtw89_write8_set(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
296 						      B_BE_POW_PC_LDO_PORT1);
297 	rtw89_write32_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
298 						       B_BE_R_SYM_ISO_ADDA_P12PP);
299 	rtw89_write8_set(rtwdev, R_BE_PLATFORM_ENABLE, B_BE_PLATFORM_EN);
300 	rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
301 
302 	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HAXIDMA_IO_ST,
303 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
304 	if (ret)
305 		return ret;
306 
307 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
308 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
309 	if (ret)
310 		return ret;
311 
312 	rtw89_write32_set(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
313 
314 	ret = read_poll_timeout(rtw89_read32, val32, val32 & B_BE_HCI_WLAN_IO_ST,
315 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
316 	if (ret)
317 		return ret;
318 
319 	rtw89_write32_clr(rtwdev, R_BE_SYS_SDIO_CTRL, B_BE_PCIE_FORCE_IBX_EN);
320 
321 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0x02);
322 	if (ret)
323 		return ret;
324 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x01, 0x01);
325 	if (ret)
326 		return ret;
327 
328 	rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
329 
330 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x40, 0x40);
331 	if (ret)
332 		return ret;
333 
334 	rtw89_write32_set(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
335 
336 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x20, 0x20);
337 	if (ret)
338 		return ret;
339 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x04, 0x04);
340 	if (ret)
341 		return ret;
342 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x08, 0x08);
343 	if (ret)
344 		return ret;
345 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x10);
346 	if (ret)
347 		return ret;
348 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xEB, 0xFF);
349 	if (ret)
350 		return ret;
351 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xEB, 0xFF);
352 	if (ret)
353 		return ret;
354 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x01, 0x01);
355 	if (ret)
356 		return ret;
357 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x02, 0x02);
358 	if (ret)
359 		return ret;
360 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x80);
361 	if (ret)
362 		return ret;
363 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF1, 0, 0x40);
364 	if (ret)
365 		return ret;
366 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_XREF_RF2, 0, 0x40);
367 	if (ret)
368 		return ret;
369 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL_1, 0x40, 0x60);
370 	if (ret)
371 		return ret;
372 
373 	if (hal->cv != CHIP_CAV) {
374 		rtw89_write32_set(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
375 		rtw89_write32_set(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_ISO_EB2CORE);
376 		rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_B);
377 
378 		mdelay(1);
379 
380 		rtw89_write32_clr(rtwdev, R_BE_SYS_ISO_CTRL, B_BE_PWC_EV2EF_S);
381 		rtw89_write32_clr(rtwdev, R_BE_PMC_DBG_CTRL2, B_BE_SYSON_DIS_PMCR_BE_WRMSK);
382 	}
383 
384 	rtw89_write32_set(rtwdev, R_BE_DMAC_FUNC_EN,
385 			  B_BE_MAC_FUNC_EN | B_BE_DMAC_FUNC_EN | B_BE_MPDU_PROC_EN |
386 			  B_BE_WD_RLS_EN | B_BE_DLE_WDE_EN | B_BE_TXPKT_CTRL_EN |
387 			  B_BE_STA_SCH_EN | B_BE_DLE_PLE_EN | B_BE_PKT_BUF_EN |
388 			  B_BE_DMAC_TBL_EN | B_BE_PKT_IN_EN | B_BE_DLE_CPUIO_EN |
389 			  B_BE_DISPATCHER_EN | B_BE_BBRPT_EN | B_BE_MAC_SEC_EN |
390 			  B_BE_H_AXIDMA_EN | B_BE_DMAC_MLO_EN | B_BE_PLRLS_EN |
391 			  B_BE_P_AXIDMA_EN | B_BE_DLE_DATACPUIO_EN | B_BE_LTR_CTL_EN);
392 
393 	set_bit(RTW89_FLAG_DMAC_FUNC, rtwdev->flags);
394 
395 	rtw89_write32_set(rtwdev, R_BE_CMAC_SHARE_FUNC_EN,
396 			  B_BE_CMAC_SHARE_EN | B_BE_RESPBA_EN | B_BE_ADDRSRCH_EN |
397 			  B_BE_BTCOEX_EN);
398 	rtw89_write32_set(rtwdev, R_BE_CMAC_FUNC_EN,
399 			  B_BE_CMAC_EN | B_BE_CMAC_TXEN |  B_BE_CMAC_RXEN |
400 			  B_BE_SIGB_EN | B_BE_PHYINTF_EN | B_BE_CMAC_DMA_EN |
401 			  B_BE_PTCLTOP_EN | B_BE_SCHEDULER_EN | B_BE_TMAC_EN |
402 			  B_BE_RMAC_EN | B_BE_TXTIME_EN | B_BE_RESP_PKTCTL_EN);
403 
404 	set_bit(RTW89_FLAG_CMAC0_FUNC, rtwdev->flags);
405 
406 	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
407 						       B_BE_FEN_BBPLAT_RSTB);
408 
409 	return 0;
410 }
411 
412 static int rtw8922a_pwr_off_func(struct rtw89_dev *rtwdev)
413 {
414 	u32 val32;
415 	int ret;
416 
417 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x10, 0x10);
418 	if (ret)
419 		return ret;
420 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x08);
421 	if (ret)
422 		return ret;
423 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x04);
424 	if (ret)
425 		return ret;
426 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S0, 0xC6, 0xFF);
427 	if (ret)
428 		return ret;
429 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_WL_RFC_S1, 0xC6, 0xFF);
430 	if (ret)
431 		return ret;
432 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0x80, 0x80);
433 	if (ret)
434 		return ret;
435 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x02);
436 	if (ret)
437 		return ret;
438 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x01);
439 	if (ret)
440 		return ret;
441 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x02, 0xFF);
442 	if (ret)
443 		return ret;
444 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_PLL, 0x00, 0xFF);
445 	if (ret)
446 		return ret;
447 
448 	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_R_SYM_ISO_ADDA_P02PP |
449 						       B_BE_R_SYM_ISO_ADDA_P12PP);
450 	rtw89_write8_clr(rtwdev, R_BE_ANAPAR_POW_MAC, B_BE_POW_PC_LDO_PORT0 |
451 						      B_BE_POW_PC_LDO_PORT1);
452 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_EN_WLON);
453 	rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE, B_BE_FEN_BB_IP_RSTN |
454 						      B_BE_FEN_BBPLAT_RSTB);
455 	rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC0_1P3);
456 
457 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x20);
458 	if (ret)
459 		return ret;
460 
461 	rtw89_write32_clr(rtwdev, R_BE_SYS_ADIE_PAD_PWR_CTRL, B_BE_SYM_PADPDN_WL_RFC1_1P3);
462 
463 	ret = rtw89_mac_write_xtal_si(rtwdev, XTAL_SI_ANAPAR_WL, 0, 0x40);
464 	if (ret)
465 		return ret;
466 
467 	rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HAXIDMA_IO_EN);
468 
469 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_IO_ST),
470 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
471 	if (ret)
472 		return ret;
473 
474 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HAXIDMA_BACKUP_RESTORE_ST),
475 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
476 	if (ret)
477 		return ret;
478 
479 	rtw89_write32_clr(rtwdev, R_BE_HCI_OPT_CTRL, B_BE_HCI_WLAN_IO_EN);
480 
481 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_HCI_WLAN_IO_ST),
482 				1000, 3000000, false, rtwdev, R_BE_HCI_OPT_CTRL);
483 	if (ret)
484 		return ret;
485 
486 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_OFFMAC);
487 
488 	ret = read_poll_timeout(rtw89_read32, val32, !(val32 & B_BE_APFM_OFFMAC),
489 				1000, 3000000, false, rtwdev, R_BE_SYS_PW_CTRL);
490 	if (ret)
491 		return ret;
492 
493 	rtw89_write32(rtwdev, R_BE_WLLPS_CTRL, 0x0000A1B2);
494 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_XTAL_OFF_A_DIE);
495 	rtw89_write32_set(rtwdev, R_BE_SYS_PW_CTRL, B_BE_APFM_SWLPS);
496 	rtw89_write32(rtwdev, R_BE_UDM1, 0);
497 
498 	return 0;
499 }
500 
501 static void rtw8922a_efuse_parsing_tssi(struct rtw89_dev *rtwdev,
502 					struct rtw8922a_efuse *map)
503 {
504 	struct rtw8922a_tssi_offset *ofst[] = {&map->path_a_tssi, &map->path_b_tssi};
505 	u8 *bw40_1s_tssi_6g_ofst[] = {map->bw40_1s_tssi_6g_a, map->bw40_1s_tssi_6g_b};
506 	struct rtw89_tssi_info *tssi = &rtwdev->tssi;
507 	u8 i, j;
508 
509 	tssi->thermal[RF_PATH_A] = map->path_a_therm;
510 	tssi->thermal[RF_PATH_B] = map->path_b_therm;
511 
512 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
513 		memcpy(tssi->tssi_cck[i], ofst[i]->cck_tssi,
514 		       sizeof(ofst[i]->cck_tssi));
515 
516 		for (j = 0; j < TSSI_CCK_CH_GROUP_NUM; j++)
517 			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
518 				    "[TSSI][EFUSE] path=%d cck[%d]=0x%x\n",
519 				    i, j, tssi->tssi_cck[i][j]);
520 
521 		memcpy(tssi->tssi_mcs[i], ofst[i]->bw40_tssi,
522 		       sizeof(ofst[i]->bw40_tssi));
523 		memcpy(tssi->tssi_mcs[i] + TSSI_MCS_2G_CH_GROUP_NUM,
524 		       ofst[i]->bw40_1s_tssi_5g, sizeof(ofst[i]->bw40_1s_tssi_5g));
525 		memcpy(tssi->tssi_6g_mcs[i], bw40_1s_tssi_6g_ofst[i],
526 		       sizeof(tssi->tssi_6g_mcs[i]));
527 
528 		for (j = 0; j < TSSI_MCS_CH_GROUP_NUM; j++)
529 			rtw89_debug(rtwdev, RTW89_DBG_TSSI,
530 				    "[TSSI][EFUSE] path=%d mcs[%d]=0x%x\n",
531 				    i, j, tssi->tssi_mcs[i][j]);
532 	}
533 }
534 
535 static void rtw8922a_efuse_parsing_gain_offset(struct rtw89_dev *rtwdev,
536 					       struct rtw8922a_efuse *map)
537 {
538 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
539 	bool all_0xff = true, all_0x00 = true;
540 	int i, j;
541 	u8 t;
542 
543 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_a._2g_cck;
544 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_CCK] = map->rx_gain_b._2g_cck;
545 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_a._2g_ofdm;
546 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_2G_OFDM] = map->rx_gain_b._2g_ofdm;
547 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_a._5g_low;
548 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_LOW] = map->rx_gain_b._5g_low;
549 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_a._5g_mid;
550 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_MID] = map->rx_gain_b._5g_mid;
551 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_a._5g_high;
552 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_5G_HIGH] = map->rx_gain_b._5g_high;
553 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_a._6g_l0;
554 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L0] = map->rx_gain_6g_b._6g_l0;
555 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_a._6g_l1;
556 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_L1] = map->rx_gain_6g_b._6g_l1;
557 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_a._6g_m0;
558 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M0] = map->rx_gain_6g_b._6g_m0;
559 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_a._6g_m1;
560 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_M1] = map->rx_gain_6g_b._6g_m1;
561 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_a._6g_h0;
562 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H0] = map->rx_gain_6g_b._6g_h0;
563 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_a._6g_h1;
564 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_H1] = map->rx_gain_6g_b._6g_h1;
565 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_a._6g_uh0;
566 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH0] = map->rx_gain_6g_b._6g_uh0;
567 	gain->offset[RF_PATH_A][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_a._6g_uh1;
568 	gain->offset[RF_PATH_B][RTW89_GAIN_OFFSET_6G_UH1] = map->rx_gain_6g_b._6g_uh1;
569 
570 	for (i = RF_PATH_A; i <= RF_PATH_B; i++)
571 		for (j = 0; j < RTW89_GAIN_OFFSET_NR; j++) {
572 			t = gain->offset[i][j];
573 			if (t != 0xff)
574 				all_0xff = false;
575 			if (t != 0x0)
576 				all_0x00 = false;
577 
578 			/* transform: sign-bit + U(7,2) to S(8,2) */
579 			if (t & 0x80)
580 				gain->offset[i][j] = (t ^ 0x7f) + 1;
581 		}
582 
583 	gain->offset_valid = !all_0xff && !all_0x00;
584 }
585 
586 static void rtw8922a_read_efuse_mac_addr(struct rtw89_dev *rtwdev, u32 addr)
587 {
588 	struct rtw89_efuse *efuse = &rtwdev->efuse;
589 	u16 val;
590 	int i;
591 
592 	for (i = 0; i < ETH_ALEN; i += 2, addr += 2) {
593 		val = rtw89_read16(rtwdev, addr);
594 		efuse->addr[i] = val & 0xff;
595 		efuse->addr[i + 1] = val >> 8;
596 	}
597 }
598 
599 static int rtw8922a_read_efuse_pci_sdio(struct rtw89_dev *rtwdev, u8 *log_map)
600 {
601 	struct rtw89_efuse *efuse = &rtwdev->efuse;
602 
603 	if (rtwdev->hci.type == RTW89_HCI_TYPE_PCIE)
604 		rtw8922a_read_efuse_mac_addr(rtwdev, 0x3104);
605 	else
606 		ether_addr_copy(efuse->addr, log_map + 0x001A);
607 
608 	return 0;
609 }
610 
611 static int rtw8922a_read_efuse_usb(struct rtw89_dev *rtwdev, u8 *log_map)
612 {
613 	rtw8922a_read_efuse_mac_addr(rtwdev, 0x4078);
614 
615 	return 0;
616 }
617 
618 static int rtw8922a_read_efuse_rf(struct rtw89_dev *rtwdev, u8 *log_map)
619 {
620 	struct rtw8922a_efuse *map = (struct rtw8922a_efuse *)log_map;
621 	struct rtw89_efuse *efuse = &rtwdev->efuse;
622 
623 	efuse->rfe_type = map->rfe_type;
624 	efuse->xtal_cap = map->xtal_k;
625 	efuse->country_code[0] = map->country_code[0];
626 	efuse->country_code[1] = map->country_code[1];
627 	rtw8922a_efuse_parsing_tssi(rtwdev, map);
628 	rtw8922a_efuse_parsing_gain_offset(rtwdev, map);
629 
630 	rtw89_info(rtwdev, "chip rfe_type is %d\n", efuse->rfe_type);
631 
632 	return 0;
633 }
634 
635 static int rtw8922a_read_efuse(struct rtw89_dev *rtwdev, u8 *log_map,
636 			       enum rtw89_efuse_block block)
637 {
638 	switch (block) {
639 	case RTW89_EFUSE_BLOCK_HCI_DIG_PCIE_SDIO:
640 		return rtw8922a_read_efuse_pci_sdio(rtwdev, log_map);
641 	case RTW89_EFUSE_BLOCK_HCI_DIG_USB:
642 		return rtw8922a_read_efuse_usb(rtwdev, log_map);
643 	case RTW89_EFUSE_BLOCK_RF:
644 		return rtw8922a_read_efuse_rf(rtwdev, log_map);
645 	default:
646 		return 0;
647 	}
648 }
649 
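/* Thermal trim efuse byte: bit 6 is the sign (set = positive), bits [5:0]
 * the magnitude; a value of 0xff means the field was not programmed.
 */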
650 #define THM_TRIM_POSITIVE_MASK BIT(6)
651 #define THM_TRIM_MAGNITUDE_MASK GENMASK(5, 0)
652 
653 static void rtw8922a_phycap_parsing_thermal_trim(struct rtw89_dev *rtwdev,
654 						 u8 *phycap_map)
655 {
656 	static const u32 thm_trim_addr[RF_PATH_NUM_8922A] = {0x1706, 0x1733};
657 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
658 	u32 addr = rtwdev->chip->phycap_addr;
659 	bool pg = true;
660 	u8 pg_th;
661 	s8 val;
662 	u8 i;
663 
664 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
665 		pg_th = phycap_map[thm_trim_addr[i] - addr];
666 		if (pg_th == 0xff) {
667 			info->thermal_trim[i] = 0;
668 			pg = false;
669 			break;
670 		}
671 
672 		val = u8_get_bits(pg_th, THM_TRIM_MAGNITUDE_MASK);
673 
674 		if (!(pg_th & THM_TRIM_POSITIVE_MASK))
675 			val *= -1;
676 
677 		info->thermal_trim[i] = val;
678 
679 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
680 			    "[THERMAL][TRIM] path=%d thermal_trim=0x%x (%d)\n",
681 			    i, pg_th, val);
682 	}
683 
684 	info->pg_thermal_trim = pg;
685 }
686 
687 static void rtw8922a_phycap_parsing_pa_bias_trim(struct rtw89_dev *rtwdev,
688 						 u8 *phycap_map)
689 {
690 	static const u32 pabias_trim_addr[RF_PATH_NUM_8922A] = {0x1707, 0x1734};
691 	static const u32 check_pa_pad_trim_addr = 0x1700;
692 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
693 	u32 addr = rtwdev->chip->phycap_addr;
694 	u8 val;
695 	u8 i;
696 
697 	val = phycap_map[check_pa_pad_trim_addr - addr];
698 	if (val != 0xff)
699 		info->pg_pa_bias_trim = true;
700 
701 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
702 		info->pa_bias_trim[i] = phycap_map[pabias_trim_addr[i] - addr];
703 
704 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
705 			    "[PA_BIAS][TRIM] path=%d pa_bias_trim=0x%x\n",
706 			    i, info->pa_bias_trim[i]);
707 	}
708 }
709 
710 static void rtw8922a_pa_bias_trim(struct rtw89_dev *rtwdev)
711 {
712 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
713 	u8 pabias_2g, pabias_5g;
714 	u8 i;
715 
716 	if (!info->pg_pa_bias_trim) {
717 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
718 			    "[PA_BIAS][TRIM] no PG, do nothing\n");
719 
720 		return;
721 	}
722 
723 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
724 		pabias_2g = FIELD_GET(GENMASK(3, 0), info->pa_bias_trim[i]);
725 		pabias_5g = FIELD_GET(GENMASK(7, 4), info->pa_bias_trim[i]);
726 
727 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
728 			    "[PA_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
729 			    i, pabias_2g, pabias_5g);
730 
731 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXG_V1, pabias_2g);
732 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASA_TXA_V1, pabias_5g);
733 	}
734 }
735 
736 static void rtw8922a_phycap_parsing_pad_bias_trim(struct rtw89_dev *rtwdev,
737 						  u8 *phycap_map)
738 {
739 	static const u32 pad_bias_trim_addr[RF_PATH_NUM_8922A] = {0x1708, 0x1735};
740 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
741 	u32 addr = rtwdev->chip->phycap_addr;
742 	u8 i;
743 
744 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
745 		info->pad_bias_trim[i] = phycap_map[pad_bias_trim_addr[i] - addr];
746 
747 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
748 			    "[PAD_BIAS][TRIM] path=%d pad_bias_trim=0x%x\n",
749 			    i, info->pad_bias_trim[i]);
750 	}
751 }
752 
753 static void rtw8922a_pad_bias_trim(struct rtw89_dev *rtwdev)
754 {
755 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
756 	u8 pad_bias_2g, pad_bias_5g;
757 	u8 i;
758 
759 	if (!info->pg_pa_bias_trim) {
760 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
761 			    "[PAD_BIAS][TRIM] no PG, do nothing\n");
762 		return;
763 	}
764 
765 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
766 		pad_bias_2g = u8_get_bits(info->pad_bias_trim[i], GENMASK(3, 0));
767 		pad_bias_5g = u8_get_bits(info->pad_bias_trim[i], GENMASK(7, 4));
768 
769 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
770 			    "[PAD_BIAS][TRIM] path=%d 2G=0x%x 5G=0x%x\n",
771 			    i, pad_bias_2g, pad_bias_5g);
772 
773 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXG_V1, pad_bias_2g);
774 		rtw89_write_rf(rtwdev, i, RR_BIASA, RR_BIASD_TXA_V1, pad_bias_5g);
775 	}
776 }
777 
778 static int rtw8922a_read_phycap(struct rtw89_dev *rtwdev, u8 *phycap_map)
779 {
780 	rtw8922a_phycap_parsing_thermal_trim(rtwdev, phycap_map);
781 	rtw8922a_phycap_parsing_pa_bias_trim(rtwdev, phycap_map);
782 	rtw8922a_phycap_parsing_pad_bias_trim(rtwdev, phycap_map);
783 
784 	return 0;
785 }
786 
787 static void rtw8922a_power_trim(struct rtw89_dev *rtwdev)
788 {
789 	rtw8922a_pa_bias_trim(rtwdev);
790 	rtw8922a_pad_bias_trim(rtwdev);
791 }
792 
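/* Apply per-MAC-band channel settings: RF bandwidth mode, TX sub-band
 * bitmap, band-dependent rate checks and SIFS MACTXEN timing.
 */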
793 static void rtw8922a_set_channel_mac(struct rtw89_dev *rtwdev,
794 				     const struct rtw89_chan *chan,
795 				     u8 mac_idx)
796 {
797 	u32 sub_carr = rtw89_mac_reg_by_idx(rtwdev, R_BE_TX_SUB_BAND_VALUE, mac_idx);
798 	u32 chk_rate = rtw89_mac_reg_by_idx(rtwdev, R_BE_TXRATE_CHK, mac_idx);
799 	u32 rf_mod = rtw89_mac_reg_by_idx(rtwdev, R_BE_WMAC_RFMOD, mac_idx);
800 	u8 txsb20 = 0, txsb40 = 0, txsb80 = 0;
801 	u8 rf_mod_val, chk_rate_mask;
802 	u32 txsb;
803 	u32 reg;
804 
805 	switch (chan->band_width) {
806 	case RTW89_CHANNEL_WIDTH_160:
807 		txsb80 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_80);
808 		fallthrough;
809 	case RTW89_CHANNEL_WIDTH_80:
810 		txsb40 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_40);
811 		fallthrough;
812 	case RTW89_CHANNEL_WIDTH_40:
813 		txsb20 = rtw89_phy_get_txsb(rtwdev, chan, RTW89_CHANNEL_WIDTH_20);
814 		break;
815 	default:
816 		break;
817 	}
818 
819 	switch (chan->band_width) {
820 	case RTW89_CHANNEL_WIDTH_160:
821 		rf_mod_val = BE_WMAC_RFMOD_160M;
822 		txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
823 		       u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK) |
824 		       u32_encode_bits(txsb80, B_BE_TXSB_80M_MASK);
825 		break;
826 	case RTW89_CHANNEL_WIDTH_80:
827 		rf_mod_val = BE_WMAC_RFMOD_80M;
828 		txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK) |
829 		       u32_encode_bits(txsb40, B_BE_TXSB_40M_MASK);
830 		break;
831 	case RTW89_CHANNEL_WIDTH_40:
832 		rf_mod_val = BE_WMAC_RFMOD_40M;
833 		txsb = u32_encode_bits(txsb20, B_BE_TXSB_20M_MASK);
834 		break;
835 	case RTW89_CHANNEL_WIDTH_20:
836 	default:
837 		rf_mod_val = BE_WMAC_RFMOD_20M;
838 		txsb = 0;
839 		break;
840 	}
841 
842 	if (txsb20 <= BE_PRI20_BITMAP_MAX)
843 		txsb |= u32_encode_bits(BIT(txsb20), B_BE_PRI20_BITMAP_MASK);
844 
845 	rtw89_write8_mask(rtwdev, rf_mod, B_BE_WMAC_RFMOD_MASK, rf_mod_val);
846 	rtw89_write32(rtwdev, sub_carr, txsb);
847 
848 	switch (chan->band_type) {
849 	case RTW89_BAND_2G:
850 		chk_rate_mask = B_BE_BAND_MODE;
851 		break;
852 	case RTW89_BAND_5G:
853 	case RTW89_BAND_6G:
854 		chk_rate_mask = B_BE_CHECK_CCK_EN | B_BE_RTS_LIMIT_IN_OFDM6;
855 		break;
856 	default:
857 		rtw89_warn(rtwdev, "Invalid band_type:%d\n", chan->band_type);
858 		return;
859 	}
860 
861 	rtw89_write8_clr(rtwdev, chk_rate, B_BE_BAND_MODE | B_BE_CHECK_CCK_EN |
862 					   B_BE_RTS_LIMIT_IN_OFDM6);
863 	rtw89_write8_set(rtwdev, chk_rate, chk_rate_mask);
864 
865 	switch (chan->band_width) {
866 	case RTW89_CHANNEL_WIDTH_320:
867 	case RTW89_CHANNEL_WIDTH_160:
868 	case RTW89_CHANNEL_WIDTH_80:
869 	case RTW89_CHANNEL_WIDTH_40:
870 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
871 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x41);
872 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
873 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x41);
874 		break;
875 	default:
876 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PREBKF_CFG_1, mac_idx);
877 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_T1_MASK, 0x3f);
878 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_MUEDCA_EN, mac_idx);
879 		rtw89_write32_mask(rtwdev, reg, B_BE_SIFS_MACTXEN_TB_T1_MASK, 0x3e);
880 		break;
881 	}
882 }
883 
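/* Per 2.4 GHz channel thresholds written to the Barker/CCK FC0-inverse
 * registers by rtw8922a_ctrl_sco_cck() below.
 */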
884 static const u32 rtw8922a_sco_barker_threshold[14] = {
885 	0x1fe4f, 0x1ff5e, 0x2006c, 0x2017b, 0x2028a, 0x20399, 0x204a8, 0x205b6,
886 	0x206c5, 0x207d4, 0x208e3, 0x209f2, 0x20b00, 0x20d8a
887 };
888 
889 static const u32 rtw8922a_sco_cck_threshold[14] = {
890 	0x2bdac, 0x2bf21, 0x2c095, 0x2c209, 0x2c37e, 0x2c4f2, 0x2c666, 0x2c7db,
891 	0x2c94f, 0x2cac3, 0x2cc38, 0x2cdac, 0x2cf21, 0x2d29e
892 };
893 
894 static int rtw8922a_ctrl_sco_cck(struct rtw89_dev *rtwdev,
895 				 u8 primary_ch, enum rtw89_bandwidth bw,
896 				 enum rtw89_phy_idx phy_idx)
897 {
898 	u8 ch_element;
899 
900 	if (primary_ch >= 14)
901 		return -EINVAL;
902 
903 	ch_element = primary_ch - 1;
904 
905 	rtw89_phy_write32_idx(rtwdev, R_BK_FC0INV, B_BK_FC0INV,
906 			      rtw8922a_sco_barker_threshold[ch_element],
907 			      phy_idx);
908 	rtw89_phy_write32_idx(rtwdev, R_CCK_FC0INV, B_CCK_FC0INV,
909 			      rtw8922a_sco_cck_threshold[ch_element],
910 			      phy_idx);
911 
912 	return 0;
913 }
914 
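/* Register/mask pairs for per-path BB gain entries; gain_g is used on
 * 2.4 GHz and gain_a on 5/6 GHz (see rtw8922a_set_lna_tia_gain()).
 */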
915 struct rtw8922a_bb_gain {
916 	u32 gain_g[BB_PATH_NUM_8922A];
917 	u32 gain_a[BB_PATH_NUM_8922A];
918 	u32 gain_g_mask;
919 	u32 gain_a_mask;
920 };
921 
922 static const struct rtw89_reg_def rpl_comp_bw160[RTW89_BW20_SC_160M] = {
923 	{ .addr = 0x41E8, .mask = 0xFF00},
924 	{ .addr = 0x41E8, .mask = 0xFF0000},
925 	{ .addr = 0x41E8, .mask = 0xFF000000},
926 	{ .addr = 0x41EC, .mask = 0xFF},
927 	{ .addr = 0x41EC, .mask = 0xFF00},
928 	{ .addr = 0x41EC, .mask = 0xFF0000},
929 	{ .addr = 0x41EC, .mask = 0xFF000000},
930 	{ .addr = 0x41F0, .mask = 0xFF}
931 };
932 
933 static const struct rtw89_reg_def rpl_comp_bw80[RTW89_BW20_SC_80M] = {
934 	{ .addr = 0x41F4, .mask = 0xFF},
935 	{ .addr = 0x41F4, .mask = 0xFF00},
936 	{ .addr = 0x41F4, .mask = 0xFF0000},
937 	{ .addr = 0x41F4, .mask = 0xFF000000}
938 };
939 
940 static const struct rtw89_reg_def rpl_comp_bw40[RTW89_BW20_SC_40M] = {
941 	{ .addr = 0x41F0, .mask = 0xFF0000},
942 	{ .addr = 0x41F0, .mask = 0xFF000000}
943 };
944 
945 static const struct rtw89_reg_def rpl_comp_bw20[RTW89_BW20_SC_20M] = {
946 	{ .addr = 0x41F0, .mask = 0xFF00}
947 };
948 
949 static const struct rtw8922a_bb_gain bb_gain_lna[LNA_GAIN_NUM] = {
950 	{ .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
951 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
952 	{ .gain_g = {0x409c, 0x449c}, .gain_a = {0x406C, 0x446C},
953 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
954 	{ .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
955 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
956 	{ .gain_g = {0x40a0, 0x44a0}, .gain_a = {0x4070, 0x4470},
957 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
958 	{ .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
959 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
960 	{ .gain_g = {0x40a4, 0x44a4}, .gain_a = {0x4074, 0x4474},
961 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF0000},
962 	{ .gain_g = {0x40a8, 0x44a8}, .gain_a = {0x4078, 0x4478},
963 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF},
964 };
965 
966 static const struct rtw8922a_bb_gain bb_gain_tia[TIA_GAIN_NUM] = {
967 	{ .gain_g = {0x4054, 0x4454}, .gain_a = {0x4054, 0x4454},
968 	  .gain_g_mask = 0x7FC0000, .gain_a_mask = 0x1FF},
969 	{ .gain_g = {0x4058, 0x4458}, .gain_a = {0x4054, 0x4454},
970 	  .gain_g_mask = 0x1FF, .gain_a_mask = 0x3FE00 },
971 };
972 
973 static const struct rtw8922a_bb_gain bb_op1db_lna[LNA_GAIN_NUM] = {
974 	{ .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x4078, 0x4478},
975 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF000000},
976 	{ .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x407c, 0x447c},
977 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF},
978 	{ .gain_g = {0x40ac, 0x44ac}, .gain_a = {0x407c, 0x447c},
979 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF00},
980 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x407c, 0x447c},
981 	  .gain_g_mask = 0xFF, .gain_a_mask = 0xFF0000},
982 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x407c, 0x447c},
983 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF000000},
984 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x4080, 0x4480},
985 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF},
986 	{ .gain_g = {0x40b0, 0x44b0}, .gain_a = {0x4080, 0x4480},
987 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF00},
988 };
989 
990 static const struct rtw8922a_bb_gain bb_op1db_tia_lna[TIA_LNA_OP1DB_NUM] = {
991 	{ .gain_g = {0x40b4, 0x44b4}, .gain_a = {0x4080, 0x4480},
992 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF000000},
993 	{ .gain_g = {0x40b4, 0x44b4}, .gain_a = {0x4084, 0x4484},
994 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF},
995 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
996 	  .gain_g_mask = 0xFF, .gain_a_mask = 0xFF00},
997 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
998 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF0000},
999 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4084, 0x4484},
1000 	  .gain_g_mask = 0xFF0000, .gain_a_mask = 0xFF000000},
1001 	{ .gain_g = {0x40b8, 0x44b8}, .gain_a = {0x4088, 0x4488},
1002 	  .gain_g_mask = 0xFF000000, .gain_a_mask = 0xFF},
1003 	{ .gain_g = {0x40bc, 0x44bc}, .gain_a = {0x4088, 0x4488},
1004 	  .gain_g_mask = 0xFF, .gain_a_mask = 0xFF00},
1005 	{ .gain_g = {0x40bc, 0x44bc}, .gain_a = {0x4088, 0x4488},
1006 	  .gain_g_mask = 0xFF00, .gain_a_mask = 0xFF0000},
1007 };
1008 
1009 struct rtw8922a_bb_gain_bypass {
1010 	u32 gain_g[BB_PATH_NUM_8922A];
1011 	u32 gain_a[BB_PATH_NUM_8922A];
1012 	u32 gain_mask_g;
1013 	u32 gain_mask_a;
1014 };
1015 
1016 static void rtw8922a_set_rpl_gain(struct rtw89_dev *rtwdev,
1017 				  const struct rtw89_chan *chan,
1018 				  enum rtw89_rf_path path,
1019 				  enum rtw89_phy_idx phy_idx)
1020 {
1021 	const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1022 	u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
1023 	u32 reg_path_ofst = 0;
1024 	u32 mask;
1025 	s32 val;
1026 	u32 reg;
1027 	int i;
1028 
1029 	if (path == RF_PATH_B)
1030 		reg_path_ofst = 0x400;
1031 
1032 	for (i = 0; i < RTW89_BW20_SC_160M; i++) {
1033 		reg = rpl_comp_bw160[i].addr | reg_path_ofst;
1034 		mask = rpl_comp_bw160[i].mask;
1035 		val = gain->rpl_ofst_160[gain_band][path][i];
1036 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1037 	}
1038 
1039 	for (i = 0; i < RTW89_BW20_SC_80M; i++) {
1040 		reg = rpl_comp_bw80[i].addr | reg_path_ofst;
1041 		mask = rpl_comp_bw80[i].mask;
1042 		val = gain->rpl_ofst_80[gain_band][path][i];
1043 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1044 	}
1045 
1046 	for (i = 0; i < RTW89_BW20_SC_40M; i++) {
1047 		reg = rpl_comp_bw40[i].addr | reg_path_ofst;
1048 		mask = rpl_comp_bw40[i].mask;
1049 		val = gain->rpl_ofst_40[gain_band][path][i];
1050 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1051 	}
1052 
1053 	for (i = 0; i < RTW89_BW20_SC_20M; i++) {
1054 		reg = rpl_comp_bw20[i].addr | reg_path_ofst;
1055 		mask = rpl_comp_bw20[i].mask;
1056 		val = gain->rpl_ofst_20[gain_band][path][i];
1057 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1058 	}
1059 }
1060 
1061 static void rtw8922a_set_lna_tia_gain(struct rtw89_dev *rtwdev,
1062 				      const struct rtw89_chan *chan,
1063 				      enum rtw89_rf_path path,
1064 				      enum rtw89_phy_idx phy_idx)
1065 {
1066 	const struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
1067 	u8 gain_band = rtw89_subband_to_gain_band_be(chan->subband_type);
1068 	enum rtw89_phy_bb_bw_be bw_type;
1069 	s32 val;
1070 	u32 reg;
1071 	u32 mask;
1072 	int i;
1073 
1074 	bw_type = chan->band_width <= RTW89_CHANNEL_WIDTH_40 ?
1075 		  RTW89_BB_BW_20_40 : RTW89_BB_BW_80_160_320;
1076 
1077 	for (i = 0; i < LNA_GAIN_NUM; i++) {
1078 		if (chan->band_type == RTW89_BAND_2G) {
1079 			reg = bb_gain_lna[i].gain_g[path];
1080 			mask = bb_gain_lna[i].gain_g_mask;
1081 		} else {
1082 			reg = bb_gain_lna[i].gain_a[path];
1083 			mask = bb_gain_lna[i].gain_a_mask;
1084 		}
1085 		val = gain->lna_gain[gain_band][bw_type][path][i];
1086 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1087 	}
1088 
1089 	for (i = 0; i < TIA_GAIN_NUM; i++) {
1090 		if (chan->band_type == RTW89_BAND_2G) {
1091 			reg = bb_gain_tia[i].gain_g[path];
1092 			mask = bb_gain_tia[i].gain_g_mask;
1093 		} else {
1094 			reg = bb_gain_tia[i].gain_a[path];
1095 			mask = bb_gain_tia[i].gain_a_mask;
1096 		}
1097 		val = gain->tia_gain[gain_band][bw_type][path][i];
1098 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1099 	}
1100 
1101 	for (i = 0; i < LNA_GAIN_NUM; i++) {
1102 		if (chan->band_type == RTW89_BAND_2G) {
1103 			reg = bb_op1db_lna[i].gain_g[path];
1104 			mask = bb_op1db_lna[i].gain_g_mask;
1105 		} else {
1106 			reg = bb_op1db_lna[i].gain_a[path];
1107 			mask = bb_op1db_lna[i].gain_a_mask;
1108 		}
1109 		val = gain->lna_op1db[gain_band][bw_type][path][i];
1110 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1111 	}
1112 
1113 	for (i = 0; i < TIA_LNA_OP1DB_NUM; i++) {
1114 		if (chan->band_type == RTW89_BAND_2G) {
1115 			reg = bb_op1db_tia_lna[i].gain_g[path];
1116 			mask = bb_op1db_tia_lna[i].gain_g_mask;
1117 		} else {
1118 			reg = bb_op1db_tia_lna[i].gain_a[path];
1119 			mask = bb_op1db_tia_lna[i].gain_a_mask;
1120 		}
1121 		val = gain->tia_lna_op1db[gain_band][bw_type][path][i];
1122 		rtw89_phy_write32_idx(rtwdev, reg, mask, val, phy_idx);
1123 	}
1124 }
1125 
1126 static void rtw8922a_set_gain(struct rtw89_dev *rtwdev,
1127 			      const struct rtw89_chan *chan,
1128 			      enum rtw89_rf_path path,
1129 			      enum rtw89_phy_idx phy_idx)
1130 {
1131 	rtw8922a_set_lna_tia_gain(rtwdev, chan, path, phy_idx);
1132 	rtw8922a_set_rpl_gain(rtwdev, chan, path, phy_idx);
1133 }
1134 
1135 static void rtw8922a_set_rx_gain_normal_cck(struct rtw89_dev *rtwdev,
1136 					    const struct rtw89_chan *chan,
1137 					    enum rtw89_rf_path path)
1138 {
1139 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1140 	s8 value = -gain->offset[path][RTW89_GAIN_OFFSET_2G_CCK]; /* S(8,2) */
1141 	u8 fraction = value & 0x3;
1142 
1143 	if (fraction) {
1144 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20,
1145 				       (0x4 - fraction) << 1);
1146 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40,
1147 				       (0x4 - fraction) << 1);
1148 
1149 		value >>= 2;
1150 		rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1151 				       value + 1 + 0xdc);
1152 	} else {
1153 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW20, 0);
1154 		rtw89_phy_write32_mask(rtwdev, R_MGAIN_BIAS, B_MGAIN_BIAS_BW40, 0);
1155 
1156 		value >>= 2;
1157 		rtw89_phy_write32_mask(rtwdev, R_CCK_RPL_OFST, B_CCK_RPL_OFST,
1158 				       value + 0xdc);
1159 	}
1160 }
1161 
1162 static void rtw8922a_set_rx_gain_normal_ofdm(struct rtw89_dev *rtwdev,
1163 					     const struct rtw89_chan *chan,
1164 					     enum rtw89_rf_path path)
1165 {
1166 	static const u32 rssi_tb_bias_comp[2] = {0x41f8, 0x45f8};
1167 	static const u32 rssi_tb_ext_comp[2] = {0x4208, 0x4608};
1168 	static const u32 rssi_ofst_addr[2] = {0x40c8, 0x44c8};
1169 	static const u32 rpl_bias_comp[2] = {0x41e8, 0x45e8};
1170 	static const u32 rpl_ext_comp[2] = {0x41f8, 0x45f8};
1171 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1172 	enum rtw89_gain_offset gain_band;
1173 	s8 v1, v2, v3;
1174 	s32 value;
1175 
1176 	gain_band = rtw89_subband_to_gain_offset_band_of_ofdm(chan->subband_type);
1177 	value = gain->offset[path][gain_band];
1178 	rtw89_phy_write32_mask(rtwdev, rssi_ofst_addr[path], 0xff000000, value + 0xF8);
1179 
1180 	value *= -4;
1181 	v1 = clamp_t(s32, value, S8_MIN, S8_MAX);
1182 	value -= v1;
1183 	v2 = clamp_t(s32, value, S8_MIN, S8_MAX);
1184 	value -= v2;
1185 	v3 = clamp_t(s32, value, S8_MIN, S8_MAX);
1186 
1187 	rtw89_phy_write32_mask(rtwdev, rpl_bias_comp[path], 0xff, v1);
1188 	rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff, v2);
1189 	rtw89_phy_write32_mask(rtwdev, rpl_ext_comp[path], 0xff00, v3);
1190 
1191 	rtw89_phy_write32_mask(rtwdev, rssi_tb_bias_comp[path], 0xff0000, v1);
1192 	rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff0000, v2);
1193 	rtw89_phy_write32_mask(rtwdev, rssi_tb_ext_comp[path], 0xff000000, v3);
1194 }
1195 
1196 static void rtw8922a_set_rx_gain_normal(struct rtw89_dev *rtwdev,
1197 					const struct rtw89_chan *chan,
1198 					enum rtw89_rf_path path)
1199 {
1200 	struct rtw89_phy_efuse_gain *gain = &rtwdev->efuse_gain;
1201 
1202 	if (!gain->offset_valid)
1203 		return;
1204 
1205 	if (chan->band_type == RTW89_BAND_2G)
1206 		rtw8922a_set_rx_gain_normal_cck(rtwdev, chan, path);
1207 
1208 	rtw8922a_set_rx_gain_normal_ofdm(rtwdev, chan, path);
1209 }
1210 
1211 static void rtw8922a_set_cck_parameters(struct rtw89_dev *rtwdev, u8 central_ch,
1212 					enum rtw89_phy_idx phy_idx)
1213 {
1214 	if (central_ch == 14) {
1215 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3b13ff, phy_idx);
1216 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x1c42de, phy_idx);
1217 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0xfdb0ad, phy_idx);
1218 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0xf60f6e, phy_idx);
1219 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfd8f92, phy_idx);
1220 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0x02d011, phy_idx);
1221 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0x01c02c, phy_idx);
1222 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xfff00a, phy_idx);
1223 	} else {
1224 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF01, B_PCOEFF01, 0x3a63ca, phy_idx);
1225 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF23, B_PCOEFF23, 0x2a833f, phy_idx);
1226 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF45, B_PCOEFF45, 0x1491f8, phy_idx);
1227 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF67, B_PCOEFF67, 0x03c0b0, phy_idx);
1228 		rtw89_phy_write32_idx(rtwdev, R_PCOEFF89, B_PCOEFF89, 0xfccff1, phy_idx);
1229 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFAB, B_PCOEFFAB, 0xfccfc3, phy_idx);
1230 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFCD, B_PCOEFFCD, 0xfebfdc, phy_idx);
1231 		rtw89_phy_write32_idx(rtwdev, R_PCOEFFEF, B_PCOEFFEF, 0xffdff7, phy_idx);
1232 	}
1233 }
1234 
1235 static void rtw8922a_ctrl_ch(struct rtw89_dev *rtwdev,
1236 			     const struct rtw89_chan *chan,
1237 			     enum rtw89_phy_idx phy_idx)
1238 {
1239 	static const u32 band_sel[2] = {0x4160, 0x4560};
1240 	u16 central_freq = chan->freq;
1241 	u8 central_ch = chan->channel;
1242 	u8 band = chan->band_type;
1243 	bool is_2g = band == RTW89_BAND_2G;
1244 	u8 chan_idx;
1245 	u8 path;
1246 	u8 sco;
1247 
1248 	if (!central_freq) {
1249 		rtw89_warn(rtwdev, "Invalid central_freq\n");
1250 		return;
1251 	}
1252 
1253 	rtw8922a_set_gain(rtwdev, chan, RF_PATH_A, phy_idx);
1254 	rtw8922a_set_gain(rtwdev, chan, RF_PATH_B, phy_idx);
1255 
1256 	for (path = RF_PATH_A; path < BB_PATH_NUM_8922A; path++)
1257 		rtw89_phy_write32_idx(rtwdev, band_sel[path], BIT((26)), is_2g, phy_idx);
1258 
1259 	rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_A);
1260 	rtw8922a_set_rx_gain_normal(rtwdev, chan, RF_PATH_B);
1261 
1262 	rtw89_phy_write32_idx(rtwdev, R_FC0, B_FC0, central_freq, phy_idx);
1263 	sco = DIV_ROUND_CLOSEST(1 << 18, central_freq);
1264 	rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_FC0_INV, sco, phy_idx);
1265 
1266 	if (band == RTW89_BAND_2G)
1267 		rtw8922a_set_cck_parameters(rtwdev, central_ch, phy_idx);
1268 
1269 	chan_idx = rtw89_encode_chan_idx(rtwdev, chan->primary_channel, band);
1270 	rtw89_phy_write32_idx(rtwdev, R_MAC_PIN_SEL, B_CH_IDX_SEG0, chan_idx, phy_idx);
1271 }
1272 
1273 static void
1274 rtw8922a_ctrl_bw(struct rtw89_dev *rtwdev, u8 pri_sb, u8 bw,
1275 		 enum rtw89_phy_idx phy_idx)
1276 {
1277 	switch (bw) {
1278 	case RTW89_CHANNEL_WIDTH_5:
1279 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1280 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x1, phy_idx);
1281 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1282 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1283 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1284 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1285 		break;
1286 	case RTW89_CHANNEL_WIDTH_10:
1287 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1288 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x2, phy_idx);
1289 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1290 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1291 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1292 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1293 		break;
1294 	case RTW89_CHANNEL_WIDTH_20:
1295 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x0, phy_idx);
1296 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1297 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, 0x0, phy_idx);
1298 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1299 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1300 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1301 		break;
1302 	case RTW89_CHANNEL_WIDTH_40:
1303 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x1, phy_idx);
1304 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1305 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1306 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1307 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x0, phy_idx);
1308 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x0, phy_idx);
1309 		break;
1310 	case RTW89_CHANNEL_WIDTH_80:
1311 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x2, phy_idx);
1312 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1313 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1314 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1315 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
1316 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
1317 		break;
1318 	case RTW89_CHANNEL_WIDTH_160:
1319 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_BW, 0x3, phy_idx);
1320 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_SMALLBW, 0x0, phy_idx);
1321 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_CHBW_PRICH, pri_sb, phy_idx);
1322 		rtw89_phy_write32_idx(rtwdev, R_DAC_CLK, B_DAC_CLK, 0x1, phy_idx);
1323 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP0, B_GAIN_MAP0_EN, 0x1, phy_idx);
1324 		rtw89_phy_write32_idx(rtwdev, R_GAIN_MAP1, B_GAIN_MAP1_EN, 0x1, phy_idx);
1325 		break;
1326 	default:
1327 		rtw89_warn(rtwdev, "Fail to switch bw (bw:%d, pri_sb:%d)\n", bw,
1328 			   pri_sb);
1329 		break;
1330 	}
1331 
1332 	if (bw == RTW89_CHANNEL_WIDTH_40)
1333 		rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 1, phy_idx);
1334 	else
1335 		rtw89_phy_write32_idx(rtwdev, R_FC0, B_BW40_2XFFT, 0, phy_idx);
1336 }
1337 
1338 static u32 rtw8922a_spur_freq(struct rtw89_dev *rtwdev,
1339 			      const struct rtw89_chan *chan)
1340 {
1341 	return 0;
1342 }
1343 
1344 #define CARRIER_SPACING_312_5 312500 /* 312.5 kHz */
1345 #define CARRIER_SPACING_78_125 78125 /* 78.125 kHz */
1346 #define MAX_TONE_NUM 2048
1347 
1348 static void rtw8922a_set_csi_tone_idx(struct rtw89_dev *rtwdev,
1349 				      const struct rtw89_chan *chan,
1350 				      enum rtw89_phy_idx phy_idx)
1351 {
1352 	s32 freq_diff, csi_idx, csi_tone_idx;
1353 	u32 spur_freq;
1354 
1355 	spur_freq = rtw8922a_spur_freq(rtwdev, chan);
1356 	if (spur_freq == 0) {
1357 		rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN,
1358 				      0, phy_idx);
1359 		return;
1360 	}
1361 
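	/* Convert the spur offset from MHz to Hz, scale it to the 78.125 kHz
	 * subcarrier spacing, and wrap it modulo MAX_TONE_NUM to obtain the
	 * CSI weighting tone index programmed below.
	 */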
1362 	freq_diff = (spur_freq - chan->freq) * 1000000;
1363 	csi_idx = s32_div_u32_round_closest(freq_diff, CARRIER_SPACING_78_125);
1364 	s32_div_u32_round_down(csi_idx, MAX_TONE_NUM, &csi_tone_idx);
1365 
1366 	rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_TONE_IDX,
1367 			      csi_tone_idx, phy_idx);
1368 	rtw89_phy_write32_idx(rtwdev, R_S0S1_CSI_WGT, B_S0S1_CSI_WGT_EN, 1, phy_idx);
1369 }
1370 
1371 static const struct rtw89_nbi_reg_def rtw8922a_nbi_reg_def[] = {
1372 	[RF_PATH_A] = {
1373 		.notch1_idx = {0x41a0, 0xFF},
1374 		.notch1_frac_idx = {0x41a0, 0xC00},
1375 		.notch1_en = {0x41a0, 0x1000},
1376 		.notch2_idx = {0x41ac, 0xFF},
1377 		.notch2_frac_idx = {0x41ac, 0xC00},
1378 		.notch2_en = {0x41ac, 0x1000},
1379 	},
1380 	[RF_PATH_B] = {
1381 		.notch1_idx = {0x45a0, 0xFF},
1382 		.notch1_frac_idx = {0x45a0, 0xC00},
1383 		.notch1_en = {0x45a0, 0x1000},
1384 		.notch2_idx = {0x45ac, 0xFF},
1385 		.notch2_frac_idx = {0x45ac, 0xC00},
1386 		.notch2_en = {0x45ac, 0x1000},
1387 	},
1388 };
1389 
1390 static void rtw8922a_set_nbi_tone_idx(struct rtw89_dev *rtwdev,
1391 				      const struct rtw89_chan *chan,
1392 				      enum rtw89_rf_path path,
1393 				      enum rtw89_phy_idx phy_idx)
1394 {
1395 	const struct rtw89_nbi_reg_def *nbi = &rtw8922a_nbi_reg_def[path];
1396 	s32 nbi_frac_idx, nbi_frac_tone_idx;
1397 	s32 nbi_idx, nbi_tone_idx;
1398 	bool notch2_chk = false;
1399 	u32 spur_freq, fc;
1400 	s32 freq_diff;
1401 
1402 	spur_freq = rtw8922a_spur_freq(rtwdev, chan);
1403 	if (spur_freq == 0) {
1404 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1405 				      nbi->notch1_en.mask, 0, phy_idx);
1406 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1407 				      nbi->notch2_en.mask, 0, phy_idx);
1408 		return;
1409 	}
1410 
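	/* For 160 MHz, shift the reference center 40 MHz toward the spur; the
	 * spur position relative to the primary channel then decides whether
	 * notch filter 1 or notch filter 2 is programmed below.
	 */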
1411 	fc = chan->freq;
1412 	if (chan->band_width == RTW89_CHANNEL_WIDTH_160) {
1413 		fc = (spur_freq > fc) ? fc + 40 : fc - 40;
1414 		if ((fc > spur_freq &&
1415 		     chan->channel < chan->primary_channel) ||
1416 		    (fc < spur_freq &&
1417 		     chan->channel > chan->primary_channel))
1418 			notch2_chk = true;
1419 	}
1420 
1421 	freq_diff = (spur_freq - fc) * 1000000;
1422 	nbi_idx = s32_div_u32_round_down(freq_diff, CARRIER_SPACING_312_5,
1423 					 &nbi_frac_idx);
1424 
1425 	if (chan->band_width == RTW89_CHANNEL_WIDTH_20) {
1426 		s32_div_u32_round_down(nbi_idx + 32, 64, &nbi_tone_idx);
1427 	} else {
1428 		u16 tone_para = (chan->band_width == RTW89_CHANNEL_WIDTH_40) ?
1429 				128 : 256;
1430 
1431 		s32_div_u32_round_down(nbi_idx, tone_para, &nbi_tone_idx);
1432 	}
1433 	nbi_frac_tone_idx =
1434 		s32_div_u32_round_closest(nbi_frac_idx, CARRIER_SPACING_78_125);
1435 
1436 	if (chan->band_width == RTW89_CHANNEL_WIDTH_160 && notch2_chk) {
1437 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_idx.addr,
1438 				      nbi->notch2_idx.mask, nbi_tone_idx, phy_idx);
1439 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_frac_idx.addr,
1440 				      nbi->notch2_frac_idx.mask, nbi_frac_tone_idx,
1441 				      phy_idx);
1442 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1443 				      nbi->notch2_en.mask, 0, phy_idx);
1444 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1445 				      nbi->notch2_en.mask, 1, phy_idx);
1446 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1447 				      nbi->notch1_en.mask, 0, phy_idx);
1448 	} else {
1449 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_idx.addr,
1450 				      nbi->notch1_idx.mask, nbi_tone_idx, phy_idx);
1451 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_frac_idx.addr,
1452 				      nbi->notch1_frac_idx.mask, nbi_frac_tone_idx,
1453 				      phy_idx);
1454 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1455 				      nbi->notch1_en.mask, 0, phy_idx);
1456 		rtw89_phy_write32_idx(rtwdev, nbi->notch1_en.addr,
1457 				      nbi->notch1_en.mask, 1, phy_idx);
1458 		rtw89_phy_write32_idx(rtwdev, nbi->notch2_en.addr,
1459 				      nbi->notch2_en.mask, 0, phy_idx);
1460 	}
1461 }
1462 
1463 static void rtw8922a_spur_elimination(struct rtw89_dev *rtwdev,
1464 				      const struct rtw89_chan *chan,
1465 				      enum rtw89_phy_idx phy_idx)
1466 {
1467 	rtw8922a_set_csi_tone_idx(rtwdev, chan, phy_idx);
1468 	rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_A, phy_idx);
1469 	rtw8922a_set_nbi_tone_idx(rtwdev, chan, RF_PATH_B, phy_idx);
1470 }
1471 
1472 static void rtw8922a_ctrl_afe_dac(struct rtw89_dev *rtwdev, enum rtw89_bandwidth bw,
1473 				  enum rtw89_rf_path path)
1474 {
1475 	u32 cr_ofst = 0x0;
1476 
1477 	if (path == RF_PATH_B)
1478 		cr_ofst = 0x100;
1479 
1480 	switch (bw) {
1481 	case RTW89_CHANNEL_WIDTH_5:
1482 	case RTW89_CHANNEL_WIDTH_10:
1483 	case RTW89_CHANNEL_WIDTH_20:
1484 	case RTW89_CHANNEL_WIDTH_40:
1485 	case RTW89_CHANNEL_WIDTH_80:
1486 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xE);
1487 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x7);
1488 		break;
1489 	case RTW89_CHANNEL_WIDTH_160:
1490 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC0 + cr_ofst, B_AFEDAC0, 0xD);
1491 		rtw89_phy_write32_mask(rtwdev, R_AFEDAC1 + cr_ofst, B_AFEDAC1, 0x6);
1492 		break;
1493 	default:
1494 		break;
1495 	}
1496 }
1497 
1498 static const struct rtw89_reg2_def bb_mcu0_init_reg[] = {
1499 	{0x6990, 0x00000000},
1500 	{0x6994, 0x00000000},
1501 	{0x6998, 0x00000000},
1502 	{0x6820, 0xFFFFFFFE},
1503 	{0x6800, 0xC0000FFE},
1504 	{0x6808, 0x76543210},
1505 	{0x6814, 0xBFBFB000},
1506 	{0x6818, 0x0478C009},
1507 	{0x6800, 0xC0000FFF},
1508 	{0x6820, 0xFFFFFFFF},
1509 };
1510 
1511 static const struct rtw89_reg2_def bb_mcu1_init_reg[] = {
1512 	{0x6990, 0x00000000},
1513 	{0x6994, 0x00000000},
1514 	{0x6998, 0x00000000},
1515 	{0x6820, 0xFFFFFFFE},
1516 	{0x6800, 0xC0000FFE},
1517 	{0x6808, 0x76543210},
1518 	{0x6814, 0xBFBFB000},
1519 	{0x6818, 0x0478C009},
1520 	{0x6800, 0xC0000FFF},
1521 	{0x6820, 0xFFFFFFFF},
1522 };
1523 
1524 static void rtw8922a_bbmcu_cr_init(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1525 {
1526 	const struct rtw89_reg2_def *reg;
1527 	int size;
1528 	int i;
1529 
1530 	if (phy_idx == RTW89_PHY_0) {
1531 		reg = bb_mcu0_init_reg;
1532 		size = ARRAY_SIZE(bb_mcu0_init_reg);
1533 	} else {
1534 		reg = bb_mcu1_init_reg;
1535 		size = ARRAY_SIZE(bb_mcu1_init_reg);
1536 	}
1537 
1538 	for (i = 0; i < size; i++, reg++)
1539 		rtw89_bbmcu_write32(rtwdev, reg->addr, reg->data, phy_idx);
1540 }
1541 
1542 static const u32 dmac_sys_mask[2] = {B_BE_DMAC_BB_PHY0_MASK, B_BE_DMAC_BB_PHY1_MASK};
1543 static const u32 bbrst_mask[2] = {B_BE_FEN_BBPLAT_RSTB, B_BE_FEN_BB1PLAT_RSTB};
1544 static const u32 glbrst_mask[2] = {B_BE_FEN_BB_IP_RSTN, B_BE_FEN_BB1_IP_RSTN};
1545 static const u32 mcu_bootrdy_mask[2] = {B_BE_BOOT_RDY0, B_BE_BOOT_RDY1};
1546 
1547 static void rtw8922a_bb_preinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1548 {
1549 	u32 rdy = 0;
1550 
1551 	if (phy_idx == RTW89_PHY_1)
1552 		rdy = 1;
1553 
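	/* Set up the DMAC BB interface, toggle the per-PHY global reset, raise
	 * boot-ready for PHY 1 only, and clear B_BE_MEM_BBMCU0_DS_V1 before
	 * loading the BB MCU init registers; the BB platform reset itself is
	 * released later in rtw8922a_bb_postinit().
	 */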
1554 	rtw89_write32_mask(rtwdev, R_BE_DMAC_SYS_CR32B, dmac_sys_mask[phy_idx], 0x7FF9);
1555 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x0);
1556 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx], 0x0);
1557 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, glbrst_mask[phy_idx], 0x1);
1558 	rtw89_write32_mask(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx], rdy);
1559 	rtw89_write32_mask(rtwdev, R_BE_MEM_PWR_CTRL, B_BE_MEM_BBMCU0_DS_V1, 0);
1560 
1561 	fsleep(1);
1562 	rtw8922a_bbmcu_cr_init(rtwdev, phy_idx);
1563 }
1564 
1565 static void rtw8922a_bb_postinit(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1566 {
1567 	if (phy_idx == RTW89_PHY_0)
1568 		rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, mcu_bootrdy_mask[phy_idx]);
1569 	rtw89_write32_set(rtwdev, R_BE_FEN_RST_ENABLE, bbrst_mask[phy_idx]);
1570 
1571 	rtw89_phy_write32_set(rtwdev, R_BBCLK, B_CLK_640M);
1572 	rtw89_phy_write32_clr(rtwdev, R_TXSCALE, B_TXFCTR_EN);
1573 	rtw89_phy_set_phy_regs(rtwdev, R_TXFCTR, B_TXFCTR_THD, 0x200);
1574 	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_EHT_RATE_TH, 0xA);
1575 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_HE_RATE_TH, 0xA);
1576 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_HT_VHT_TH, 0xAAA);
1577 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE, B_EHT_MCS14, 0x1);
1578 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE2, B_EHT_MCS15, 0x1);
1579 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_EHTTB_EN, 0x0);
1580 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEERSU_EN, 0x0);
1581 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_HEMU_EN, 0x0);
1582 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE3, B_TB_EN, 0x0);
1583 	rtw89_phy_set_phy_regs(rtwdev, R_SU_PUNC, B_SU_PUNC_EN, 0x1);
1584 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_HWGEN_EN, 0x1);
1585 	rtw89_phy_set_phy_regs(rtwdev, R_BEDGE5, B_PWROFST_COMP, 0x1);
1586 	rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_BY_SLOPE, 0x1);
1587 	rtw89_phy_set_phy_regs(rtwdev, R_MAG_A, B_MGA_AEND, 0xe0);
1588 	rtw89_phy_set_phy_regs(rtwdev, R_MAG_AB, B_MAG_AB, 0xe0c000);
1589 	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_A, 0x3FE0);
1590 	rtw89_phy_set_phy_regs(rtwdev, R_SLOPE, B_SLOPE_B, 0x3FE0);
1591 	rtw89_phy_set_phy_regs(rtwdev, R_SC_CORNER, B_SC_CORNER, 0x200);
1592 	rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x0, phy_idx);
1593 	rtw89_phy_write32_idx(rtwdev, R_UDP_COEEF, B_UDP_COEEF, 0x1, phy_idx);
1594 }
1595 
1596 static void rtw8922a_bb_reset_en(struct rtw89_dev *rtwdev, enum rtw89_band band,
1597 				 bool en, enum rtw89_phy_idx phy_idx)
1598 {
1599 	if (en) {
1600 		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
1601 		if (band == RTW89_BAND_2G)
1602 			rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1,
1603 					      B_RXCCA_BE1_DIS, 0x0, phy_idx);
1604 		rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x0, phy_idx);
1605 	} else {
1606 		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0x1, phy_idx);
1607 		rtw89_phy_write32_idx(rtwdev, R_PD_CTRL, B_PD_HIT_DIS, 0x1, phy_idx);
1608 		fsleep(1);
1609 		rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 0, phy_idx);
1610 	}
1611 }
1612 
1613 static int rtw8922a_ctrl_tx_path_tmac(struct rtw89_dev *rtwdev,
1614 				      enum rtw89_rf_path tx_path,
1615 				      enum rtw89_phy_idx phy_idx)
1616 {
1617 	struct rtw89_reg2_def path_com_cr[] = {
1618 		{0x11A00, 0x21C86900},
1619 		{0x11A04, 0x00E4E433},
1620 		{0x11A08, 0x39390CC9},
1621 		{0x11A0C, 0x4E433240},
1622 		{0x11A10, 0x90CC900E},
1623 		{0x11A14, 0x00240393},
1624 		{0x11A18, 0x201C8600},
1625 	};
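	/* The initializer above matches the RF_PATH_AB case; the single-path
	 * branches below overwrite the entries that differ.
	 */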
1626 	int ret = 0;
1627 	u32 reg;
1628 	int i;
1629 
1630 	rtw89_phy_write32_idx(rtwdev, R_MAC_SEL, B_MAC_SEL, 0x0, phy_idx);
1631 
1632 	if (phy_idx == RTW89_PHY_1 && !rtwdev->dbcc_en)
1633 		return 0;
1634 
1635 	if (tx_path == RF_PATH_A) {
1636 		path_com_cr[0].data = 0x21C82900;
1637 		path_com_cr[1].data = 0x00E4E431;
1638 		path_com_cr[2].data = 0x39390C49;
1639 		path_com_cr[3].data = 0x4E431240;
1640 		path_com_cr[4].data = 0x90C4900E;
1641 		path_com_cr[6].data = 0x201C8200;
1642 	} else if (tx_path == RF_PATH_B) {
1643 		path_com_cr[0].data = 0x21C04900;
1644 		path_com_cr[1].data = 0x00E4E032;
1645 		path_com_cr[2].data = 0x39380C89;
1646 		path_com_cr[3].data = 0x4E032240;
1647 		path_com_cr[4].data = 0x80C8900E;
1648 		path_com_cr[6].data = 0x201C0400;
1649 	} else if (tx_path == RF_PATH_AB) {
1650 		path_com_cr[0].data = 0x21C86900;
1651 		path_com_cr[1].data = 0x00E4E433;
1652 		path_com_cr[2].data = 0x39390CC9;
1653 		path_com_cr[3].data = 0x4E433240;
1654 		path_com_cr[4].data = 0x90CC900E;
1655 		path_com_cr[6].data = 0x201C8600;
1656 	} else {
1657 		ret = -EINVAL;
1658 	}
1659 
1660 	for (i = 0; i < ARRAY_SIZE(path_com_cr); i++) {
1661 		reg = rtw89_mac_reg_by_idx(rtwdev, path_com_cr[i].addr, phy_idx);
1662 		rtw89_write32(rtwdev, reg, path_com_cr[i].data);
1663 	}
1664 
1665 	return ret;
1666 }
1667 
1668 static void rtw8922a_bb_reset(struct rtw89_dev *rtwdev, enum rtw89_phy_idx phy_idx)
1669 {
1670 }
1671 
1672 static int rtw8922a_cfg_rx_nss_limit(struct rtw89_dev *rtwdev, u8 rx_nss,
1673 				     enum rtw89_phy_idx phy_idx)
1674 {
1675 	if (rx_nss == 1) {
1676 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 0, phy_idx);
1677 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 0, phy_idx);
1678 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1679 				      HE_N_USER_MAX_8922A, phy_idx);
1680 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 0, phy_idx);
1681 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 0, phy_idx);
1682 		rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 0, phy_idx);
1683 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 0,
1684 				      phy_idx);
1685 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1686 				      HE_N_USER_MAX_8922A, phy_idx);
1687 	} else if (rx_nss == 2) {
1688 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_HTMCS_LMT, 1, phy_idx);
1689 		rtw89_phy_write32_idx(rtwdev, R_BRK_R, B_VHTMCS_LMT, 1, phy_idx);
1690 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_N_USR_MAX,
1691 				      HE_N_USER_MAX_8922A, phy_idx);
1692 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_NSS_MAX, 1, phy_idx);
1693 		rtw89_phy_write32_idx(rtwdev, R_BRK_HE, B_TB_NSS_MAX, 1, phy_idx);
1694 		rtw89_phy_write32_idx(rtwdev, R_BRK_EHT, B_RXEHT_NSS_MAX, 1, phy_idx);
1695 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHTTB_NSS_MAX, 1,
1696 				      phy_idx);
1697 		rtw89_phy_write32_idx(rtwdev, R_BRK_RXEHT, B_RXEHT_N_USER_MAX,
1698 				      HE_N_USER_MAX_8922A, phy_idx);
1699 	} else {
1700 		return -EINVAL;
1701 	}
1702 
1703 	return 0;
1704 }
1705 
1706 static void rtw8922a_tssi_reset(struct rtw89_dev *rtwdev,
1707 				enum rtw89_rf_path path,
1708 				enum rtw89_phy_idx phy_idx)
1709 {
1710 	if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1711 		if (phy_idx == RTW89_PHY_0) {
1712 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0);
1713 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1);
1714 		} else {
1715 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0);
1716 			rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1);
1717 		}
1718 	} else {
1719 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x0);
1720 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTA, B_TXPWR_RSTA, 0x1);
1721 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x0);
1722 		rtw89_phy_write32_mask(rtwdev, R_TXPWR_RSTB, B_TXPWR_RSTB, 0x1);
1723 	}
1724 }
1725 
1726 static int rtw8922a_ctrl_rx_path_tmac(struct rtw89_dev *rtwdev,
1727 				      enum rtw89_rf_path rx_path,
1728 				      enum rtw89_phy_idx phy_idx)
1729 {
1730 	u8 rx_nss = (rx_path == RF_PATH_AB) ? 2 : 1;
1731 
1732 	/* Set to 0 first to avoid abnormal EDCCA report */
1733 	rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x0, phy_idx);
1734 
1735 	if (rx_path == RF_PATH_A) {
1736 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x1, phy_idx);
1737 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 1, phy_idx);
1738 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1739 		rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1740 	} else if (rx_path == RF_PATH_B) {
1741 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x2, phy_idx);
1742 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 2, phy_idx);
1743 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1744 		rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1745 	} else if (rx_path == RF_PATH_AB) {
1746 		rtw89_phy_write32_idx(rtwdev, R_ANT_CHBW, B_ANT_RX_SG0, 0x3, phy_idx);
1747 		rtw89_phy_write32_idx(rtwdev, R_FC0INV_SBW, B_RX_1RCCA, 3, phy_idx);
1748 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
1749 		rtw8922a_tssi_reset(rtwdev, rx_path, phy_idx);
1750 	} else {
1751 		return -EINVAL;
1752 	}
1753 
1754 	return 0;
1755 }
1756 
1757 #define DIGITAL_PWR_COMP_REG_NUM 22
1758 static const u32 rtw8922a_digital_pwr_comp_val[][DIGITAL_PWR_COMP_REG_NUM] = {
1759 	{0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A,
1760 	 0x0BB80708, 0x17701194, 0x02020100, 0x03030303, 0x01000303,
1761 	 0x05030302, 0x06060605, 0x06050300, 0x0A090807, 0x02000B0B,
1762 	 0x09080604, 0x0D0D0C0B, 0x08060400, 0x110F0C0B, 0x05001111,
1763 	 0x0D0C0907, 0x12121210},
1764 	{0x012C0096, 0x044C02BC, 0x00322710, 0x015E0096, 0x03C8028A,
1765 	 0x0BB80708, 0x17701194, 0x04030201, 0x05050505, 0x01000505,
1766 	 0x07060504, 0x09090908, 0x09070400, 0x0E0D0C0B, 0x03000E0E,
1767 	 0x0D0B0907, 0x1010100F, 0x0B080500, 0x1512100D, 0x05001515,
1768 	 0x100D0B08, 0x15151512},
1769 };
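/* Row 0 holds the 1SS compensation values and row 1 the 2SS values; see the
 * nss check in rtw8922a_set_digital_pwr_comp() below.
 */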
1770 
1771 static void rtw8922a_set_digital_pwr_comp(struct rtw89_dev *rtwdev,
1772 					  bool enable, u8 nss,
1773 					  enum rtw89_rf_path path)
1774 {
1775 	static const u32 ltpc_t0[2] = {R_BE_LTPC_T0_PATH0, R_BE_LTPC_T0_PATH1};
1776 	const u32 *digital_pwr_comp;
1777 	u32 addr, val;
1778 	u32 i;
1779 
1780 	if (nss == 1)
1781 		digital_pwr_comp = rtw8922a_digital_pwr_comp_val[0];
1782 	else
1783 		digital_pwr_comp = rtw8922a_digital_pwr_comp_val[1];
1784 
1785 	addr = ltpc_t0[path];
1786 	for (i = 0; i < DIGITAL_PWR_COMP_REG_NUM; i++, addr += 4) {
1787 		val = enable ? digital_pwr_comp[i] : 0;
1788 		rtw89_phy_write32(rtwdev, addr, val);
1789 	}
1790 }
1791 
1792 static void rtw8922a_digital_pwr_comp(struct rtw89_dev *rtwdev,
1793 				      enum rtw89_phy_idx phy_idx)
1794 {
1795 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
1796 	bool enable = chan->band_type != RTW89_BAND_2G;
1797 	u8 path;
1798 
1799 	if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
1800 		if (phy_idx == RTW89_PHY_0)
1801 			path = RF_PATH_A;
1802 		else
1803 			path = RF_PATH_B;
1804 		rtw8922a_set_digital_pwr_comp(rtwdev, enable, 1, path);
1805 	} else {
1806 		rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_A);
1807 		rtw8922a_set_digital_pwr_comp(rtwdev, enable, 2, RF_PATH_B);
1808 	}
1809 }
1810 
1811 static int rtw8922a_ctrl_mlo(struct rtw89_dev *rtwdev, enum rtw89_mlo_dbcc_mode mode)
1812 {
1813 	const struct rtw89_chan *chan0, *chan1;
1814 
1815 	if (mode == MLO_1_PLUS_1_1RF || mode == DBCC_LEGACY) {
1816 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x1);
1817 		rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x0);
1818 	} else if (mode == MLO_2_PLUS_0_1RF || mode == MLO_0_PLUS_2_1RF ||
1819 		   mode == MLO_DBCC_NOT_SUPPORT) {
1820 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1821 		rtw89_phy_write32_mask(rtwdev, R_DBCC_FA, B_DBCC_FA, 0x1);
1822 	} else {
1823 		return -EOPNOTSUPP;
1824 	}
1825 
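	/* Pick the channel feeding each path's AFE DAC setup: 1+1 uses both
	 * MLO links, 0+2 runs both paths from link 1, and every other mode
	 * drives both paths from link 0.
	 */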
1826 	if (mode == MLO_1_PLUS_1_1RF) {
1827 		chan0 = rtw89_mgnt_chan_get(rtwdev, 0);
1828 		chan1 = rtw89_mgnt_chan_get(rtwdev, 1);
1829 	} else if (mode == MLO_0_PLUS_2_1RF) {
1830 		chan1 = rtw89_mgnt_chan_get(rtwdev, 1);
1831 		chan0 = chan1;
1832 	} else {
1833 		chan0 = rtw89_mgnt_chan_get(rtwdev, 0);
1834 		chan1 = chan0;
1835 	}
1836 
1837 	rtw8922a_ctrl_afe_dac(rtwdev, chan0->band_width, RF_PATH_A);
1838 	rtw8922a_ctrl_afe_dac(rtwdev, chan1->band_width, RF_PATH_B);
1839 
1840 	rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
1841 
1842 	if (mode == MLO_2_PLUS_0_1RF) {
1843 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1844 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
1845 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
1846 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
1847 	} else if (mode == MLO_0_PLUS_2_1RF) {
1848 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1849 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
1850 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
1851 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
1852 	} else if ((mode == MLO_1_PLUS_1_1RF) || (mode == DBCC_LEGACY)) {
1853 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x7BAB);
1854 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3BAB);
1855 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x3AAB);
1856 	} else {
1857 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x180);
1858 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x0);
1859 	}
1860 
1861 	return 0;
1862 }
1863 
1864 static void rtw8922a_bb_sethw(struct rtw89_dev *rtwdev)
1865 {
1866 	u32 reg;
1867 
1868 	rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP, B_EN_SND_WO_NDP);
1869 	rtw89_phy_write32_clr(rtwdev, R_EN_SND_WO_NDP_C1, B_EN_SND_WO_NDP);
1870 
1871 	rtw89_write32_mask(rtwdev, R_BE_PWR_BOOST, B_BE_PWR_CTRL_SEL, 0);
1872 	if (rtwdev->dbcc_en) {
1873 		reg = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, RTW89_MAC_1);
1874 		rtw89_write32_mask(rtwdev, reg, B_BE_PWR_CTRL_SEL, 0);
1875 	}
1876 
1877 	rtw8922a_ctrl_mlo(rtwdev, rtwdev->mlo_dbcc_mode);
1878 }
1879 
1880 static void rtw8922a_ctrl_cck_en(struct rtw89_dev *rtwdev, bool cck_en,
1881 				 enum rtw89_phy_idx phy_idx)
1882 {
1883 	if (cck_en) {
1884 		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 0, phy_idx);
1885 		rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 1, phy_idx);
1886 		rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1887 				      0, phy_idx);
1888 	} else {
1889 		rtw89_phy_write32_idx(rtwdev, R_RXCCA_BE1, B_RXCCA_BE1_DIS, 1, phy_idx);
1890 		rtw89_phy_write32_idx(rtwdev, R_UPD_CLK_ADC, B_ENABLE_CCK, 0, phy_idx);
1891 		rtw89_phy_write32_idx(rtwdev, R_PD_ARBITER_OFF, B_PD_ARBITER_OFF,
1892 				      1, phy_idx);
1893 	}
1894 }
1895 
1896 static void rtw8922a_set_channel_bb(struct rtw89_dev *rtwdev,
1897 				    const struct rtw89_chan *chan,
1898 				    enum rtw89_phy_idx phy_idx)
1899 {
1900 	bool cck_en = chan->band_type == RTW89_BAND_2G;
1901 	u8 pri_sb = chan->pri_sb_idx;
1902 
1903 	if (cck_en)
1904 		rtw8922a_ctrl_sco_cck(rtwdev, chan->primary_channel,
1905 				      chan->band_width, phy_idx);
1906 
1907 	rtw8922a_ctrl_ch(rtwdev, chan, phy_idx);
1908 	rtw8922a_ctrl_bw(rtwdev, pri_sb, chan->band_width, phy_idx);
1909 	rtw8922a_ctrl_cck_en(rtwdev, cck_en, phy_idx);
1910 	rtw8922a_spur_elimination(rtwdev, chan, phy_idx);
1911 
1912 	rtw89_phy_write32_idx(rtwdev, R_RSTB_ASYNC, B_RSTB_ASYNC_ALL, 1, phy_idx);
1913 	rtw8922a_tssi_reset(rtwdev, RF_PATH_AB, phy_idx);
1914 }
1915 
1916 static void rtw8922a_pre_set_channel_bb(struct rtw89_dev *rtwdev,
1917 					enum rtw89_phy_idx phy_idx)
1918 {
1919 	if (!rtwdev->dbcc_en)
1920 		return;
1921 
1922 	if (phy_idx == RTW89_PHY_0) {
1923 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1924 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0x6180);
1925 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1926 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xABA9);
1927 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEBA9);
1928 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEAA9);
1929 	} else {
1930 		rtw89_phy_write32_mask(rtwdev, R_DBCC, B_DBCC_EN, 0x0);
1931 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xBBAB);
1932 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xAFFF);
1933 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEFFF);
1934 		rtw89_phy_write32_mask(rtwdev, R_EMLSR, B_EMLSR_PARM, 0xEEFF);
1935 	}
1936 }
1937 
1938 static void rtw8922a_post_set_channel_bb(struct rtw89_dev *rtwdev,
1939 					 enum rtw89_mlo_dbcc_mode mode,
1940 					 enum rtw89_phy_idx phy_idx)
1941 {
1942 	if (!rtwdev->dbcc_en)
1943 		return;
1944 
1945 	rtw8922a_digital_pwr_comp(rtwdev, phy_idx);
1946 	rtw8922a_ctrl_mlo(rtwdev, mode);
1947 }
1948 
1949 static void rtw8922a_set_channel(struct rtw89_dev *rtwdev,
1950 				 const struct rtw89_chan *chan,
1951 				 enum rtw89_mac_idx mac_idx,
1952 				 enum rtw89_phy_idx phy_idx)
1953 {
1954 	rtw8922a_set_channel_mac(rtwdev, chan, mac_idx);
1955 	rtw8922a_set_channel_bb(rtwdev, chan, phy_idx);
1956 	rtw8922a_set_channel_rf(rtwdev, chan, phy_idx);
1957 }
1958 
1959 static void rtw8922a_dfs_en_idx(struct rtw89_dev *rtwdev,
1960 				enum rtw89_phy_idx phy_idx, enum rtw89_rf_path path,
1961 				bool en)
1962 {
1963 	u32 path_ofst = (path == RF_PATH_B) ? 0x100 : 0x0;
1964 
1965 	if (en)
1966 		rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 1,
1967 				      phy_idx);
1968 	else
1969 		rtw89_phy_write32_idx(rtwdev, 0x2800 + path_ofst, BIT(1), 0,
1970 				      phy_idx);
1971 }
1972 
1973 static void rtw8922a_dfs_en(struct rtw89_dev *rtwdev, bool en,
1974 			    enum rtw89_phy_idx phy_idx)
1975 {
1976 	rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_A, en);
1977 	rtw8922a_dfs_en_idx(rtwdev, phy_idx, RF_PATH_B, en);
1978 }
1979 
1980 static void rtw8922a_adc_en_path(struct rtw89_dev *rtwdev,
1981 				 enum rtw89_rf_path path, bool en)
1982 {
1983 	u32 val;
1984 
1985 	val = rtw89_phy_read32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1);
1986 
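	/* Read-modify-write: bit 0 covers path A and bit 1 path B; a path is
	 * enabled by clearing its bit and disabled by setting it.
	 */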
1987 	if (en) {
1988 		if (path == RF_PATH_A)
1989 			val &= ~0x1;
1990 		else
1991 			val &= ~0x2;
1992 	} else {
1993 		if (path == RF_PATH_A)
1994 			val |= 0x1;
1995 		else
1996 			val |= 0x2;
1997 	}
1998 
1999 	rtw89_phy_write32_mask(rtwdev, R_ADC_FIFO_V1, B_ADC_FIFO_EN_V1, val);
2000 }
2001 
2002 static void rtw8922a_adc_en(struct rtw89_dev *rtwdev, bool en, u8 phy_idx)
2003 {
2004 	if (rtwdev->mlo_dbcc_mode == MLO_1_PLUS_1_1RF) {
2005 		if (phy_idx == RTW89_PHY_0)
2006 			rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en);
2007 		else
2008 			rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en);
2009 	} else {
2010 		rtw8922a_adc_en_path(rtwdev, RF_PATH_A, en);
2011 		rtw8922a_adc_en_path(rtwdev, RF_PATH_B, en);
2012 	}
2013 }
2014 
2015 static
2016 void rtw8922a_hal_reset(struct rtw89_dev *rtwdev,
2017 			enum rtw89_phy_idx phy_idx, enum rtw89_mac_idx mac_idx,
2018 			enum rtw89_band band, u32 *tx_en, bool enter)
2019 {
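	/* enter=true quiesces the PHY (stop scheduler TX, drop PPDU status,
	 * disable DFS/TSSI/ADC, then hold the BB in reset); enter=false
	 * re-enables them and resumes scheduler TX.
	 */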
2020 	if (enter) {
2021 		rtw89_chip_stop_sch_tx(rtwdev, mac_idx, tx_en, RTW89_SCH_TX_SEL_ALL);
2022 		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, false);
2023 		rtw8922a_dfs_en(rtwdev, false, phy_idx);
2024 		rtw8922a_tssi_cont_en_phyidx(rtwdev, false, phy_idx);
2025 		rtw8922a_adc_en(rtwdev, false, phy_idx);
2026 		fsleep(40);
2027 		rtw8922a_bb_reset_en(rtwdev, band, false, phy_idx);
2028 	} else {
2029 		rtw89_mac_cfg_ppdu_status(rtwdev, mac_idx, true);
2030 		rtw8922a_adc_en(rtwdev, true, phy_idx);
2031 		rtw8922a_dfs_en(rtwdev, true, phy_idx);
2032 		rtw8922a_tssi_cont_en_phyidx(rtwdev, true, phy_idx);
2033 		rtw8922a_bb_reset_en(rtwdev, band, true, phy_idx);
2034 		rtw89_chip_resume_sch_tx(rtwdev, mac_idx, *tx_en);
2035 	}
2036 }
2037 
2038 static void rtw8922a_set_channel_help(struct rtw89_dev *rtwdev, bool enter,
2039 				      struct rtw89_channel_help_params *p,
2040 				      const struct rtw89_chan *chan,
2041 				      enum rtw89_mac_idx mac_idx,
2042 				      enum rtw89_phy_idx phy_idx)
2043 {
2044 	if (enter) {
2045 		rtw8922a_pre_set_channel_bb(rtwdev, phy_idx);
2046 		rtw8922a_pre_set_channel_rf(rtwdev, phy_idx);
2047 	}
2048 
2049 	rtw8922a_hal_reset(rtwdev, phy_idx, mac_idx, chan->band_type, &p->tx_en, enter);
2050 
2051 	if (!enter) {
2052 		rtw8922a_post_set_channel_bb(rtwdev, rtwdev->mlo_dbcc_mode, phy_idx);
2053 		rtw8922a_post_set_channel_rf(rtwdev, phy_idx);
2054 	}
2055 }
2056 
2057 static void rtw8922a_rfk_init(struct rtw89_dev *rtwdev)
2058 {
2059 	struct rtw89_rfk_mcc_info *rfk_mcc = &rtwdev->rfk_mcc;
2060 
2061 	rtwdev->is_tssi_mode[RF_PATH_A] = false;
2062 	rtwdev->is_tssi_mode[RF_PATH_B] = false;
2063 	memset(rfk_mcc, 0, sizeof(*rfk_mcc));
2064 }
2065 
2066 static void __rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev,
2067 				     enum rtw89_phy_idx phy_idx,
2068 				     const struct rtw89_chan *chan)
2069 {
2070 	rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
2071 
2072 	rtw89_phy_rfk_dack_and_wait(rtwdev, phy_idx, chan, 58);
2073 	rtw89_phy_rfk_rxdck_and_wait(rtwdev, phy_idx, chan, false, 32);
2074 }
2075 
2076 static void rtw8922a_rfk_init_late(struct rtw89_dev *rtwdev)
2077 {
2078 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
2079 
2080 	__rtw8922a_rfk_init_late(rtwdev, RTW89_PHY_0, chan);
2081 	if (rtwdev->dbcc_en)
2082 		__rtw8922a_rfk_init_late(rtwdev, RTW89_PHY_1, chan);
2083 }
2084 
2085 static void _wait_rx_mode(struct rtw89_dev *rtwdev, u8 kpath)
2086 {
2087 	u32 rf_mode;
2088 	u8 path;
2089 	int ret;
2090 
2091 	for (path = 0; path < RF_PATH_NUM_8922A; path++) {
2092 		if (!(kpath & BIT(path)))
2093 			continue;
2094 
2095 		ret = read_poll_timeout_atomic(rtw89_read_rf, rf_mode, rf_mode != 2,
2096 					       2, 5000, false, rtwdev, path, 0x00,
2097 					       RR_MOD_MASK);
2098 		rtw89_debug(rtwdev, RTW89_DBG_RFK,
2099 			    "[RFK] Wait S%d to Rx mode!! (ret = %d)\n",
2100 			    path, ret);
2101 	}
2102 }
2103 
2104 static void rtw8922a_rfk_channel(struct rtw89_dev *rtwdev,
2105 				 struct rtw89_vif_link *rtwvif_link)
2106 {
2107 	enum rtw89_chanctx_idx chanctx_idx = rtwvif_link->chanctx_idx;
2108 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, chanctx_idx);
2109 	enum rtw89_phy_idx phy_idx = rtwvif_link->phy_idx;
2110 	u8 phy_map = rtw89_btc_phymap(rtwdev, phy_idx, RF_AB, chanctx_idx);
2111 	u32 tx_en;
2112 
2113 	rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_START);
2114 	rtw89_chip_stop_sch_tx(rtwdev, phy_idx, &tx_en, RTW89_SCH_TX_SEL_ALL);
2115 	_wait_rx_mode(rtwdev, RF_AB);
2116 
2117 	rtw89_phy_rfk_pre_ntfy_and_wait(rtwdev, phy_idx, 5);
2118 	rtw89_phy_rfk_txgapk_and_wait(rtwdev, phy_idx, chan, 54);
2119 	rtw89_phy_rfk_iqk_and_wait(rtwdev, phy_idx, chan, 84);
2120 	rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_NORMAL, 20);
2121 	rtw89_phy_rfk_dpk_and_wait(rtwdev, phy_idx, chan, 34);
2122 	rtw89_phy_rfk_rxdck_and_wait(rtwdev, RTW89_PHY_0, chan, true, 32);
2123 
2124 	rtw89_chip_resume_sch_tx(rtwdev, phy_idx, tx_en);
2125 	rtw89_btc_ntfy_wl_rfk(rtwdev, phy_map, BTC_WRFKT_CHLK, BTC_WRFK_STOP);
2126 }
2127 
2128 static void rtw8922a_rfk_band_changed(struct rtw89_dev *rtwdev,
2129 				      enum rtw89_phy_idx phy_idx,
2130 				      const struct rtw89_chan *chan)
2131 {
2132 	rtw89_phy_rfk_tssi_and_wait(rtwdev, phy_idx, chan, RTW89_TSSI_SCAN, 6);
2133 }
2134 
2135 static void rtw8922a_rfk_scan(struct rtw89_dev *rtwdev,
2136 			      struct rtw89_vif_link *rtwvif_link,
2137 			      bool start)
2138 {
2139 }
2140 
2141 static void rtw8922a_rfk_track(struct rtw89_dev *rtwdev)
2142 {
2143 }
2144 
2145 static void rtw8922a_set_txpwr_ref(struct rtw89_dev *rtwdev,
2146 				   enum rtw89_phy_idx phy_idx)
2147 {
2148 	s16 ref_ofdm = 0;
2149 	s16 ref_cck = 0;
2150 
2151 	rtw89_debug(rtwdev, RTW89_DBG_TXPWR, "[TXPWR] set txpwr reference\n");
2152 
2153 	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
2154 				     B_BE_PWR_REF_CTRL_OFDM, ref_ofdm);
2155 	rtw89_mac_txpwr_write32_mask(rtwdev, phy_idx, R_BE_PWR_REF_CTRL,
2156 				     B_BE_PWR_REF_CTRL_CCK, ref_cck);
2157 }
2158 
2159 static const struct rtw89_reg_def rtw8922a_txpwr_ref[][3] = {
2160 	{{ .addr = R_TXAGC_REF_DBM_P0, .mask = B_TXAGC_OFDM_REF_DBM_P0},
2161 	 { .addr = R_TXAGC_REF_DBM_P0, .mask = B_TXAGC_CCK_REF_DBM_P0},
2162 	 { .addr = R_TSSI_K_P0, .mask = B_TSSI_K_OFDM_P0}
2163 	},
2164 	{{ .addr = R_TXAGC_REF_DBM_RF1_P0, .mask = B_TXAGC_OFDM_REF_DBM_RF1_P0},
2165 	 { .addr = R_TXAGC_REF_DBM_RF1_P0, .mask = B_TXAGC_CCK_REF_DBM_RF1_P0},
2166 	 { .addr = R_TSSI_K_RF1_P0, .mask = B_TSSI_K_OFDM_RF1_P0}
2167 	},
2168 };
2169 
2170 static void rtw8922a_set_txpwr_diff(struct rtw89_dev *rtwdev,
2171 				    const struct rtw89_chan *chan,
2172 				    enum rtw89_phy_idx phy_idx)
2173 {
2174 	s16 pwr_ofst = rtw89_phy_ant_gain_pwr_offset(rtwdev, chan);
2175 	const struct rtw89_chip_info *chip = rtwdev->chip;
2176 	static const u32 path_ofst[] = {0x0, 0x100};
2177 	const struct rtw89_reg_def *txpwr_ref;
2178 	static const s16 tssi_k_base = 0x12;
2179 	s16 tssi_k_ofst = abs(pwr_ofst) + tssi_k_base;
2180 	s16 ofst_dec[RF_PATH_NUM_8922A];
2181 	s16 tssi_k[RF_PATH_NUM_8922A];
2182 	s16 pwr_ref_ofst;
2183 	s16 pwr_ref = 0;
2184 	u8 i;
2185 
2186 	if (rtwdev->hal.cv == CHIP_CAV)
2187 		pwr_ref = 16;
2188 
2189 	pwr_ref <<= chip->txpwr_factor_rf;
2190 	pwr_ref_ofst = pwr_ref - rtw89_phy_txpwr_bb_to_rf(rtwdev, abs(pwr_ofst));
2191 
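	/* A positive antenna-gain offset keeps the full reference on path A and
	 * lowers path B by |pwr_ofst| while raising its TSSI K; a negative
	 * offset applies the adjustment to path A instead.
	 */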
2192 	ofst_dec[RF_PATH_A] = pwr_ofst > 0 ? pwr_ref : pwr_ref_ofst;
2193 	ofst_dec[RF_PATH_B] = pwr_ofst > 0 ? pwr_ref_ofst : pwr_ref;
2194 	tssi_k[RF_PATH_A] = pwr_ofst > 0 ? tssi_k_base : tssi_k_ofst;
2195 	tssi_k[RF_PATH_B] = pwr_ofst > 0 ? tssi_k_ofst : tssi_k_base;
2196 
2197 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
2198 		txpwr_ref = rtw8922a_txpwr_ref[phy_idx];
2199 
2200 		rtw89_phy_write32_mask(rtwdev, txpwr_ref[0].addr + path_ofst[i],
2201 				       txpwr_ref[0].mask, ofst_dec[i]);
2202 		rtw89_phy_write32_mask(rtwdev, txpwr_ref[1].addr + path_ofst[i],
2203 				       txpwr_ref[1].mask, ofst_dec[i]);
2204 		rtw89_phy_write32_mask(rtwdev, txpwr_ref[2].addr + path_ofst[i],
2205 				       txpwr_ref[2].mask, tssi_k[i]);
2206 	}
2207 }
2208 
2209 static void rtw8922a_bb_tx_triangular(struct rtw89_dev *rtwdev, bool en,
2210 				      enum rtw89_phy_idx phy_idx)
2211 {
2212 	u8 ctrl = en ? 0x1 : 0x0;
2213 
2214 	rtw89_phy_write32_idx(rtwdev, R_BEDGE3, B_BEDGE_CFG, ctrl, phy_idx);
2215 }
2216 
2217 static void rtw8922a_set_tx_shape(struct rtw89_dev *rtwdev,
2218 				  const struct rtw89_chan *chan,
2219 				  enum rtw89_phy_idx phy_idx)
2220 {
2221 	const struct rtw89_rfe_parms *rfe_parms = rtwdev->rfe_parms;
2222 	const struct rtw89_tx_shape *tx_shape = &rfe_parms->tx_shape;
2223 	u8 tx_shape_idx;
2224 	u8 band, regd;
2225 
2226 	band = chan->band_type;
2227 	regd = rtw89_regd_get(rtwdev, band);
2228 	tx_shape_idx = (*tx_shape->lmt)[band][RTW89_RS_OFDM][regd];
2229 
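	/* A non-zero regulatory TX-shape index enables the triangular spectral
	 * shaping; index 0 leaves it disabled.
	 */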
2230 	if (tx_shape_idx == 0)
2231 		rtw8922a_bb_tx_triangular(rtwdev, false, phy_idx);
2232 	else
2233 		rtw8922a_bb_tx_triangular(rtwdev, true, phy_idx);
2234 }
2235 
2236 static void rtw8922a_set_txpwr(struct rtw89_dev *rtwdev,
2237 			       const struct rtw89_chan *chan,
2238 			       enum rtw89_phy_idx phy_idx)
2239 {
2240 	rtw89_phy_set_txpwr_byrate(rtwdev, chan, phy_idx);
2241 	rtw89_phy_set_txpwr_offset(rtwdev, chan, phy_idx);
2242 	rtw8922a_set_tx_shape(rtwdev, chan, phy_idx);
2243 	rtw89_phy_set_txpwr_limit(rtwdev, chan, phy_idx);
2244 	rtw89_phy_set_txpwr_limit_ru(rtwdev, chan, phy_idx);
2245 	rtw8922a_set_txpwr_diff(rtwdev, chan, phy_idx);
2246 	rtw8922a_set_txpwr_ref(rtwdev, phy_idx);
2247 }
2248 
2249 static void rtw8922a_set_txpwr_ctrl(struct rtw89_dev *rtwdev,
2250 				    enum rtw89_phy_idx phy_idx)
2251 {
2252 	rtw8922a_set_txpwr_ref(rtwdev, phy_idx);
2253 }
2254 
2255 static void rtw8922a_ctrl_trx_path(struct rtw89_dev *rtwdev,
2256 				   enum rtw89_rf_path tx_path, u8 tx_nss,
2257 				   enum rtw89_rf_path rx_path, u8 rx_nss)
2258 {
2259 	enum rtw89_phy_idx phy_idx;
2260 
2261 	for (phy_idx = RTW89_PHY_0; phy_idx <= RTW89_PHY_1; phy_idx++) {
2262 		rtw8922a_ctrl_tx_path_tmac(rtwdev, tx_path, phy_idx);
2263 		rtw8922a_ctrl_rx_path_tmac(rtwdev, rx_path, phy_idx);
2264 		rtw8922a_cfg_rx_nss_limit(rtwdev, rx_nss, phy_idx);
2265 	}
2266 }
2267 
2268 static void rtw8922a_ctrl_nbtg_bt_tx(struct rtw89_dev *rtwdev, bool en,
2269 				     enum rtw89_phy_idx phy_idx)
2270 {
2271 	if (en) {
2272 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x3, phy_idx);
2273 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
2274 				      0xf, phy_idx);
2275 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
2276 				      0x0, phy_idx);
2277 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x0, phy_idx);
2278 		rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x80, phy_idx);
2279 		rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x8080, phy_idx);
2280 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x34, phy_idx);
2281 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x34, phy_idx);
2282 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x3, phy_idx);
2283 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
2284 				      0xf, phy_idx);
2285 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
2286 				      0x0, phy_idx);
2287 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x0, phy_idx);
2288 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x80, phy_idx);
2289 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x8080, phy_idx);
2290 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x34, phy_idx);
2291 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x34, phy_idx);
2292 	} else {
2293 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_A, B_FORCE_FIR_A, 0x0, phy_idx);
2294 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_A, B_RXBY_WBADC_A,
2295 				      0x0, phy_idx);
2296 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_A, B_BT_RXBY_WBADC_A,
2297 				      0x1, phy_idx);
2298 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_A, B_BT_TRK_OFF_A, 0x1, phy_idx);
2299 		rtw89_phy_write32_idx(rtwdev, R_OP1DB_A, B_OP1DB_A, 0x1a, phy_idx);
2300 		rtw89_phy_write32_idx(rtwdev, R_OP1DB1_A, B_TIA10_A, 0x2a2a, phy_idx);
2301 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_A, B_LNA_IBADC_A, 0x7a6, phy_idx);
2302 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_A, B_BKOFF_IBADC_A, 0x26, phy_idx);
2303 		rtw89_phy_write32_idx(rtwdev, R_FORCE_FIR_B, B_FORCE_FIR_B, 0x0, phy_idx);
2304 		rtw89_phy_write32_idx(rtwdev, R_RXBY_WBADC_B, B_RXBY_WBADC_B,
2305 				      0x0, phy_idx);
2306 		rtw89_phy_write32_idx(rtwdev, R_BT_RXBY_WBADC_B, B_BT_RXBY_WBADC_B,
2307 				      0x1, phy_idx);
2308 		rtw89_phy_write32_idx(rtwdev, R_BT_SHARE_B, B_BT_TRK_OFF_B, 0x1, phy_idx);
2309 		rtw89_phy_write32_idx(rtwdev, R_LNA_OP, B_LNA6, 0x20, phy_idx);
2310 		rtw89_phy_write32_idx(rtwdev, R_LNA_TIA, B_TIA10_B, 0x2a30, phy_idx);
2311 		rtw89_phy_write32_idx(rtwdev, R_BACKOFF_B, B_LNA_IBADC_B, 0x7a6, phy_idx);
2312 		rtw89_phy_write32_idx(rtwdev, R_BKOFF_B, B_BKOFF_IBADC_B, 0x26, phy_idx);
2313 	}
2314 }
2315 
2316 static void rtw8922a_bb_cfg_txrx_path(struct rtw89_dev *rtwdev)
2317 {
2318 	const struct rtw89_chan *chan = rtw89_chan_get(rtwdev, RTW89_CHANCTX_0);
2319 	enum rtw89_band band = chan->band_type;
2320 	struct rtw89_hal *hal = &rtwdev->hal;
2321 	u8 ntx_path = RF_PATH_AB;
2322 	u32 tx_en0, tx_en1;
2323 
2324 	if (hal->antenna_tx == RF_A)
2325 		ntx_path = RF_PATH_A;
2326 	else if (hal->antenna_tx == RF_B)
2327 		ntx_path = RF_PATH_B;
2328 
2329 	rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, true);
2330 	if (rtwdev->dbcc_en)
2331 		rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band,
2332 				   &tx_en1, true);
2333 
2334 	rtw8922a_ctrl_trx_path(rtwdev, ntx_path, 2, RF_PATH_AB, 2);
2335 
2336 	rtw8922a_hal_reset(rtwdev, RTW89_PHY_0, RTW89_MAC_0, band, &tx_en0, false);
2337 	if (rtwdev->dbcc_en)
2338 		rtw8922a_hal_reset(rtwdev, RTW89_PHY_1, RTW89_MAC_1, band,
2339 				   &tx_en1, false);
2340 }
2341 
2342 static u8 rtw8922a_get_thermal(struct rtw89_dev *rtwdev, enum rtw89_rf_path rf_path)
2343 {
2344 	struct rtw89_power_trim_info *info = &rtwdev->pwr_trim;
2345 	struct rtw89_hal *hal = &rtwdev->hal;
2346 	int th;
2347 
2348 	/* read thermal only if debugging or thermal protection enabled */
2349 	if (!rtw89_debug_is_enabled(rtwdev, RTW89_DBG_CFO | RTW89_DBG_RFK_TRACK) &&
2350 	    !hal->thermal_prot_th)
2351 		return 80;
2352 
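	/* Re-trigger the RF thermal meter, give it 200 us to settle, then read
	 * the raw value and apply the per-path thermal trim offset.
	 */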
2353 	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2354 	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x0);
2355 	rtw89_write_rf(rtwdev, rf_path, RR_TM, RR_TM_TRI, 0x1);
2356 
2357 	fsleep(200);
2358 
2359 	th = rtw89_read_rf(rtwdev, rf_path, RR_TM, RR_TM_VAL_V1);
2360 	th += (s8)info->thermal_trim[rf_path];
2361 
2362 	return clamp_t(int, th, 0, U8_MAX);
2363 }
2364 
2365 static void rtw8922a_btc_set_rfe(struct rtw89_dev *rtwdev)
2366 {
2367 	union rtw89_btc_module_info *md = &rtwdev->btc.mdinfo;
2368 	struct rtw89_btc_module_v7 *module = &md->md_v7;
2369 
2370 	module->rfe_type = rtwdev->efuse.rfe_type;
2371 	module->kt_ver = rtwdev->hal.cv;
2372 	module->bt_solo = 0;
2373 	module->switch_type = BTC_SWITCH_INTERNAL;
2374 	module->wa_type = 0;
2375 
2376 	module->ant.type = BTC_ANT_SHARED;
2377 	module->ant.num = 2;
2378 	module->ant.isolation = 10;
2379 	module->ant.diversity = 0;
2380 	module->ant.single_pos = RF_PATH_A;
2381 	module->ant.btg_pos = RF_PATH_B;
2382 
2383 	if (module->kt_ver <= 1)
2384 		module->wa_type |= BTC_WA_HFP_ZB;
2385 
2386 	rtwdev->btc.cx.other.type = BTC_3CX_NONE;
2387 
2388 	if (module->rfe_type == 0) {
2389 		rtwdev->btc.dm.error.map.rfe_type0 = true;
2390 		return;
2391 	}
2392 
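	/* Odd RFE types use 2 antennas (shared with BT); even types use 3 with
	 * BT on a dedicated antenna. kt_ver 0 parts are forced back to 2.
	 */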
2393 	module->ant.num = (module->rfe_type % 2) ?  2 : 3;
2394 
2395 	if (module->kt_ver == 0)
2396 		module->ant.num = 2;
2397 
2398 	if (module->ant.num == 3) {
2399 		module->ant.type = BTC_ANT_DEDICATED;
2400 		module->bt_pos = BTC_BT_ALONE;
2401 	} else {
2402 		module->ant.type = BTC_ANT_SHARED;
2403 		module->bt_pos = BTC_BT_BTG;
2404 	}
2405 	rtwdev->btc.btg_pos = module->ant.btg_pos;
2406 	rtwdev->btc.ant_type = module->ant.type;
2407 }
2408 
2409 static
2410 void rtw8922a_set_trx_mask(struct rtw89_dev *rtwdev, u8 path, u8 group, u32 val)
2411 {
2412 	rtw89_write_rf(rtwdev, path, RR_LUTWA, RFREG_MASK, group);
2413 	rtw89_write_rf(rtwdev, path, RR_LUTWD0, RFREG_MASK, val);
2414 }
2415 
2416 static void rtw8922a_btc_init_cfg(struct rtw89_dev *rtwdev)
2417 {
2418 	struct rtw89_btc *btc = &rtwdev->btc;
2419 	struct rtw89_btc_ant_info_v7 *ant = &btc->mdinfo.md_v7.ant;
2420 	u32 wl_pri, path_min, path_max;
2421 	u8 path;
2422 
2423 	/* for 1-Ant && 1-ss case: only 1-path */
2424 	if (ant->num == 1) {
2425 		path_min = ant->single_pos;
2426 		path_max = path_min;
2427 	} else {
2428 		path_min = RF_PATH_A;
2429 		path_max = RF_PATH_B;
2430 	}
2431 
2432 	path = path_min;
2433 
2434 	for (path = path_min; path <= path_max; path++) {
2435 		/* set DEBUG_LUT_RFMODE_MASK = 1 to start trx-mask-setup */
2436 		rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, BIT(17));
2437 
2438 		/* if GNT_WL=0 && BT=SS_group --> WL Tx/Rx = THRU  */
2439 		rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_SS_GROUP, 0x5ff);
2440 
2441 		/* if GNT_WL=0 && BT=Rx_group --> WL-Rx = THRU + WL-Tx = MASK */
2442 		rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_RX_GROUP, 0x5df);
2443 
2444 		/* if GNT_WL = 0 && BT = Tx_group -->
2445 		 * Shared-Ant && BTG-path:WL mask(0x55f), others:WL THRU(0x5ff)
2446 		 */
2447 		if (btc->ant_type == BTC_ANT_SHARED && btc->btg_pos == path)
2448 			rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x55f);
2449 		else
2450 			rtw8922a_set_trx_mask(rtwdev, path, BTC_BT_TX_GROUP, 0x5ff);
2451 
2452 		rtw89_write_rf(rtwdev, path, RR_LUTWE, RFREG_MASK, 0);
2453 	}
2454 
2455 	/* set WL PTA Hi-Pri: Ack-Tx, beacon-tx, Trig-frame-Tx, Null-Tx */
2456 	wl_pri = B_BTC_RSP_ACK_HI | B_BTC_TX_BCN_HI | B_BTC_TX_TRI_HI |
2457 		 B_BTC_TX_NULL_HI;
2458 	rtw89_write32(rtwdev, R_BTC_COEX_WL_REQ_BE, wl_pri);
2459 
2460 	/* set PTA break table */
2461 	rtw89_write32(rtwdev, R_BE_BT_BREAK_TABLE, BTC_BREAK_PARAM);
2462 
2463 	/* ZB coex table init for HFP PTA req-cmd bit-4 define issue COEX-900 */
2464 	rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_0, 0xda5a5a5a);
2465 
2466 	rtw89_write32(rtwdev, R_BTC_ZB_COEX_TBL_1, 0xda5a5a5a);
2467 
2468 	rtw89_write32(rtwdev, R_BTC_ZB_BREAK_TBL, 0xf0ffffff);
2469 	btc->cx.wl.status.map.init_ok = true;
2470 }
2471 
2472 static void
2473 rtw8922a_btc_set_wl_txpwr_ctrl(struct rtw89_dev *rtwdev, u32 txpwr_val)
2474 {
2475 	u16 ctrl_all_time = u32_get_bits(txpwr_val, GENMASK(15, 0));
2476 	u16 ctrl_gnt_bt = u32_get_bits(txpwr_val, GENMASK(31, 16));
2477 
2478 	switch (ctrl_all_time) {
2479 	case 0xffff:
2480 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2481 					     B_BE_FORCE_PWR_BY_RATE_EN, 0x0);
2482 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2483 					     B_BE_FORCE_PWR_BY_RATE_VAL, 0x0);
2484 		break;
2485 	default:
2486 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2487 					     B_BE_FORCE_PWR_BY_RATE_VAL, ctrl_all_time);
2488 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_RATE_CTRL,
2489 					     B_BE_FORCE_PWR_BY_RATE_EN, 0x1);
2490 		break;
2491 	}
2492 
2493 	switch (ctrl_gnt_bt) {
2494 	case 0xffff:
2495 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL,
2496 					     B_BE_PWR_BT_EN, 0x0);
2497 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL,
2498 					     B_BE_PWR_BT_VAL, 0x0);
2499 		break;
2500 	default:
2501 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_COEX_CTRL,
2502 					     B_BE_PWR_BT_VAL, ctrl_gnt_bt);
2503 		rtw89_mac_txpwr_write32_mask(rtwdev, RTW89_PHY_0, R_BE_PWR_REG_CTRL,
2504 					     B_BE_PWR_BT_EN, 0x1);
2505 		break;
2506 	}
2507 }
2508 
2509 static
2510 s8 rtw8922a_btc_get_bt_rssi(struct rtw89_dev *rtwdev, s8 val)
2511 {
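	/* Map the BT RSSI in dBm, clamped to [-100, 0], onto a 0..100 scale. */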
2512 	return clamp_t(s8, val, -100, 0) + 100;
2513 }
2514 
2515 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_ul[] = {
2516 	{255, 0, 0, 7}, /* 0 -> original */
2517 	{255, 2, 0, 7}, /* 1 -> for BT-connected ACI issue && BTG co-rx */
2518 	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2519 	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2520 	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2521 	{255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */
2522 	{6, 1, 0, 7},
2523 	{13, 1, 0, 7},
2524 	{13, 1, 0, 7}
2525 };
2526 
2527 static const struct rtw89_btc_rf_trx_para rtw89_btc_8922a_rf_dl[] = {
2528 	{255, 0, 0, 7}, /* 0 -> original */
2529 	{255, 2, 0, 7}, /* 1 -> reserved for shared-antenna */
2530 	{255, 0, 0, 7}, /* 2 -> reserved for shared-antenna */
2531 	{255, 0, 0, 7}, /* 3 -> reserved for shared-antenna */
2532 	{255, 0, 0, 7}, /* 4 -> reserved for shared-antenna */
2533 	{255, 1, 0, 7}, /* the below id is for non-shared-antenna free-run */
2534 	{255, 1, 0, 7},
2535 	{255, 1, 0, 7},
2536 	{255, 1, 0, 7}
2537 };
2538 
2539 static const u8 rtw89_btc_8922a_wl_rssi_thres[BTC_WL_RSSI_THMAX] = {60, 50, 40, 30};
2540 static const u8 rtw89_btc_8922a_bt_rssi_thres[BTC_BT_RSSI_THMAX] = {50, 40, 30, 20};
2541 
2542 static const struct rtw89_btc_fbtc_mreg rtw89_btc_8922a_mon_reg[] = {
2543 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe300),
2544 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe320),
2545 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe324),
2546 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe328),
2547 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe32c),
2548 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe330),
2549 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe334),
2550 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe338),
2551 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe344),
2552 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe348),
2553 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe34c),
2554 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0xe350),
2555 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a2c),
2556 	RTW89_DEF_FBTC_MREG(REG_MAC, 4, 0x11a50),
2557 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x980),
2558 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x660),
2559 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x1660),
2560 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x418c),
2561 	RTW89_DEF_FBTC_MREG(REG_BB, 4, 0x518c),
2562 };
2563 
2564 static
2565 void rtw8922a_btc_update_bt_cnt(struct rtw89_dev *rtwdev)
2566 {
2567 	/* Feature move to firmware */
2568 }
2569 
2570 static
2571 void rtw8922a_btc_wl_s1_standby(struct rtw89_dev *rtwdev, bool state)
2572 {
2573 	if (!state) {
2574 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2575 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2576 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110);
2577 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x01018);
2578 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000);
2579 
2580 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000);
2581 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1);
2582 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110);
2583 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x01018);
2584 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000);
2585 	} else {
2586 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x80000);
2587 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWA, RFREG_MASK, 0x1);
2588 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD1, RFREG_MASK, 0x0c110);
2589 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWD0, RFREG_MASK, 0x09018);
2590 		rtw89_write_rf(rtwdev, RF_PATH_B, RR_LUTWE, RFREG_MASK, 0x00000);
2591 
2592 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x80000);
2593 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWA, RFREG_MASK, 0x1);
2594 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD1, RFREG_MASK, 0x0c110);
2595 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWD0, RFREG_MASK, 0x09018);
2596 		rtw89_write_rf(rtwdev, RF_PATH_A, RR_LUTWE, RFREG_MASK, 0x00000);
2597 	}
2598 }
2599 
2600 static void rtw8922a_btc_set_wl_rx_gain(struct rtw89_dev *rtwdev, u32 level)
2601 {
2602 }
2603 
2604 static void rtw8922a_fill_freq_with_ppdu(struct rtw89_dev *rtwdev,
2605 					 struct rtw89_rx_phy_ppdu *phy_ppdu,
2606 					 struct ieee80211_rx_status *status)
2607 {
2608 	u8 chan_idx = phy_ppdu->chan_idx;
2609 	enum nl80211_band band;
2610 	u8 ch;
2611 
2612 	if (chan_idx == 0)
2613 		return;
2614 
2615 	rtw89_decode_chan_idx(rtwdev, chan_idx, &ch, &band);
2616 	status->freq = ieee80211_channel_to_frequency(ch, band);
2617 	status->band = band;
2618 }
2619 
2620 static void rtw8922a_query_ppdu(struct rtw89_dev *rtwdev,
2621 				struct rtw89_rx_phy_ppdu *phy_ppdu,
2622 				struct ieee80211_rx_status *status)
2623 {
2624 	u8 path;
2625 	u8 *rx_power = phy_ppdu->rssi;
2626 
2627 	if (!status->signal)
2628 		status->signal = RTW89_RSSI_RAW_TO_DBM(max(rx_power[RF_PATH_A],
2629 							   rx_power[RF_PATH_B]));
2630 
2631 	for (path = 0; path < rtwdev->chip->rf_path_num; path++) {
2632 		status->chains |= BIT(path);
2633 		status->chain_signal[path] = RTW89_RSSI_RAW_TO_DBM(rx_power[path]);
2634 	}
2635 	if (phy_ppdu->valid)
2636 		rtw8922a_fill_freq_with_ppdu(rtwdev, phy_ppdu, status);
2637 }
2638 
2639 static void rtw8922a_convert_rpl_to_rssi(struct rtw89_dev *rtwdev,
2640 					 struct rtw89_rx_phy_ppdu *phy_ppdu)
2641 {
2642 	/* Mapping to BW: 5, 10, 20, 40, 80, 160, 80_80 */
2643 	static const u8 bw_compensate[] = {0, 0, 0, 6, 12, 18, 0};
2644 	u8 *rssi = phy_ppdu->rssi;
2645 	u8 compensate = 0;
2646 	u16 rpl_tmp;
2647 	u8 i;
2648 
2649 	if (phy_ppdu->bw_idx < ARRAY_SIZE(bw_compensate))
2650 		compensate = bw_compensate[phy_ppdu->bw_idx];
2651 
2652 	for (i = 0; i < RF_PATH_NUM_8922A; i++) {
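	/* Replace the per-path RSSI with the reported RPL; OFDM rates use the
	 * frequency-domain RPL plus a bandwidth-dependent compensation.
	 */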
2653 		if (!(phy_ppdu->rx_path_en & BIT(i))) {
2654 			rssi[i] = 0;
2655 			phy_ppdu->rpl_path[i] = 0;
2656 			phy_ppdu->rpl_fd[i] = 0;
2657 		}
2658 		if (phy_ppdu->rate >= RTW89_HW_RATE_OFDM6) {
2659 			rpl_tmp = phy_ppdu->rpl_fd[i];
2660 			if (rpl_tmp)
2661 				rpl_tmp += compensate;
2662 
2663 			phy_ppdu->rpl_path[i] = rpl_tmp;
2664 		}
2665 		rssi[i] = phy_ppdu->rpl_path[i];
2666 	}
2667 
2668 	phy_ppdu->rssi_avg = phy_ppdu->rpl_avg;
2669 }
2670 
2671 static void rtw8922a_phy_rpt_to_rssi(struct rtw89_dev *rtwdev,
2672 				     struct rtw89_rx_desc_info *desc_info,
2673 				     struct ieee80211_rx_status *rx_status)
2674 {
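	/* The descriptor RSSI appears to be reported in 0.25 dB steps (hence
	 * the ">> 2"); skip readings outside the plausible 0..MAX_RSSI range.
	 */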
2675 	if (desc_info->rssi <= 0x1 || (desc_info->rssi >> 2) > MAX_RSSI)
2676 		return;
2677 
2678 	rx_status->signal = (desc_info->rssi >> 2) - MAX_RSSI;
2679 }
2680 
2681 static int rtw8922a_mac_enable_bb_rf(struct rtw89_dev *rtwdev)
2682 {
2683 	rtw89_write8_set(rtwdev, R_BE_FEN_RST_ENABLE,
2684 			 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
2685 	rtw89_write32(rtwdev, R_BE_DMAC_SYS_CR32B, 0x7FF97FF9);
2686 
2687 	return 0;
2688 }
2689 
2690 static int rtw8922a_mac_disable_bb_rf(struct rtw89_dev *rtwdev)
2691 {
2692 	rtw89_write8_clr(rtwdev, R_BE_FEN_RST_ENABLE,
2693 			 B_BE_FEN_BBPLAT_RSTB | B_BE_FEN_BB_IP_RSTN);
2694 
2695 	return 0;
2696 }
2697 
2698 #ifdef CONFIG_PM
2699 static const struct wiphy_wowlan_support rtw_wowlan_stub_8922a = {
2700 	.flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_DISCONNECT |
2701 		 WIPHY_WOWLAN_NET_DETECT,
2702 	.n_patterns = RTW89_MAX_PATTERN_NUM,
2703 	.pattern_max_len = RTW89_MAX_PATTERN_SIZE,
2704 	.pattern_min_len = 1,
2705 	.max_nd_match_sets = RTW89_SCANOFLD_MAX_SSID,
2706 };
2707 #endif
2708 
2709 static const struct rtw89_chip_ops rtw8922a_chip_ops = {
2710 	.enable_bb_rf		= rtw8922a_mac_enable_bb_rf,
2711 	.disable_bb_rf		= rtw8922a_mac_disable_bb_rf,
2712 	.bb_preinit		= rtw8922a_bb_preinit,
2713 	.bb_postinit		= rtw8922a_bb_postinit,
2714 	.bb_reset		= rtw8922a_bb_reset,
2715 	.bb_sethw		= rtw8922a_bb_sethw,
2716 	.read_rf		= rtw89_phy_read_rf_v2,
2717 	.write_rf		= rtw89_phy_write_rf_v2,
2718 	.set_channel		= rtw8922a_set_channel,
2719 	.set_channel_help	= rtw8922a_set_channel_help,
2720 	.read_efuse		= rtw8922a_read_efuse,
2721 	.read_phycap		= rtw8922a_read_phycap,
2722 	.fem_setup		= NULL,
2723 	.rfe_gpio		= NULL,
2724 	.rfk_hw_init		= rtw8922a_rfk_hw_init,
2725 	.rfk_init		= rtw8922a_rfk_init,
2726 	.rfk_init_late		= rtw8922a_rfk_init_late,
2727 	.rfk_channel		= rtw8922a_rfk_channel,
2728 	.rfk_band_changed	= rtw8922a_rfk_band_changed,
2729 	.rfk_scan		= rtw8922a_rfk_scan,
2730 	.rfk_track		= rtw8922a_rfk_track,
2731 	.power_trim		= rtw8922a_power_trim,
2732 	.set_txpwr		= rtw8922a_set_txpwr,
2733 	.set_txpwr_ctrl		= rtw8922a_set_txpwr_ctrl,
2734 	.init_txpwr_unit	= NULL,
2735 	.get_thermal		= rtw8922a_get_thermal,
2736 	.ctrl_btg_bt_rx		= rtw8922a_ctrl_btg_bt_rx,
2737 	.query_ppdu		= rtw8922a_query_ppdu,
2738 	.convert_rpl_to_rssi	= rtw8922a_convert_rpl_to_rssi,
2739 	.phy_rpt_to_rssi	= rtw8922a_phy_rpt_to_rssi,
2740 	.ctrl_nbtg_bt_tx	= rtw8922a_ctrl_nbtg_bt_tx,
2741 	.cfg_txrx_path		= rtw8922a_bb_cfg_txrx_path,
2742 	.set_txpwr_ul_tb_offset	= NULL,
2743 	.digital_pwr_comp	= rtw8922a_digital_pwr_comp,
2744 	.pwr_on_func		= rtw8922a_pwr_on_func,
2745 	.pwr_off_func		= rtw8922a_pwr_off_func,
2746 	.query_rxdesc		= rtw89_core_query_rxdesc_v2,
2747 	.fill_txdesc		= rtw89_core_fill_txdesc_v2,
2748 	.fill_txdesc_fwcmd	= rtw89_core_fill_txdesc_fwcmd_v2,
2749 	.cfg_ctrl_path		= rtw89_mac_cfg_ctrl_path_v2,
2750 	.mac_cfg_gnt		= rtw89_mac_cfg_gnt_v2,
2751 	.stop_sch_tx		= rtw89_mac_stop_sch_tx_v2,
2752 	.resume_sch_tx		= rtw89_mac_resume_sch_tx_v2,
2753 	.h2c_dctl_sec_cam	= rtw89_fw_h2c_dctl_sec_cam_v2,
2754 	.h2c_default_cmac_tbl	= rtw89_fw_h2c_default_cmac_tbl_g7,
2755 	.h2c_assoc_cmac_tbl	= rtw89_fw_h2c_assoc_cmac_tbl_g7,
2756 	.h2c_ampdu_cmac_tbl	= rtw89_fw_h2c_ampdu_cmac_tbl_g7,
2757 	.h2c_txtime_cmac_tbl	= rtw89_fw_h2c_txtime_cmac_tbl_g7,
2758 	.h2c_default_dmac_tbl	= rtw89_fw_h2c_default_dmac_tbl_v2,
2759 	.h2c_update_beacon	= rtw89_fw_h2c_update_beacon_be,
2760 	.h2c_ba_cam		= rtw89_fw_h2c_ba_cam_v1,
2761 
2762 	.btc_set_rfe		= rtw8922a_btc_set_rfe,
2763 	.btc_init_cfg		= rtw8922a_btc_init_cfg,
2764 	.btc_set_wl_pri		= NULL,
2765 	.btc_set_wl_txpwr_ctrl	= rtw8922a_btc_set_wl_txpwr_ctrl,
2766 	.btc_get_bt_rssi	= rtw8922a_btc_get_bt_rssi,
2767 	.btc_update_bt_cnt	= rtw8922a_btc_update_bt_cnt,
2768 	.btc_wl_s1_standby	= rtw8922a_btc_wl_s1_standby,
2769 	.btc_set_wl_rx_gain	= rtw8922a_btc_set_wl_rx_gain,
2770 	.btc_set_policy		= rtw89_btc_set_policy_v1,
2771 };
2772 
2773 const struct rtw89_chip_info rtw8922a_chip_info = {
2774 	.chip_id		= RTL8922A,
2775 	.chip_gen		= RTW89_CHIP_BE,
2776 	.ops			= &rtw8922a_chip_ops,
2777 	.mac_def		= &rtw89_mac_gen_be,
2778 	.phy_def		= &rtw89_phy_gen_be,
2779 	.fw_basename		= RTW8922A_FW_BASENAME,
2780 	.fw_format_max		= RTW8922A_FW_FORMAT_MAX,
2781 	.try_ce_fw		= false,
2782 	.bbmcu_nr		= 1,
2783 	.needed_fw_elms		= RTW89_BE_GEN_DEF_NEEDED_FW_ELEMENTS,
2784 	.fw_blacklist		= &rtw89_fw_blacklist_default,
2785 	.fifo_size		= 589824,
2786 	.small_fifo_size	= false,
2787 	.dle_scc_rsvd_size	= 0,
2788 	.max_amsdu_limit	= 8000,
2789 	.dis_2g_40m_ul_ofdma	= false,
2790 	.rsvd_ple_ofst		= 0x8f800,
2791 	.hfc_param_ini		= rtw8922a_hfc_param_ini_pcie,
2792 	.dle_mem		= rtw8922a_dle_mem_pcie,
2793 	.wde_qempty_acq_grpnum	= 4,
2794 	.wde_qempty_mgq_grpsel	= 4,
2795 	.rf_base_addr		= {0xe000, 0xf000},
2796 	.thermal_th		= {0xad, 0xb4},
2797 	.pwr_on_seq		= NULL,
2798 	.pwr_off_seq		= NULL,
2799 	.bb_table		= NULL,
2800 	.bb_gain_table		= NULL,
2801 	.rf_table		= {},
2802 	.nctl_table		= NULL,
2803 	.nctl_post_table	= NULL,
2804 	.dflt_parms		= NULL, /* load parm from fw */
2805 	.rfe_parms_conf		= NULL, /* load parm from fw */
2806 	.txpwr_factor_bb	= 3,
2807 	.txpwr_factor_rf	= 2,
2808 	.txpwr_factor_mac	= 1,
2809 	.dig_table		= NULL,
2810 	.dig_regs		= &rtw8922a_dig_regs,
2811 	.tssi_dbw_table		= NULL,
2812 	.support_macid_num	= 32,
2813 	.support_link_num	= 2,
2814 	.support_chanctx_num	= 2,
2815 	.support_rnr		= true,
2816 	.support_bands		= BIT(NL80211_BAND_2GHZ) |
2817 				  BIT(NL80211_BAND_5GHZ) |
2818 				  BIT(NL80211_BAND_6GHZ),
2819 	.support_bandwidths	= BIT(NL80211_CHAN_WIDTH_20) |
2820 				  BIT(NL80211_CHAN_WIDTH_40) |
2821 				  BIT(NL80211_CHAN_WIDTH_80) |
2822 				  BIT(NL80211_CHAN_WIDTH_160),
2823 	.support_unii4		= true,
2824 	.support_ant_gain	= true,
2825 	.support_tas		= false,
2826 	.ul_tb_waveform_ctrl	= false,
2827 	.ul_tb_pwr_diff		= false,
2828 	.rx_freq_frome_ie	= false,
2829 	.hw_sec_hdr		= true,
2830 	.hw_mgmt_tx_encrypt	= true,
2831 	.hw_tkip_crypto		= true,
2832 	.rf_path_num		= 2,
2833 	.tx_nss			= 2,
2834 	.rx_nss			= 2,
2835 	.acam_num		= 128,
2836 	.bcam_num		= 20,
2837 	.scam_num		= 32,
2838 	.bacam_num		= 24,
2839 	.bacam_dynamic_num	= 8,
2840 	.bacam_ver		= RTW89_BACAM_V1,
2841 	.ppdu_max_usr		= 16,
2842 	.sec_ctrl_efuse_size	= 4,
2843 	.physical_efuse_size	= 0x1300,
2844 	.logical_efuse_size	= 0x70000,
2845 	.limit_efuse_size	= 0x40000,
2846 	.dav_phy_efuse_size	= 0,
2847 	.dav_log_efuse_size	= 0,
2848 	.efuse_blocks		= rtw8922a_efuse_blocks,
2849 	.phycap_addr		= 0x1700,
2850 	.phycap_size		= 0x38,
2851 	.para_ver		= 0xf,
2852 	.wlcx_desired		= 0x07110000,
2853 	.btcx_desired		= 0x7,
2854 	.scbd			= 0x1,
2855 	.mailbox		= 0x1,
2856 
2857 	.afh_guard_ch		= 6,
2858 	.wl_rssi_thres		= rtw89_btc_8922a_wl_rssi_thres,
2859 	.bt_rssi_thres		= rtw89_btc_8922a_bt_rssi_thres,
2860 	.rssi_tol		= 2,
2861 	.mon_reg_num		= ARRAY_SIZE(rtw89_btc_8922a_mon_reg),
2862 	.mon_reg		= rtw89_btc_8922a_mon_reg,
2863 	.rf_para_ulink_num	= ARRAY_SIZE(rtw89_btc_8922a_rf_ul),
2864 	.rf_para_ulink		= rtw89_btc_8922a_rf_ul,
2865 	.rf_para_dlink_num	= ARRAY_SIZE(rtw89_btc_8922a_rf_dl),
2866 	.rf_para_dlink		= rtw89_btc_8922a_rf_dl,
2867 	.ps_mode_supported	= BIT(RTW89_PS_MODE_RFOFF) |
2868 				  BIT(RTW89_PS_MODE_CLK_GATED) |
2869 				  BIT(RTW89_PS_MODE_PWR_GATED),
2870 	.low_power_hci_modes	= 0,
2871 	.h2c_cctl_func_id	= H2C_FUNC_MAC_CCTLINFO_UD_G7,
2872 	.hci_func_en_addr	= R_BE_HCI_FUNC_EN,
2873 	.h2c_desc_size		= sizeof(struct rtw89_rxdesc_short_v2),
2874 	.txwd_body_size		= sizeof(struct rtw89_txwd_body_v2),
2875 	.txwd_info_size		= sizeof(struct rtw89_txwd_info_v2),
2876 	.h2c_ctrl_reg		= R_BE_H2CREG_CTRL,
2877 	.h2c_counter_reg	= {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_H2C_DEQ_CNT_MASK >> 8},
2878 	.h2c_regs		= rtw8922a_h2c_regs,
2879 	.c2h_ctrl_reg		= R_BE_C2HREG_CTRL,
2880 	.c2h_counter_reg	= {R_BE_UDM1 + 1, B_BE_UDM1_HALMAC_C2H_ENQ_CNT_MASK >> 8},
2881 	.c2h_regs		= rtw8922a_c2h_regs,
2882 	.page_regs		= &rtw8922a_page_regs,
2883 	.wow_reason_reg		= rtw8922a_wow_wakeup_regs,
2884 	.cfo_src_fd		= true,
2885 	.cfo_hw_comp            = true,
2886 	.dcfo_comp		= NULL,
2887 	.dcfo_comp_sft		= 0,
2888 	.imr_info		= NULL,
2889 	.imr_dmac_table		= &rtw8922a_imr_dmac_table,
2890 	.imr_cmac_table		= &rtw8922a_imr_cmac_table,
2891 	.rrsr_cfgs		= &rtw8922a_rrsr_cfgs,
2892 	.bss_clr_vld		= {R_BSS_CLR_VLD_V2, B_BSS_CLR_VLD0_V2},
2893 	.bss_clr_map_reg	= R_BSS_CLR_MAP_V2,
2894 	.rfkill_init		= &rtw8922a_rfkill_regs,
2895 	.rfkill_get		= {R_BE_GPIO_EXT_CTRL, B_BE_GPIO_IN_9},
2896 	.dma_ch_mask		= 0,
2897 	.edcca_regs		= &rtw8922a_edcca_regs,
2898 #ifdef CONFIG_PM
2899 	.wowlan_stub		= &rtw_wowlan_stub_8922a,
2900 #endif
2901 	.xtal_info		= NULL,
2902 };
2903 EXPORT_SYMBOL(rtw8922a_chip_info);
2904 
2905 const struct rtw89_chip_variant rtw8922ae_vs_variant = {
2906 	.no_mcs_12_13 = true,
2907 	.fw_min_ver_code = RTW89_FW_VER_CODE(0, 35, 54, 0),
2908 };
2909 EXPORT_SYMBOL(rtw8922ae_vs_variant);
2910 
2911 MODULE_FIRMWARE(RTW8922A_MODULE_FIRMWARE);
2912 MODULE_AUTHOR("Realtek Corporation");
2913 MODULE_DESCRIPTION("Realtek 802.11be wireless 8922A driver");
2914 MODULE_LICENSE("Dual BSD/GPL");
2915