xref: /linux/drivers/net/wireless/realtek/rtw89/phy_be.c (revision 1fd1dc41724319406b0aff221a352a400b0ddfc5)
1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2023  Realtek Corporation
3  */
4 
5 #include "chan.h"
6 #include "debug.h"
7 #include "mac.h"
8 #include "phy.h"
9 #include "reg.h"
10 
/* CCX (IFS-CLM / NHM / EDCCA measurement) register and field map for the
 * first BE register layout.  Only addresses/masks live here; the common
 * CCX code does the actual accesses.
 */
static const struct rtw89_ccx_regs rtw89_ccx_regs_be = {
	.setting_addr = R_CCX,
	.edcca_opt_mask = B_CCX_EDCCA_OPT_MSK_V1,
	.measurement_trig_mask = B_MEASUREMENT_TRIG_MSK,
	.trig_opt_mask = B_CCX_TRIG_OPT_MSK,
	.en_mask = B_CCX_EN_MSK,
	.ifs_cnt_addr = R_IFS_COUNTER,
	.ifs_clm_period_mask = B_IFS_CLM_PERIOD_MSK,
	.ifs_clm_cnt_unit_mask = B_IFS_CLM_COUNTER_UNIT_MSK,
	.ifs_clm_cnt_clear_mask = B_IFS_COUNTER_CLR_MSK,
	.ifs_collect_en_mask = B_IFS_COLLECT_EN,
	.ifs_t1_addr = R_IFS_T1,
	.ifs_t1_th_h_mask = B_IFS_T1_TH_HIGH_MSK,
	.ifs_t1_en_mask = B_IFS_T1_EN_MSK,
	.ifs_t1_th_l_mask = B_IFS_T1_TH_LOW_MSK,
	.ifs_t2_addr = R_IFS_T2,
	.ifs_t2_th_h_mask = B_IFS_T2_TH_HIGH_MSK,
	.ifs_t2_en_mask = B_IFS_T2_EN_MSK,
	.ifs_t2_th_l_mask = B_IFS_T2_TH_LOW_MSK,
	.ifs_t3_addr = R_IFS_T3,
	.ifs_t3_th_h_mask = B_IFS_T3_TH_HIGH_MSK,
	.ifs_t3_en_mask = B_IFS_T3_EN_MSK,
	.ifs_t3_th_l_mask = B_IFS_T3_TH_LOW_MSK,
	.ifs_t4_addr = R_IFS_T4,
	.ifs_t4_th_h_mask = B_IFS_T4_TH_HIGH_MSK,
	.ifs_t4_en_mask = B_IFS_T4_EN_MSK,
	.ifs_t4_th_l_mask = B_IFS_T4_TH_LOW_MSK,
	.ifs_clm_tx_cnt_addr = R_IFS_CLM_TX_CNT_V1,
	.ifs_clm_edcca_excl_cca_fa_mask = B_IFS_CLM_EDCCA_EXCLUDE_CCA_FA_MSK,
	.ifs_clm_tx_cnt_msk = B_IFS_CLM_TX_CNT_MSK,
	.ifs_clm_cca_addr = R_IFS_CLM_CCA_V1,
	.ifs_clm_ofdmcca_excl_fa_mask = B_IFS_CLM_OFDMCCA_EXCLUDE_FA_MSK,
	.ifs_clm_cckcca_excl_fa_mask = B_IFS_CLM_CCKCCA_EXCLUDE_FA_MSK,
	.ifs_clm_fa_addr = R_IFS_CLM_FA_V1,
	.ifs_clm_ofdm_fa_mask = B_IFS_CLM_OFDM_FA_MSK,
	.ifs_clm_cck_fa_mask = B_IFS_CLM_CCK_FA_MSK,
	.ifs_his_addr = R_IFS_HIS_V1,
	/* addr2 intentionally equals ifs_his_addr: in this layout all four
	 * T1-T4 histogram fields share one register (cf. the _v1 map below
	 * where T3/T4 live in a second register).
	 */
	.ifs_his_addr2 = R_IFS_HIS_V1,
	.ifs_t4_his_mask = B_IFS_T4_HIS_MSK,
	.ifs_t3_his_mask = B_IFS_T3_HIS_MSK,
	.ifs_t2_his_mask = B_IFS_T2_HIS_MSK,
	.ifs_t1_his_mask = B_IFS_T1_HIS_MSK,
	.ifs_avg_l_addr = R_IFS_AVG_L_V1,
	.ifs_t2_avg_mask = B_IFS_T2_AVG_MSK,
	.ifs_t1_avg_mask = B_IFS_T1_AVG_MSK,
	.ifs_avg_h_addr = R_IFS_AVG_H_V1,
	.ifs_t4_avg_mask = B_IFS_T4_AVG_MSK,
	.ifs_t3_avg_mask = B_IFS_T3_AVG_MSK,
	.ifs_cca_l_addr = R_IFS_CCA_L_V1,
	.ifs_t2_cca_mask = B_IFS_T2_CCA_MSK,
	.ifs_t1_cca_mask = B_IFS_T1_CCA_MSK,
	.ifs_cca_h_addr = R_IFS_CCA_H_V1,
	.ifs_t4_cca_mask = B_IFS_T4_CCA_MSK,
	.ifs_t3_cca_mask = B_IFS_T3_CCA_MSK,
	.ifs_total_addr = R_IFSCNT_V1,
	.ifs_cnt_done_mask = B_IFSCNT_DONE_MSK,
	.ifs_total_mask = B_IFSCNT_TOTAL_CNT_MSK,
	.nhm = R_NHM_BE,
	.nhm_ready = B_NHM_READY_BE_MSK,
	.nhm_config = R_NHM_CFG,
	.nhm_period_mask = B_NHM_PERIOD_MSK,
	.nhm_unit_mask = B_NHM_COUNTER_MSK,
	.nhm_include_cca_mask = B_NHM_INCLUDE_CCA_MSK,
	.nhm_en_mask = B_NHM_EN_MSK,
	.nhm_method = R_NHM_TH9,
	.nhm_pwr_method_msk = B_NHM_PWDB_METHOD_MSK,
};
78 
/* CCX register/field map for the BE4 register layout.  Unlike the base
 * map above, the T1-T4 histogram/average/CCA fields are split across
 * register pairs.  NOTE(review): no NHM entries are given, so those
 * members stay zero-initialized — confirm NHM is unused (or handled
 * elsewhere) for this layout.
 */
static const struct rtw89_ccx_regs rtw89_ccx_regs_be_v1 = {
	.setting_addr = R_CCX_BE4,
	.edcca_opt_mask = B_CCX_EDCCA_OPT_MSK_V1,
	.measurement_trig_mask = B_MEASUREMENT_TRIG_MSK,
	.trig_opt_mask = B_CCX_TRIG_OPT_MSK,
	.en_mask = B_CCX_EN_MSK,
	.ifs_cnt_addr = R_IFS_COUNTER_BE4,
	.ifs_clm_period_mask = B_IFS_CLM_PERIOD_MSK,
	.ifs_clm_cnt_unit_mask = B_IFS_CLM_COUNTER_UNIT_MSK,
	.ifs_clm_cnt_clear_mask = B_IFS_COUNTER_CLR_MSK,
	.ifs_collect_en_mask = B_IFS_COLLECT_EN,
	.ifs_t1_addr = R_IFS_T1_BE4,
	.ifs_t1_th_h_mask = B_IFS_T1_TH_HIGH_MSK,
	.ifs_t1_en_mask = B_IFS_T1_EN_MSK,
	.ifs_t1_th_l_mask = B_IFS_T1_TH_LOW_MSK,
	.ifs_t2_addr = R_IFS_T2_BE4,
	.ifs_t2_th_h_mask = B_IFS_T2_TH_HIGH_MSK,
	.ifs_t2_en_mask = B_IFS_T2_EN_MSK,
	.ifs_t2_th_l_mask = B_IFS_T2_TH_LOW_MSK,
	.ifs_t3_addr = R_IFS_T3_BE4,
	.ifs_t3_th_h_mask = B_IFS_T3_TH_HIGH_MSK,
	.ifs_t3_en_mask = B_IFS_T3_EN_MSK,
	.ifs_t3_th_l_mask = B_IFS_T3_TH_LOW_MSK,
	.ifs_t4_addr = R_IFS_T4_BE4,
	.ifs_t4_th_h_mask = B_IFS_T4_TH_HIGH_MSK,
	.ifs_t4_en_mask = B_IFS_T4_EN_MSK,
	.ifs_t4_th_l_mask = B_IFS_T4_TH_LOW_MSK,
	.ifs_clm_tx_cnt_addr = R_IFS_CLM_TX_CNT_BE4,
	.ifs_clm_edcca_excl_cca_fa_mask = B_IFS_CLM_EDCCA_EXCLUDE_CCA_FA_MSK,
	.ifs_clm_tx_cnt_msk = B_IFS_CLM_TX_CNT_MSK,
	.ifs_clm_cca_addr = R_IFS_CLM_CCA_BE4,
	.ifs_clm_ofdmcca_excl_fa_mask = B_IFS_CLM_OFDMCCA_EXCLUDE_FA_MSK,
	.ifs_clm_cckcca_excl_fa_mask = B_IFS_CLM_CCKCCA_EXCLUDE_FA_MSK,
	.ifs_clm_fa_addr = R_IFS_CLM_FA_BE4,
	.ifs_clm_ofdm_fa_mask = B_IFS_CLM_OFDM_FA_MSK,
	.ifs_clm_cck_fa_mask = B_IFS_CLM_CCK_FA_MSK,
	.ifs_his_addr = R_IFS_T1_HIS_BE4,
	.ifs_his_addr2 = R_IFS_T3_HIS_BE4, /* for 3/4 */
	.ifs_t4_his_mask = B_IFS_T4_HIS_BE4,
	.ifs_t3_his_mask = B_IFS_T3_HIS_BE4,
	.ifs_t2_his_mask = B_IFS_T2_HIS_BE4,
	.ifs_t1_his_mask = B_IFS_T1_HIS_BE4,
	.ifs_avg_l_addr = R_IFS_T1_AVG_BE4, /* T1/T2 averages */
	.ifs_t2_avg_mask = B_IFS_T2_AVG_BE4,
	.ifs_t1_avg_mask = B_IFS_T1_AVG_BE4,
	.ifs_avg_h_addr = R_IFS_T3_AVG_BE4, /* T3/T4 averages */
	.ifs_t4_avg_mask = B_IFS_T4_AVG_BE4,
	.ifs_t3_avg_mask = B_IFS_T3_AVG_BE4,
	.ifs_cca_l_addr = R_IFS_T1_CLM_BE4,
	.ifs_t2_cca_mask = B_IFS_T2_CLM_BE4,
	.ifs_t1_cca_mask = B_IFS_T1_CLM_BE4,
	.ifs_cca_h_addr = R_IFS_T3_CLM_BE4,
	.ifs_t4_cca_mask = B_IFS_T4_CLM_BE4,
	.ifs_t3_cca_mask = B_IFS_T3_CLM_BE4,
	.ifs_total_addr = R_IFS_TOTAL_BE4,
	.ifs_cnt_done_mask = B_IFS_CNT_DONE_BE4,
	.ifs_total_mask = B_IFS_TOTAL_BE4,
};
137 
/* PHY status (PLCP histogram) trigger-control register map, base layout */
static const struct rtw89_physts_regs rtw89_physts_regs_be = {
	.setting_addr = R_PLCP_HISTOGRAM,
	.dis_trigger_fail_mask = B_STS_DIS_TRIG_BY_FAIL,
	.dis_trigger_brk_mask = B_STS_DIS_TRIG_BY_BRK,
};
143 
/* PHY status trigger-control map, v1 layout: only the base address moves */
static const struct rtw89_physts_regs rtw89_physts_regs_be_v1 = {
	.setting_addr = R_PLCP_HISTOGRAM_BE_V1,
	.dis_trigger_fail_mask = B_STS_DIS_TRIG_BY_FAIL,
	.dis_trigger_brk_mask = B_STS_DIS_TRIG_BY_BRK,
};
149 
/* Digital CFO (carrier frequency offset) compensation register map */
static const struct rtw89_cfo_regs rtw89_cfo_regs_be = {
	.comp = R_DCFO_WEIGHT_BE,
	.weighting_mask = B_DCFO_WEIGHT_MSK_BE,
	.comp_seg0 = R_DCFO_OPT_BE,
	.valid_0_mask = B_DCFO_OPT_EN_BE,
};
156 
/* CFO compensation map, v1 layout: addresses move, field masks are shared */
static const struct rtw89_cfo_regs rtw89_cfo_regs_be_v1 = {
	.comp = R_DCFO_WEIGHT_BE_V1,
	.weighting_mask = B_DCFO_WEIGHT_MSK_BE,
	.comp_seg0 = R_DCFO_OPT_BE_V1,
	.valid_0_mask = B_DCFO_OPT_EN_BE,
};
163 
/* BB wrapper per-MACID power table base addresses, base layout */
static const struct rtw89_bb_wrap_regs rtw89_bb_wrap_regs_be = {
	.pwr_macid_lmt = R_BE_PWR_MACID_LMT_BASE,
	.pwr_macid_path = R_BE_PWR_MACID_PATH_BASE,
};
168 
/* BB wrapper per-MACID power table base addresses, v1 layout */
static const struct rtw89_bb_wrap_regs rtw89_bb_wrap_regs_be_v1 = {
	.pwr_macid_lmt = R_BE_PWR_MACID_LMT_BASE_V1,
	.pwr_macid_path = R_BE_PWR_MACID_PATH_BASE_V1,
};
173 
174 static u32 rtw89_phy0_phy1_offset_be(struct rtw89_dev *rtwdev, u32 addr)
175 {
176 	u32 phy_page = addr >> 8;
177 	u32 ofst = 0;
178 
179 	if ((phy_page >= 0x4 && phy_page <= 0xF) ||
180 	    (phy_page >= 0x20 && phy_page <= 0x2B) ||
181 	    (phy_page >= 0x40 && phy_page <= 0x4f) ||
182 	    (phy_page >= 0x60 && phy_page <= 0x6f) ||
183 	    (phy_page >= 0xE4 && phy_page <= 0xE5) ||
184 	    (phy_page >= 0xE8 && phy_page <= 0xED))
185 		ofst = 0x1000;
186 	else
187 		ofst = 0x0;
188 
189 	return ofst;
190 }
191 
192 static u32 rtw89_phy0_phy1_offset_be_v1(struct rtw89_dev *rtwdev, u32 addr)
193 {
194 	u32 phy_page = addr >> 8;
195 	u32 ofst = 0;
196 
197 	if ((phy_page >= 0x204 && phy_page <= 0x20F) ||
198 	    (phy_page >= 0x220 && phy_page <= 0x22F) ||
199 	    (phy_page >= 0x240 && phy_page <= 0x24f) ||
200 	    (phy_page >= 0x260 && phy_page <= 0x26f) ||
201 	    (phy_page >= 0x2C0 && phy_page <= 0x2C9) ||
202 	    (phy_page >= 0x2E4 && phy_page <= 0x2E8) ||
203 	    phy_page == 0x2EE)
204 		ofst = 0x1000;
205 	else
206 		ofst = 0x0;
207 
208 	return ofst;
209 }
210 
/* Decodes the 32-bit "addr" word of a BB gain table entry.  It is not a
 * register address (except for cfg_type 15); its bytes select band, RF
 * path, bandwidth and which parser handles the paired data word.
 */
union rtw89_phy_bb_gain_arg_be {
	u32 addr;
	struct {
		u8 type; /* sub-type; split into two nibbles below */
#define BB_GAIN_TYPE_SUB0_BE GENMASK(3, 0)
#define BB_GAIN_TYPE_SUB1_BE GENMASK(7, 4)
		u8 path_bw; /* RF path (low nibble) + bandwidth (high nibble) */
#define BB_GAIN_PATH_BE GENMASK(3, 0)
#define BB_GAIN_BW_BE GENMASK(7, 4)
		u8 gain_band; /* checked against RTW89_BB_GAIN_BAND_NR_BE */
		u8 cfg_type; /* dispatch key in rtw89_phy_config_bb_gain_be() */
	} __packed;
} __packed;
224 
225 static void
226 rtw89_phy_cfg_bb_gain_error_be(struct rtw89_dev *rtwdev,
227 			       union rtw89_phy_bb_gain_arg_be arg, u32 data)
228 {
229 	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
230 	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
231 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
232 	u8 gband = arg.gain_band;
233 	u8 type = arg.type;
234 	int i;
235 
236 	switch (type) {
237 	case 0:
238 		for (i = 0; i < 4; i++, data >>= 8)
239 			gain->lna_gain[gband][bw_type][path][i] = data & 0xff;
240 		break;
241 	case 1:
242 		for (i = 4; i < 7; i++, data >>= 8)
243 			gain->lna_gain[gband][bw_type][path][i] = data & 0xff;
244 		break;
245 	case 2:
246 		for (i = 0; i < 2; i++, data >>= 8)
247 			gain->tia_gain[gband][bw_type][path][i] = data & 0xff;
248 		break;
249 	default:
250 		rtw89_warn(rtwdev,
251 			   "bb gain error {0x%x:0x%x} with unknown type: %d\n",
252 			   arg.addr, data, type);
253 		break;
254 	}
255 }
256 
257 static void
258 rtw89_phy_cfg_bb_rpl_ofst_be(struct rtw89_dev *rtwdev,
259 			     union rtw89_phy_bb_gain_arg_be arg, u32 data)
260 {
261 	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
262 	u8 type_sub0 = u8_get_bits(arg.type, BB_GAIN_TYPE_SUB0_BE);
263 	u8 type_sub1 = u8_get_bits(arg.type, BB_GAIN_TYPE_SUB1_BE);
264 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
265 	u8 gband = arg.gain_band;
266 	u8 ofst = 0;
267 	int i;
268 
269 	switch (type_sub1) {
270 	case RTW89_CMAC_BW_20M:
271 		gain->rpl_ofst_20[gband][path][0] = (s8)data;
272 		break;
273 	case RTW89_CMAC_BW_40M:
274 		for (i = 0; i < RTW89_BW20_SC_40M; i++, data >>= 8)
275 			gain->rpl_ofst_40[gband][path][i] = data & 0xff;
276 		break;
277 	case RTW89_CMAC_BW_80M:
278 		for (i = 0; i < RTW89_BW20_SC_80M; i++, data >>= 8)
279 			gain->rpl_ofst_80[gband][path][i] = data & 0xff;
280 		break;
281 	case RTW89_CMAC_BW_160M:
282 		if (type_sub0 == 0)
283 			ofst = 0;
284 		else
285 			ofst = RTW89_BW20_SC_80M;
286 
287 		for (i = 0; i < RTW89_BW20_SC_80M; i++, data >>= 8)
288 			gain->rpl_ofst_160[gband][path][i + ofst] = data & 0xff;
289 		break;
290 	default:
291 		rtw89_warn(rtwdev,
292 			   "bb rpl ofst {0x%x:0x%x} with unknown type_sub1: %d\n",
293 			   arg.addr, data, type_sub1);
294 		break;
295 	}
296 }
297 
298 static void
299 rtw89_phy_cfg_bb_gain_op1db_be(struct rtw89_dev *rtwdev,
300 			       union rtw89_phy_bb_gain_arg_be arg, u32 data)
301 {
302 	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
303 	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
304 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
305 	u8 gband = arg.gain_band;
306 	u8 type = arg.type;
307 	int i;
308 
309 	switch (type) {
310 	case 0:
311 		for (i = 0; i < 4; i++, data >>= 8)
312 			gain->lna_op1db[gband][bw_type][path][i] = data & 0xff;
313 		break;
314 	case 1:
315 		for (i = 4; i < 7; i++, data >>= 8)
316 			gain->lna_op1db[gband][bw_type][path][i] = data & 0xff;
317 		break;
318 	case 2:
319 		for (i = 0; i < 4; i++, data >>= 8)
320 			gain->tia_lna_op1db[gband][bw_type][path][i] = data & 0xff;
321 		break;
322 	case 3:
323 		for (i = 4; i < 8; i++, data >>= 8)
324 			gain->tia_lna_op1db[gband][bw_type][path][i] = data & 0xff;
325 		break;
326 	default:
327 		rtw89_warn(rtwdev,
328 			   "bb gain op1db {0x%x:0x%x} with unknown type: %d\n",
329 			   arg.addr, data, type);
330 		break;
331 	}
332 }
333 
/* Entry point for one (addr, data) pair of a BE BB gain table: validate
 * the encoded band/bandwidth/path and dispatch on cfg_type to the
 * matching parser above.
 */
static void rtw89_phy_config_bb_gain_be(struct rtw89_dev *rtwdev,
					const struct rtw89_reg2_def *reg,
					enum rtw89_rf_path rf_path,
					void *extra_data)
{
	const struct rtw89_chip_info *chip = rtwdev->chip;
	union rtw89_phy_bb_gain_arg_be arg = { .addr = reg->addr };
	struct rtw89_efuse *efuse = &rtwdev->efuse;
	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);

	/* silently drop entries that don't apply to this chip */
	if (bw_type >= RTW89_BB_BW_NR_BE)
		return;

	if (arg.gain_band >= RTW89_BB_GAIN_BAND_NR_BE)
		return;

	if (path >= chip->rf_path_num)
		return;

	/* 0xf9..0xfe are table flow-control words, not gain entries */
	if (arg.addr >= 0xf9 && arg.addr <= 0xfe) {
		rtw89_warn(rtwdev, "bb gain table with flow ctrl\n");
		return;
	}

	switch (arg.cfg_type) {
	case 0:
		rtw89_phy_cfg_bb_gain_error_be(rtwdev, arg, reg->data);
		break;
	case 1:
		rtw89_phy_cfg_bb_rpl_ofst_be(rtwdev, arg, reg->data);
		break;
	case 2:
		/* ignore BB gain bypass */
		break;
	case 3:
		rtw89_phy_cfg_bb_gain_op1db_be(rtwdev, arg, reg->data);
		break;
	case 15:
		/* raw register write: low 20 bits of addr are the CR, data
		 * goes into the upper halfword of PHY 0
		 */
		rtw89_phy_write32_idx(rtwdev, reg->addr & 0xFFFFF, MASKHWORD,
				      reg->data, RTW89_PHY_0);
		break;
	case 4:
		/* This cfg_type is only used by rfe_type >= 50 with eFEM;
		 * for such boards it is (still) unsupported, so fall through
		 * to the warning below.
		 */
		if (efuse->rfe_type < 50)
			break;
		fallthrough;
	default:
		rtw89_warn(rtwdev,
			   "bb gain {0x%x:0x%x} with unknown cfg type: %d\n",
			   arg.addr, reg->data, arg.cfg_type);
		break;
	}
}
388 
/* Pre-initialize the RF NCTL block (IQK/DPK calibration engine) for the
 * base BE layout: enable clocks and release the various IQK/DPK resets
 * (per the register/field names) before calibration is first used.
 * Order of writes follows the vendor init sequence; do not reorder.
 */
static void rtw89_phy_preinit_rf_nctl_be(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C0, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C1, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IQKDPK_HC, B_IQKDPK_HC, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_CLK_GCK, B_CLK_GCK, 0x00fffff);
	rtw89_phy_write32_mask(rtwdev, R_IOQ_IQK_DPK, B_IOQ_IQK_DPK_CLKEN, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST, B_IQK_DPK_RST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST_C1, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_TXRFC, B_TXRFC_RST, 0x1);

	/* second-PHY copies only when dual-band concurrent mode is on */
	if (rtwdev->dbcc_en) {
		rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST_C1, B_IQK_DPK_RST, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_TXRFC_C1, B_TXRFC_RST, 0x1);
	}
}
406 
/* RF NCTL preinit for the BE4 layout.  Shorter than the base variant:
 * no clock gating or DBCC-conditional writes here (all writes are
 * unconditional for both C0/C1 PRST registers).
 */
static void rtw89_phy_preinit_rf_nctl_be_v1(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C0_BE4, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C1_BE4, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IOQ_IQK_DPK_BE4, B_IOQ_IQK_DPK_RST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST_BE4, B_IQK_DPK_RST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST_BE4, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST_C1_BE4, B_IQK_DPK_PRST, 0x1);
}
416 
417 static u32 rtw89_phy_bb_wrap_flush_addr(struct rtw89_dev *rtwdev, u32 addr)
418 {
419 	struct rtw89_hal *hal = &rtwdev->hal;
420 
421 	if (!test_bit(RTW89_FLAG_RUNNING, rtwdev->flags))
422 		return 0;
423 
424 	if (rtwdev->chip->chip_id == RTL8922D && hal->cid == RTL8922D_CID7025) {
425 		if (addr >= R_BE_PWR_MACID_PATH_BASE_V1 &&
426 		    addr <= R_BE_PWR_MACID_PATH_BASE_V1 + 0xFF)
427 			return addr + 0x800;
428 
429 		if (addr >= R_BE_PWR_MACID_LMT_BASE_V1 &&
430 		    addr <= R_BE_PWR_MACID_LMT_BASE_V1 + 0xFF)
431 			return addr - 0x800;
432 	}
433 
434 	return 0;
435 }
436 
437 static
438 void rtw89_write_bb_wrap_flush(struct rtw89_dev *rtwdev, u32 addr, u32 data)
439 {
440 	/* To write registers of pwr_macid_lmt and pwr_macid_path with flush */
441 	u32 flush_addr;
442 	u32 val32;
443 
444 	flush_addr = rtw89_phy_bb_wrap_flush_addr(rtwdev, addr);
445 	if (flush_addr) {
446 		val32 = rtw89_read32(rtwdev, flush_addr);
447 		rtw89_write32(rtwdev, flush_addr, val32);
448 	}
449 
450 	rtw89_write32(rtwdev, addr, data);
451 }
452 
453 static
454 void rtw89_phy_bb_wrap_pwr_by_macid_init(struct rtw89_dev *rtwdev)
455 {
456 	const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
457 	const struct rtw89_bb_wrap_regs *bb_wrap = phy->bb_wrap;
458 	u32 max_macid = rtwdev->chip->support_macid_num;
459 	u32 macid_idx, cr, base_macid_lmt;
460 
461 	base_macid_lmt = bb_wrap->pwr_macid_lmt;
462 
463 	for (macid_idx = 0; macid_idx < 4 * max_macid; macid_idx += 4) {
464 		cr = base_macid_lmt + macid_idx;
465 		rtw89_write_bb_wrap_flush(rtwdev, cr, 0);
466 	}
467 }
468 
469 static
470 void rtw89_phy_bb_wrap_tx_path_by_macid_init(struct rtw89_dev *rtwdev)
471 {
472 	const struct rtw89_phy_gen_def *phy = rtwdev->chip->phy_def;
473 	const struct rtw89_bb_wrap_regs *bb_wrap = phy->bb_wrap;
474 	u32 max_macid = rtwdev->chip->support_macid_num;
475 	u32 cr = bb_wrap->pwr_macid_path;
476 	int i;
477 
478 	for (i = 0; i < max_macid; i++, cr += 4)
479 		rtw89_write_bb_wrap_flush(rtwdev, cr, 0);
480 }
481 
/* Reset the whole TX power unit (TPU) for one MAC: zero the by-rate,
 * RU-limit and rate-offset tables and all software power offsets, and
 * enable by-rate power over the data bandwidth.
 */
static void rtw89_phy_bb_wrap_tpu_set_all(struct rtw89_dev *rtwdev,
					  enum rtw89_mac_idx mac_idx)
{
	u32 addr, t;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM_SS, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_BY_RATE_DBW_ON, 0x3);

	/* zero every entry of the three power tables (4 bytes per CR) */
	for (addr = R_BE_PWR_BY_RATE; addr <= R_BE_PWR_BY_RATE_END; addr += 4) {
		t = rtw89_mac_reg_by_idx(rtwdev, addr, mac_idx);
		rtw89_write32(rtwdev, t, 0);
	}
	for (addr = R_BE_PWR_RULMT_START; addr <= R_BE_PWR_RULMT_END; addr += 4) {
		t = rtw89_mac_reg_by_idx(rtwdev, addr, mac_idx);
		rtw89_write32(rtwdev, t, 0);
	}
	for (addr = R_BE_PWR_RATE_OFST_CTRL; addr <= R_BE_PWR_RATE_OFST_END; addr += 4) {
		t = rtw89_mac_reg_by_idx(rtwdev, addr, mac_idx);
		rtw89_write32(rtwdev, t, 0);
	}

	/* clear all power offset fields */
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_REF_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_LMT_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_LMTBF, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_LMTBF_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RATE_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_BYRATE_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_RULMT, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_RULMT_DB, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_SW, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_SW_DB, 0);
}
514 
/* Set the listen path enable for MAC 1.  Silently does nothing when
 * CMAC 1 is not enabled (single-MAC operation).
 */
static
void rtw89_phy_bb_wrap_listen_path_en_init(struct rtw89_dev *rtwdev)
{
	u32 addr;
	int ret;

	ret = rtw89_mac_check_mac_en(rtwdev, RTW89_MAC_1, RTW89_CMAC_SEL);
	if (ret)
		return;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_LISTEN_PATH, RTW89_MAC_1);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_LISTEN_PATH_EN, 0x2);
}
528 
/* Disable every "force" override in the power control CRs so the
 * hardware computes TX power from the tables rather than forced values.
 */
static void rtw89_phy_bb_wrap_force_cr_init(struct rtw89_dev *rtwdev,
					    enum rtw89_mac_idx mac_idx)
{
	u32 addr;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FORCE_LMT, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_LMT_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RATE_ON, 0);
	/* RU force has two enable fields in the same CR */
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_RULMT, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RU_ENON, 0);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RU_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FORCE_MACID, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_MACID_ALL, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_COEX_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_COEX_ON, 0);
	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RATE_CTRL, mac_idx);
	rtw89_write32_mask(rtwdev, addr, B_BE_FORCE_PWR_BY_RATE_EN, 0);
}
548 
/* FTM (fine timing measurement) power CR init.
 * NOTE(review): 0xE4E431 and the raw 0x7 mask are undocumented magic
 * from the vendor init sequence — confirm against the register spec
 * before changing.
 */
static void rtw89_phy_bb_wrap_ftm_init(struct rtw89_dev *rtwdev,
				       enum rtw89_mac_idx mac_idx)
{
	u32 addr;

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM, mac_idx);
	rtw89_write32(rtwdev, addr, 0xE4E431);

	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM_SS, mac_idx);
	rtw89_write32_mask(rtwdev, addr, 0x7, 0);
}
560 
561 static u32 rtw89_phy_bb_wrap_be_bandedge_decision(struct rtw89_dev *rtwdev,
562 						  const struct rtw89_chan *chan)
563 {
564 	u8 pri_ch = chan->primary_channel;
565 	u32 val = 0;
566 
567 	switch (chan->band_type) {
568 	default:
569 	case RTW89_BAND_2G:
570 		if (pri_ch == 1 || pri_ch == 13)
571 			val = BIT(1) | BIT(0);
572 		else if (pri_ch == 3 || pri_ch == 11)
573 			val = BIT(1);
574 		break;
575 	case RTW89_BAND_5G:
576 		if (pri_ch == 36 || pri_ch == 64 || pri_ch == 100)
577 			val = BIT(3) | BIT(2) | BIT(1) | BIT(0);
578 		else if (pri_ch == 40 || pri_ch == 60 || pri_ch == 104)
579 			val = BIT(3) | BIT(2) | BIT(1);
580 		else if ((pri_ch > 40 && pri_ch < 60) || pri_ch == 108 || pri_ch == 112)
581 			val = BIT(3) | BIT(2);
582 		else if (pri_ch > 112 && pri_ch < 132)
583 			val = BIT(3);
584 		break;
585 	case RTW89_BAND_6G:
586 		if (pri_ch == 233)
587 			val = BIT(0);
588 		break;
589 	}
590 
591 	return val;
592 }
593 
/* Program the RFSI CT option registers for @phy_idx with the fixed
 * value 0x00010001 (same value in both CRs, per the init sequence).
 */
void rtw89_phy_bb_wrap_set_rfsi_ct_opt(struct rtw89_dev *rtwdev,
				       enum rtw89_phy_idx phy_idx)
{
	u32 reg;

	reg = rtw89_mac_reg_by_idx(rtwdev, R_RFSI_CT_OPT_0_BE4, phy_idx);
	rtw89_write32(rtwdev, reg, 0x00010001);

	reg = rtw89_mac_reg_by_idx(rtwdev, R_RFSI_CT_OPT_8_BE4, phy_idx);
	rtw89_write32(rtwdev, reg, 0x00010001);
}
EXPORT_SYMBOL(rtw89_phy_bb_wrap_set_rfsi_ct_opt);
606 
607 void rtw89_phy_bb_wrap_set_rfsi_bandedge_ch(struct rtw89_dev *rtwdev,
608 					    const struct rtw89_chan *chan,
609 					    enum rtw89_phy_idx phy_idx)
610 {
611 	u32 reg;
612 	u32 val;
613 
614 	val = rtw89_phy_bb_wrap_be_bandedge_decision(rtwdev, chan);
615 
616 	rtw89_phy_write32_idx(rtwdev, R_TX_CFR_MANUAL_EN_BE4, B_TX_CFR_MANUAL_EN_BE4_M,
617 			      chan->primary_channel == 13, phy_idx);
618 
619 	reg = rtw89_mac_reg_by_idx(rtwdev, R_BANDEDGE_DBWX_BE4, phy_idx);
620 	rtw89_write32_mask(rtwdev, reg, B_BANDEDGE_DBW20_BE4, val & BIT(0));
621 	reg = rtw89_mac_reg_by_idx(rtwdev, R_BANDEDGE_DBWX_BE4, phy_idx);
622 	rtw89_write32_mask(rtwdev, reg, B_BANDEDGE_DBW40_BE4, (val & BIT(1)) >> 1);
623 	reg = rtw89_mac_reg_by_idx(rtwdev, R_BANDEDGE_DBWX_BE4, phy_idx);
624 	rtw89_write32_mask(rtwdev, reg, B_BANDEDGE_DBW80_BE4, (val & BIT(2)) >> 2);
625 	reg = rtw89_mac_reg_by_idx(rtwdev, R_BANDEDGE_DBWY_BE4, phy_idx);
626 	rtw89_write32_mask(rtwdev, reg, B_BANDEDGE_DBW160_BE4, (val & BIT(3)) >> 3);
627 }
628 EXPORT_SYMBOL(rtw89_phy_bb_wrap_set_rfsi_bandedge_ch);
629 
/* Initialize the TX RFSI QAM compensation thresholds: three threshold
 * levels (TH0=0x1, TH1=0x2, TH2=0x4) spread over the QAM_TH0..TH2 CRs,
 * then the per-bandwidth DPD thresholds and the CIM3K threshold.
 */
static void rtw89_phy_bb_wrap_tx_rfsi_qam_comp_th_init(struct rtw89_dev *rtwdev,
						       enum rtw89_mac_idx mac_idx)
{
	/* TH0 */
	rtw89_write32_idx(rtwdev, R_QAM_TH0_BE4, B_QAM_TH0_0_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH0_BE4, B_QAM_TH0_3_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_1_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_4_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_7_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_0_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_3_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_6_BE4, 0x1, mac_idx);
	/* TH1 */
	rtw89_write32_idx(rtwdev, R_QAM_TH0_BE4, B_QAM_TH0_1_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH0_BE4, B_QAM_TH0_4_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_2_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_5_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_8_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_1_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_4_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_7_BE4, 0x2, mac_idx);
	/* TH2 */
	rtw89_write32_idx(rtwdev, R_QAM_TH0_BE4, B_QAM_TH0_2_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_0_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_3_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_6_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH1_BE4, B_QAM_TH1_9_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_2_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_5_BE4, 0x4, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_TH2_BE4, B_QAM_TH2_8_BE4, 0x4, mac_idx);
	/* DPD 160M */
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH0_BE4, B_DPD_DBW160_TH0_0_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH0_BE4, B_DPD_DBW160_TH0_1_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH0_BE4, B_DPD_DBW160_TH0_2_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH0_BE4, B_DPD_DBW160_TH0_3_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH0_BE4, B_DPD_DBW160_TH0_4_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH1_BE4, B_DPD_DBW160_TH1_5_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH1_BE4, B_DPD_DBW160_TH1_6_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_DBW160_TH1_BE4, B_DPD_DBW160_TH1_7_BE4, 0x1, mac_idx);
	/* DPD 20M */
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_0_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_1_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_2_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_3_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_4_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_5_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH0_BE4, B_DPD_CBW20_TH0_6_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW20_TH1_7_BE4, 0x2, mac_idx);
	/* DPD 40M */
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW40_TH1_0_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW40_TH1_1_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW40_TH1_2_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW40_TH1_3_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW40_TH1_4_BE4, 0x2, mac_idx);
	/* NOTE(review): the next three writes reuse B_DPD_CBW20_TH0_* masks
	 * (already written above) in the 40M section — looks like they may
	 * have been meant as CBW40_TH1_5/6/7; confirm against the CR spec.
	 */
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW20_TH0_3_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW20_TH0_4_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW20_TH0_5_BE4, 0x2, mac_idx);
	/* DPD 80M */
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH1_BE4, B_DPD_CBW80_TH1_0_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_1_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_2_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_3_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_4_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_5_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_6_BE4, 0x2, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW_TH2_BE4, B_DPD_CBW80_TH2_7_BE4, 0x2, mac_idx);
	/* CIM3K */
	rtw89_write32_idx(rtwdev, R_COMP_CIM3K_BE4, B_COMP_CIM3K_TH2_BE4, 0x2, mac_idx);
}
699 
/* Zero the default RFSI/FBTB CT scenario selectors (ER, SU-BF, FTM,
 * sensing, power-boost, DL w/o BF, DL BF, MU-MIMO) for one MAC.
 */
static void rtw89_phy_bb_wrap_tx_rfsi_scenario_def(struct rtw89_dev *rtwdev,
						   enum rtw89_mac_idx mac_idx)
{
	rtw89_write32_idx(rtwdev, R_RFSI_CT_DEF_BE4, B_RFSI_CT_ER_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_RFSI_CT_DEF_BE4, B_RFSI_CT_SUBF_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_RFSI_CT_DEF_BE4, B_RFSI_CT_FTM_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_RFSI_CT_DEF_BE4, B_RFSI_CT_SENS_BE4, 0x0, mac_idx);

	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_DEF_BE, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_PB_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_DL_WO_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_DL_BF_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_MUMIMO_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_FTM_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_FBTB_CT_DEF_BE4, B_FBTB_CT_SENS_BE4, 0x0, mac_idx);
}
716 
/* Program the RFSI QAM compensation and overwrite values for one MAC.
 * NOTE(review): 0x4010/0x4410/0x8 are vendor-provided magic — confirm
 * against the CR spec before changing.
 */
static void rtw89_phy_bb_wrap_tx_rfsi_qam_comp_val(struct rtw89_dev *rtwdev,
						   enum rtw89_mac_idx mac_idx)
{
	/* per-threshold compensation halfwords */
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH0_BE4, MASKLWORD, 0x4010, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH0_BE4, MASKHWORD, 0x4410, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH1_BE4, MASKLWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH1_BE4, MASKHWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH2_BE4, MASKLWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH2_BE4, MASKHWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH3_BE4, MASKLWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH3_BE4, MASKHWORD, 0x0, mac_idx);

	/* TH4..TH6: low/mid/high plus the second (2L/2M/2H) field set */
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH4_BE4, B_QAM_COMP_TH4_L, 0x8, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH4_BE4, B_QAM_COMP_TH4_M, 0x8, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH4_BE4, B_QAM_COMP_TH4_H, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH5_BE4, B_QAM_COMP_TH5_L, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH5_BE4, B_QAM_COMP_TH5_M, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH5_BE4, B_QAM_COMP_TH5_H, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH6_BE4, B_QAM_COMP_TH6_L, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH6_BE4, B_QAM_COMP_TH6_M, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH4_BE4, B_QAM_COMP_TH4_2L, 0x8, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH4_BE4, B_QAM_COMP_TH4_2M, 0x8, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH4_BE4, B_QAM_COMP_TH4_2H, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH5_BE4, B_QAM_COMP_TH5_2L, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH5_BE4, B_QAM_COMP_TH5_2M, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH5_BE4, B_QAM_COMP_TH5_2H, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH6_BE4, B_QAM_COMP_TH6_2L, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_QAM_COMP_TH6_BE4, B_QAM_COMP_TH6_2M, 0x0, mac_idx);

	/* overwrite values, halfword pairs */
	rtw89_write32_idx(rtwdev, R_OW_VAL_0_BE4, MASKLWORD, 0x4010, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_0_BE4, MASKHWORD, 0x4010, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_1_BE4, MASKLWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_1_BE4, MASKHWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_2_BE4, MASKLWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_2_BE4, MASKHWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_3_BE4, MASKLWORD, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OW_VAL_3_BE4, MASKHWORD, 0x0, mac_idx);
}
755 
/* Program the OOB (out-of-band) DPD QAM compensation CRs for one MAC.
 * All CCK/TH/OW fields are written to 0 here; presumably the effective
 * values are derived later by the scenario/bandedge setup -- TODO confirm.
 */
static void rtw89_phy_bb_set_oob_dpd_qam_comp_val(struct rtw89_dev *rtwdev,
						  enum rtw89_mac_idx mac_idx)
{
	/* CCK compensation entries, 20 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_CCK7_BE4, 0x0, mac_idx);

	/* CCK compensation entries, 40 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_CCK7_BE4, 0x0, mac_idx);

	/* threshold (TH) entries, 20 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW20_BE4, B_OOB_CBW20_TH7_BE4, 0x0, mac_idx);

	/* threshold (TH) entries, 40 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_TH7_BE4, 0x0, mac_idx);

	/* threshold (TH) entries, 80 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_TH7_BE4, 0x0, mac_idx);

	/* NOTE(review): CBW20 OW fields are addressed through R_OOB_CBW40_BE4,
	 * unlike the CBW20 CCK/TH fields above; this looks like the actual CR
	 * field layout rather than a typo -- verify against the CR map.
	 */
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW20_OW7_BE4, 0x0, mac_idx);

	/* overwrite (OW) entries, 40 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW40_BE4, B_OOB_CBW40_OW7_BE4, 0x0, mac_idx);

	/* overwrite (OW) entries, 80 MHz channel bandwidth */
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_OOB_CBW80_BE4, B_OOB_CBW80_OW7_BE4, 0x0, mac_idx);
}
831 
/* Program the MDPD QAM compensation CRs for 160 MHz channel bandwidth on
 * one MAC: all eight TH and eight OW fields are cleared to 0.
 */
static void rtw89_phy_bb_set_mdpd_qam_comp_val(struct rtw89_dev *rtwdev,
					       enum rtw89_mac_idx mac_idx)
{
	/* threshold (TH) entries */
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_TH7_BE4, 0x0, mac_idx);

	/* overwrite (OW) entries */
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW0_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW1_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW2_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW3_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW4_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW5_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW6_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_DPD_CBW160_BE4, B_DPD_CBW160_OW7_BE4, 0x0, mac_idx);
}
853 
/* Program the CIM3K compensation CR for one MAC: TH/OW cleared, while the
 * NONBE and BANDEDGE compensation fields are enabled (set to 1).
 */
static void rtw89_phy_bb_set_cim3k_val(struct rtw89_dev *rtwdev,
				       enum rtw89_mac_idx mac_idx)
{
	rtw89_write32_idx(rtwdev, R_COMP_CIM3K_BE4, B_COMP_CIM3K_TH_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_COMP_CIM3K_BE4, B_COMP_CIM3K_OW_BE4, 0x0, mac_idx);
	rtw89_write32_idx(rtwdev, R_COMP_CIM3K_BE4, B_COMP_CIM3K_NONBE_BE4, 0x1, mac_idx);
	rtw89_write32_idx(rtwdev, R_COMP_CIM3K_BE4, B_COMP_CIM3K_BANDEDGE_BE4, 0x1, mac_idx);
}
862 
/* Initialize TX RFSI control for one MAC. Only applies to RTL8922D; the
 * helpers below are called in a fixed order (thresholds, scenario defaults,
 * compensation values, then CT options and bandedge by current channel).
 */
static void rtw89_phy_bb_wrap_tx_rfsi_ctrl_init(struct rtw89_dev *rtwdev,
						enum rtw89_mac_idx mac_idx)
{
	/* MAC_0 drives PHY_0; any other MAC index drives PHY_1 */
	enum rtw89_phy_idx phy_idx = mac_idx != RTW89_MAC_0 ? RTW89_PHY_1 : RTW89_PHY_0;
	enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id;
	const struct rtw89_chan *chan;

	if (chip_id != RTL8922D)
		return;

	rtw89_phy_bb_wrap_tx_rfsi_qam_comp_th_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_tx_rfsi_scenario_def(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_tx_rfsi_qam_comp_val(rtwdev, mac_idx);
	rtw89_phy_bb_set_oob_dpd_qam_comp_val(rtwdev, mac_idx);
	rtw89_phy_bb_set_mdpd_qam_comp_val(rtwdev, mac_idx);
	rtw89_phy_bb_set_cim3k_val(rtwdev, mac_idx);

	rtw89_phy_bb_wrap_set_rfsi_ct_opt(rtwdev, phy_idx);

	/* bandedge setup needs a managed channel; skip if none is set yet */
	chan = rtw89_mgnt_chan_get(rtwdev, phy_idx);
	if (chan)
		rtw89_phy_bb_wrap_set_rfsi_bandedge_ch(rtwdev, chan, phy_idx);
}
886 
/* Set uplink power CRs (RSSI target limit and power threshold) on every MAC.
 * RTL8922A only. The two 32-bit constants are vendor-provided CR values;
 * their field breakdown is not visible here -- see the CR map for details.
 */
static void rtw89_phy_bb_wrap_ul_pwr(struct rtw89_dev *rtwdev)
{
	enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id;
	u8 mac_idx;
	u32 addr;

	if (chip_id != RTL8922A)
		return;

	for (mac_idx = 0; mac_idx < RTW89_MAC_NUM; mac_idx++) {
		addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RSSI_TARGET_LMT, mac_idx);
		rtw89_write32(rtwdev, addr, 0x0201FE00);
		addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_TH, mac_idx);
		rtw89_write32(rtwdev, addr, 0x00FFEC7E);
	}
}
903 
/* Run the full BB wrapper init sequence for one MAC: per-macid TX path and
 * power tables, TPU, RFSI control, force-CR, FTM, listen path, and UL power.
 */
static void __rtw89_phy_bb_wrap_init_be(struct rtw89_dev *rtwdev,
					enum rtw89_mac_idx mac_idx)
{
	rtw89_phy_bb_wrap_tx_path_by_macid_init(rtwdev);
	rtw89_phy_bb_wrap_pwr_by_macid_init(rtwdev);
	rtw89_phy_bb_wrap_tpu_set_all(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_tx_rfsi_ctrl_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_force_cr_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_ftm_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_listen_path_en_init(rtwdev);
	rtw89_phy_bb_wrap_ul_pwr(rtwdev);
}
916 
/* BB wrapper init entry point: always init MAC_0, and MAC_1 too under DBCC. */
static void rtw89_phy_bb_wrap_init_be(struct rtw89_dev *rtwdev)
{
	__rtw89_phy_bb_wrap_init_be(rtwdev, RTW89_MAC_0);
	if (rtwdev->dbcc_en)
		__rtw89_phy_bb_wrap_init_be(rtwdev, RTW89_MAC_1);
}
923 
/* Configure channel-info (CHINFO) capture: segment selection, data bitmap,
 * element bitmap/source, and report type/scaling.
 */
static void rtw89_phy_ch_info_init_be(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG, B_CHINFO_SEG_LEN, 0x0);
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG, B_CHINFO_SEG, 0xf);
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_DATA, B_CHINFO_DATA_BITMAP, 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ELM_SRC, B_CHINFO_ELM_BITMAP, 0x40303);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ELM_SRC, B_CHINFO_SRC, 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_TYPE_SCAL, B_CHINFO_TYPE, 0x3);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_TYPE_SCAL, B_CHINFO_SCAL, 0x0);
}
934 
/* V1 (BE4 CR layout) variant of the CHINFO init. NOTE(review): not wired
 * into rtw89_phy_gen_be below -- presumably selected elsewhere for newer
 * chips; confirm before removing.
 */
static void rtw89_phy_ch_info_init_be_v1(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG_BE4, B_CHINFO_SEG_LEN_BE4, 0);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_OPT_BE4, B_CHINFO_OPT_BE4, 0x3);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_NX_BE4, B_CHINFO_NX_BE4, 0x669);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ALG_BE4, B_CHINFO_ALG_BE4, 0);
}
942 
/* One contiguous run of by-rate TX power entries to be programmed. */
struct rtw89_byr_spec_ent_be {
	struct rtw89_rate_desc init;	/* descriptor of the first rate in the run */
	u8 num_of_idx;			/* number of consecutive rate indices */
	bool no_over_bw40;		/* skip this run when bandwidth > 40 MHz */
	bool no_multi_nss;		/* skip this run when NSS > 1 */
};
949 
/* Layout of by-rate TX power entries as written to R_BE_PWR_BY_RATE.
 * Order matters: __phy_set_txpwr_byrate_be() walks this table sequentially,
 * packing values four per 32-bit word and advancing the destination address.
 */
static const struct rtw89_byr_spec_ent_be rtw89_byr_spec_be[] = {
	{
		/* CCK rates: single stream, <= 40 MHz only */
		.init = { .rs = RTW89_RS_CCK },
		.num_of_idx = RTW89_RATE_CCK_NUM,
		.no_over_bw40 = true,
		.no_multi_nss = true,
	},
	{
		/* legacy OFDM rates: single stream */
		.init = { .rs = RTW89_RS_OFDM },
		.num_of_idx = RTW89_RATE_OFDM_NUM,
		.no_multi_nss = true,
	},
	{
		/* MCS indices 14-15, non-OFDMA, single stream */
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		/* MCS indices 14-15, OFDMA, single stream */
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		/* MCS indices 0-13, non-OFDMA */
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 14,
	},
	{
		/* HE DCM rates, non-OFDMA */
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
	{
		/* MCS indices 0-13, OFDMA */
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_OFDMA },
		.num_of_idx = 14,
	},
	{
		/* HE DCM rates, OFDMA */
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
};
989 
990 static
991 void __phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev, u8 band, u8 bw,
992 			       u8 nss, u32 *addr, enum rtw89_phy_idx phy_idx)
993 {
994 	const struct rtw89_byr_spec_ent_be *ent;
995 	struct rtw89_rate_desc desc;
996 	int pos = 0;
997 	int i, j;
998 	u32 val;
999 	s8 v[4];
1000 
1001 	for (i = 0; i < ARRAY_SIZE(rtw89_byr_spec_be); i++) {
1002 		ent = &rtw89_byr_spec_be[i];
1003 
1004 		if (bw > RTW89_CHANNEL_WIDTH_40 && ent->no_over_bw40)
1005 			continue;
1006 		if (nss > RTW89_NSS_1 && ent->no_multi_nss)
1007 			continue;
1008 
1009 		desc = ent->init;
1010 		desc.nss = nss;
1011 		for (j = 0; j < ent->num_of_idx; j++, desc.idx++) {
1012 			v[pos] = rtw89_phy_read_txpwr_byrate(rtwdev, band, bw,
1013 							     &desc);
1014 			pos = (pos + 1) % 4;
1015 			if (pos)
1016 				continue;
1017 
1018 			val = u32_encode_bits(v[0], GENMASK(7, 0)) |
1019 			      u32_encode_bits(v[1], GENMASK(15, 8)) |
1020 			      u32_encode_bits(v[2], GENMASK(23, 16)) |
1021 			      u32_encode_bits(v[3], GENMASK(31, 24));
1022 
1023 			rtw89_mac_txpwr_write32(rtwdev, phy_idx, *addr, val);
1024 			*addr += 4;
1025 		}
1026 	}
1027 }
1028 
1029 static void rtw89_phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev,
1030 					  const struct rtw89_chan *chan,
1031 					  enum rtw89_phy_idx phy_idx)
1032 {
1033 	u32 addr = R_BE_PWR_BY_RATE;
1034 	u8 band = chan->band_type;
1035 	u8 bw, nss;
1036 
1037 	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
1038 		    "[TXPWR] set txpwr byrate on band %d\n", band);
1039 
1040 	for (bw = 0; bw <= RTW89_CHANNEL_WIDTH_320; bw++)
1041 		for (nss = 0; nss <= RTW89_NSS_2; nss++)
1042 			__phy_set_txpwr_byrate_be(rtwdev, band, bw, nss,
1043 						  &addr, phy_idx);
1044 }
1045 
/* Program per-rate-section TX power offsets: one 4-bit field per section,
 * eight sections packed into the single R_BE_PWR_RATE_OFST_CTRL word.
 */
static void rtw89_phy_set_txpwr_offset_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	struct rtw89_rate_desc desc = {
		.nss = RTW89_NSS_1,
		.rs = RTW89_RS_OFFSET,
	};
	u8 band = chan->band_type;
	s8 v[RTW89_RATE_OFFSET_NUM_BE] = {};
	u32 val;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr offset on band %d\n", band);

	/* collect one offset per rate section; bw argument is fixed at 0 */
	for (desc.idx = 0; desc.idx < RTW89_RATE_OFFSET_NUM_BE; desc.idx++)
		v[desc.idx] = rtw89_phy_read_txpwr_byrate(rtwdev, band, 0, &desc);

	/* pack eight signed 4-bit offsets, CCK in the lowest nibble */
	val = u32_encode_bits(v[RTW89_RATE_OFFSET_CCK], GENMASK(3, 0)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_OFDM], GENMASK(7, 4)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HT], GENMASK(11, 8)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_VHT], GENMASK(15, 12)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HE], GENMASK(19, 16)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_EHT], GENMASK(23, 20)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_HE], GENMASK(27, 24)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_EHT], GENMASK(31, 28));

	rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_BE_PWR_RATE_OFST_CTRL, val);
}
1075 
1076 static void
1077 fill_limit_nonbf_bf(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
1078 		    u8 band, u8 bw, u8 ntx, u8 rs, u8 ch)
1079 {
1080 	int bf;
1081 
1082 	for (bf = 0; bf < RTW89_BF_NUM; bf++)
1083 		(*ptr)[bf] = rtw89_phy_read_txpwr_limit(rtwdev, band, bw, ntx,
1084 							rs, bf, ch);
1085 }
1086 
1087 static void
1088 fill_limit_nonbf_bf_min(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
1089 			u8 band, u8 bw, u8 ntx, u8 rs, u8 ch1, u8 ch2)
1090 {
1091 	s8 v1[RTW89_BF_NUM];
1092 	s8 v2[RTW89_BF_NUM];
1093 	int bf;
1094 
1095 	fill_limit_nonbf_bf(rtwdev, &v1, band, bw, ntx, rs, ch1);
1096 	fill_limit_nonbf_bf(rtwdev, &v2, band, bw, ntx, rs, ch2);
1097 
1098 	for (bf = 0; bf < RTW89_BF_NUM; bf++)
1099 		(*ptr)[bf] = min(v1[bf], v2[bf]);
1100 }
1101 
/* Fill TX power limits for a 20 MHz channel: CCK at 20/40 MHz, OFDM, and
 * the single 20 MHz MCS slot, all at the center channel.
 */
static void phy_fill_limit_20m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch);
}
1115 
/* Fill TX power limits for a 40 MHz channel. 20 MHz sub-entries are taken
 * at the two 20 MHz sub-channel centers (ch -/+ 2); CCK 20 MHz uses the
 * lower sub-channel only; OFDM follows the primary channel.
 */
static void phy_fill_limit_40m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch);
}
1135 
/* Fill TX power limits for an 80 MHz channel: four 20 MHz sub-channels
 * (ch -/+ 2, -/+ 6), two 40 MHz halves (ch -/+ 4), the full 80 MHz entry,
 * and the 0.5-offset 40 MHz entry as the min of both halves.
 */
static void phy_fill_limit_80m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 4, ch + 4);
}
1162 
1163 static void phy_fill_limit_160m_be(struct rtw89_dev *rtwdev,
1164 				   struct rtw89_txpwr_limit_be *lmt,
1165 				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
1166 {
1167 	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
1168 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);
1169 
1170 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
1171 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
1172 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
1173 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
1174 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
1175 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
1176 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
1177 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
1178 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
1179 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
1180 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
1181 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
1182 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
1183 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
1184 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
1185 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);
1186 
1187 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
1188 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
1189 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
1190 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
1191 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
1192 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
1193 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
1194 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);
1195 
1196 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
1197 			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
1198 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
1199 			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);
1200 
1201 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
1202 			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch);
1203 
1204 	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
1205 				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
1206 				ch - 12, ch - 4);
1207 	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
1208 				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
1209 				ch + 4, ch + 12);
1210 }
1211 
1212 static void phy_fill_limit_320m_be(struct rtw89_dev *rtwdev,
1213 				   struct rtw89_txpwr_limit_be *lmt,
1214 				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
1215 {
1216 	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
1217 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);
1218 
1219 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
1220 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 30);
1221 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
1222 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 26);
1223 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
1224 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 22);
1225 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
1226 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 18);
1227 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
1228 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
1229 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
1230 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
1231 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
1232 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
1233 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
1234 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
1235 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[8], band,
1236 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
1237 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[9], band,
1238 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
1239 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[10], band,
1240 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
1241 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[11], band,
1242 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);
1243 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[12], band,
1244 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 18);
1245 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[13], band,
1246 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 22);
1247 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[14], band,
1248 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 26);
1249 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[15], band,
1250 			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 30);
1251 
1252 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
1253 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 28);
1254 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
1255 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 20);
1256 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
1257 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
1258 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
1259 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
1260 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[4], band,
1261 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
1262 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[5], band,
1263 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);
1264 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[6], band,
1265 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 20);
1266 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[7], band,
1267 			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 28);
1268 
1269 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
1270 			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 24);
1271 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
1272 			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
1273 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[2], band,
1274 			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);
1275 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[3], band,
1276 			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 24);
1277 
1278 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
1279 			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch - 16);
1280 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[1], band,
1281 			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch + 16);
1282 
1283 	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_320m, band,
1284 			    RTW89_CHANNEL_WIDTH_320, ntx, RTW89_RS_MCS, ch);
1285 
1286 	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
1287 				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
1288 				ch - 28, ch - 20);
1289 	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
1290 				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
1291 				ch - 12, ch - 4);
1292 	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_4p5, band,
1293 				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
1294 				ch + 4, ch + 12);
1295 	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_6p5, band,
1296 				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
1297 				ch + 20, ch + 28);
1298 }
1299 
/* Zero the limit page and dispatch to the bandwidth-specific filler.
 * Unlisted bandwidth values leave the page all-zero.
 */
static void rtw89_phy_fill_limit_be(struct rtw89_dev *rtwdev,
				    const struct rtw89_chan *chan,
				    struct rtw89_txpwr_limit_be *lmt,
				    u8 ntx)
{
	u8 band = chan->band_type;
	u8 pri_ch = chan->primary_channel;
	u8 ch = chan->channel;
	u8 bw = chan->band_width;

	memset(lmt, 0, sizeof(*lmt));

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_20:
		phy_fill_limit_20m_be(rtwdev, lmt, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		phy_fill_limit_40m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		phy_fill_limit_80m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_160:
		phy_fill_limit_160m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	case RTW89_CHANNEL_WIDTH_320:
		phy_fill_limit_320m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
		break;
	}
}
1330 
1331 static void rtw89_phy_set_txpwr_limit_be(struct rtw89_dev *rtwdev,
1332 					 const struct rtw89_chan *chan,
1333 					 enum rtw89_phy_idx phy_idx)
1334 {
1335 	struct rtw89_txpwr_limit_be lmt;
1336 	const s8 *ptr;
1337 	u32 addr, val;
1338 	u8 i, j;
1339 
1340 	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_be) !=
1341 		     RTW89_TXPWR_LMT_PAGE_SIZE_BE);
1342 
1343 	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
1344 		    "[TXPWR] set txpwr limit on band %d bw %d\n",
1345 		    chan->band_type, chan->band_width);
1346 
1347 	addr = R_BE_PWR_LMT;
1348 	for (i = 0; i <= RTW89_NSS_2; i++) {
1349 		rtw89_phy_fill_limit_be(rtwdev, chan, &lmt, i);
1350 
1351 		ptr = (s8 *)&lmt;
1352 		for (j = 0; j < RTW89_TXPWR_LMT_PAGE_SIZE_BE;
1353 		     j += 4, addr += 4, ptr += 4) {
1354 			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
1355 			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
1356 			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
1357 			      u32_encode_bits(ptr[3], GENMASK(31, 24));
1358 
1359 			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
1360 		}
1361 	}
1362 }
1363 
/* Read the RU TX power limits for all RU sizes (26/52/106 and the 52+26 /
 * 106+26 combinations) at one sub-channel, storing them at slot `index`.
 */
static void fill_limit_ru_each(struct rtw89_dev *rtwdev, u8 index,
			       struct rtw89_txpwr_limit_ru_be *lmt_ru,
			       u8 band, u8 ntx, u8 ch)
{
	lmt_ru->ru26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU26, ntx, ch);
	lmt_ru->ru52[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52, ntx, ch);
	lmt_ru->ru106[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106, ntx, ch);
	lmt_ru->ru52_26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52_26, ntx, ch);
	lmt_ru->ru106_26[index] =
		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106_26, ntx, ch);
}
1379 
/* RU limits for a 20 MHz channel: a single slot at the center channel. */
static void phy_fill_limit_ru_20m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch);
}
1386 
1387 static void phy_fill_limit_ru_40m_be(struct rtw89_dev *rtwdev,
1388 				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
1389 				     u8 band, u8 ntx, u8 ch)
1390 {
1391 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 2);
1392 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch + 2);
1393 }
1394 
1395 static void phy_fill_limit_ru_80m_be(struct rtw89_dev *rtwdev,
1396 				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
1397 				     u8 band, u8 ntx, u8 ch)
1398 {
1399 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 6);
1400 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 2);
1401 	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch + 2);
1402 	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch + 6);
1403 }
1404 
1405 static void phy_fill_limit_ru_160m_be(struct rtw89_dev *rtwdev,
1406 				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
1407 				      u8 band, u8 ntx, u8 ch)
1408 {
1409 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 14);
1410 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 10);
1411 	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 6);
1412 	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 2);
1413 	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch + 2);
1414 	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch + 6);
1415 	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch + 10);
1416 	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch + 14);
1417 }
1418 
1419 static void phy_fill_limit_ru_320m_be(struct rtw89_dev *rtwdev,
1420 				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
1421 				      u8 band, u8 ntx, u8 ch)
1422 {
1423 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 30);
1424 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 26);
1425 	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 22);
1426 	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 18);
1427 	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch - 14);
1428 	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch - 10);
1429 	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch - 6);
1430 	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch - 2);
1431 	fill_limit_ru_each(rtwdev, 8, lmt_ru, band, ntx, ch + 2);
1432 	fill_limit_ru_each(rtwdev, 9, lmt_ru, band, ntx, ch + 6);
1433 	fill_limit_ru_each(rtwdev, 10, lmt_ru, band, ntx, ch + 10);
1434 	fill_limit_ru_each(rtwdev, 11, lmt_ru, band, ntx, ch + 14);
1435 	fill_limit_ru_each(rtwdev, 12, lmt_ru, band, ntx, ch + 18);
1436 	fill_limit_ru_each(rtwdev, 13, lmt_ru, band, ntx, ch + 22);
1437 	fill_limit_ru_each(rtwdev, 14, lmt_ru, band, ntx, ch + 26);
1438 	fill_limit_ru_each(rtwdev, 15, lmt_ru, band, ntx, ch + 30);
1439 }
1440 
/* Zero the RU limit page and dispatch to the bandwidth-specific filler.
 * Unlisted bandwidth values leave the page all-zero.
 */
static void rtw89_phy_fill_limit_ru_be(struct rtw89_dev *rtwdev,
				       const struct rtw89_chan *chan,
				       struct rtw89_txpwr_limit_ru_be *lmt_ru,
				       u8 ntx)
{
	u8 band = chan->band_type;
	u8 ch = chan->channel;
	u8 bw = chan->band_width;

	memset(lmt_ru, 0, sizeof(*lmt_ru));

	switch (bw) {
	case RTW89_CHANNEL_WIDTH_20:
		phy_fill_limit_ru_20m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_40:
		phy_fill_limit_ru_40m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_80:
		phy_fill_limit_ru_80m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_160:
		phy_fill_limit_ru_160m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	case RTW89_CHANNEL_WIDTH_320:
		phy_fill_limit_ru_320m_be(rtwdev, lmt_ru, band, ntx, ch);
		break;
	}
}
1470 
1471 static void rtw89_phy_set_txpwr_limit_ru_be(struct rtw89_dev *rtwdev,
1472 					    const struct rtw89_chan *chan,
1473 					    enum rtw89_phy_idx phy_idx)
1474 {
1475 	struct rtw89_txpwr_limit_ru_be lmt_ru;
1476 	const s8 *ptr;
1477 	u32 addr, val;
1478 	u8 i, j;
1479 
1480 	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_ru_be) !=
1481 		     RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE);
1482 
1483 	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
1484 		    "[TXPWR] set txpwr limit ru on band %d bw %d\n",
1485 		    chan->band_type, chan->band_width);
1486 
1487 	addr = R_BE_PWR_RU_LMT;
1488 	for (i = 0; i <= RTW89_NSS_2; i++) {
1489 		rtw89_phy_fill_limit_ru_be(rtwdev, chan, &lmt_ru, i);
1490 
1491 		ptr = (s8 *)&lmt_ru;
1492 		for (j = 0; j < RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE;
1493 		     j += 4, addr += 4, ptr += 4) {
1494 			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
1495 			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
1496 			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
1497 			      u32_encode_bits(ptr[3], GENMASK(31, 24));
1498 
1499 			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
1500 		}
1501 	}
1502 }
1503 
/* PHY generation descriptor for the baseline BE (WiFi 7) chip generation:
 * register tables plus TX-power programming ops shared by BE chip drivers.
 */
const struct rtw89_phy_gen_def rtw89_phy_gen_be = {
	.cr_base = 0x20000,	/* base offset for BB control registers */
	.physt_bmp_start = R_PHY_STS_BITMAP_ADDR_START,
	.physt_bmp_eht = R_PHY_STS_BITMAP_EHT,
	.ccx = &rtw89_ccx_regs_be,	/* channel/IFS measurement registers */
	.physts = &rtw89_physts_regs_be,
	.cfo = &rtw89_cfo_regs_be,
	.bb_wrap = &rtw89_bb_wrap_regs_be,
	.phy0_phy1_offset = rtw89_phy0_phy1_offset_be,
	.config_bb_gain = rtw89_phy_config_bb_gain_be,
	.preinit_rf_nctl = rtw89_phy_preinit_rf_nctl_be,
	.bb_wrap_init = rtw89_phy_bb_wrap_init_be,
	.ch_info_init = rtw89_phy_ch_info_init_be,

	/* TX power programming entry points (byrate/offset/limit/limit-RU) */
	.set_txpwr_byrate = rtw89_phy_set_txpwr_byrate_be,
	.set_txpwr_offset = rtw89_phy_set_txpwr_offset_be,
	.set_txpwr_limit = rtw89_phy_set_txpwr_limit_be,
	.set_txpwr_limit_ru = rtw89_phy_set_txpwr_limit_ru_be,
};
EXPORT_SYMBOL(rtw89_phy_gen_be);
1524 
/* PHY generation descriptor for the v1 BE variant (presumably newer BE
 * chips, e.g. the BE4 register layout referenced below — confirm against
 * the chips that select it).  Differs from rtw89_phy_gen_be in cr_base,
 * the *_v1/BE4 register tables, and a few per-variant ops; the TX power
 * programming ops are shared with the baseline BE generation.
 */
const struct rtw89_phy_gen_def rtw89_phy_gen_be_v1 = {
	.cr_base = 0x0,		/* no extra BB register base offset on v1 */
	.physt_bmp_start = R_PHY_STS_BITMAP_ADDR_START_BE4,
	.physt_bmp_eht = R_PHY_STS_BITMAP_EHT_BE4,
	.ccx = &rtw89_ccx_regs_be_v1,
	.physts = &rtw89_physts_regs_be_v1,
	.cfo = &rtw89_cfo_regs_be_v1,
	.bb_wrap = &rtw89_bb_wrap_regs_be_v1,
	.phy0_phy1_offset = rtw89_phy0_phy1_offset_be_v1,
	.config_bb_gain = rtw89_phy_config_bb_gain_be,	/* shared with BE */
	.preinit_rf_nctl = rtw89_phy_preinit_rf_nctl_be_v1,
	.bb_wrap_init = rtw89_phy_bb_wrap_init_be,	/* shared with BE */
	.ch_info_init = rtw89_phy_ch_info_init_be_v1,

	/* TX power programming entry points, shared with rtw89_phy_gen_be */
	.set_txpwr_byrate = rtw89_phy_set_txpwr_byrate_be,
	.set_txpwr_offset = rtw89_phy_set_txpwr_offset_be,
	.set_txpwr_limit = rtw89_phy_set_txpwr_limit_be,
	.set_txpwr_limit_ru = rtw89_phy_set_txpwr_limit_ru_be,
};
EXPORT_SYMBOL(rtw89_phy_gen_be_v1);
1545