xref: /linux/drivers/net/wireless/realtek/rtw89/phy_be.c (revision 8a5f956a9fb7d74fff681145082acfad5afa6bb8)
1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2023  Realtek Corporation
3  */
4 
5 #include "debug.h"
6 #include "mac.h"
7 #include "phy.h"
8 #include "reg.h"
9 
/* CCX (IFS-CLM / NHM channel-congestion measurement) register map for
 * BE-generation chips.  The generation-agnostic CCX helpers access all
 * trigger/period/counter fields through this table.
 */
static const struct rtw89_ccx_regs rtw89_ccx_regs_be = {
	.setting_addr = R_CCX,
	.edcca_opt_mask = B_CCX_EDCCA_OPT_MSK_V1,
	.measurement_trig_mask = B_MEASUREMENT_TRIG_MSK,
	.trig_opt_mask = B_CCX_TRIG_OPT_MSK,
	.en_mask = B_CCX_EN_MSK,
	.ifs_cnt_addr = R_IFS_COUNTER,
	.ifs_clm_period_mask = B_IFS_CLM_PERIOD_MSK,
	.ifs_clm_cnt_unit_mask = B_IFS_CLM_COUNTER_UNIT_MSK,
	.ifs_clm_cnt_clear_mask = B_IFS_COUNTER_CLR_MSK,
	.ifs_collect_en_mask = B_IFS_COLLECT_EN,
	/* four configurable IFS histogram bins (T1..T4) */
	.ifs_t1_addr = R_IFS_T1,
	.ifs_t1_th_h_mask = B_IFS_T1_TH_HIGH_MSK,
	.ifs_t1_en_mask = B_IFS_T1_EN_MSK,
	.ifs_t1_th_l_mask = B_IFS_T1_TH_LOW_MSK,
	.ifs_t2_addr = R_IFS_T2,
	.ifs_t2_th_h_mask = B_IFS_T2_TH_HIGH_MSK,
	.ifs_t2_en_mask = B_IFS_T2_EN_MSK,
	.ifs_t2_th_l_mask = B_IFS_T2_TH_LOW_MSK,
	.ifs_t3_addr = R_IFS_T3,
	.ifs_t3_th_h_mask = B_IFS_T3_TH_HIGH_MSK,
	.ifs_t3_en_mask = B_IFS_T3_EN_MSK,
	.ifs_t3_th_l_mask = B_IFS_T3_TH_LOW_MSK,
	.ifs_t4_addr = R_IFS_T4,
	.ifs_t4_th_h_mask = B_IFS_T4_TH_HIGH_MSK,
	.ifs_t4_en_mask = B_IFS_T4_EN_MSK,
	.ifs_t4_th_l_mask = B_IFS_T4_TH_LOW_MSK,
	/* result counters (TX, CCA, FA, histogram, averages, totals) */
	.ifs_clm_tx_cnt_addr = R_IFS_CLM_TX_CNT_V1,
	.ifs_clm_edcca_excl_cca_fa_mask = B_IFS_CLM_EDCCA_EXCLUDE_CCA_FA_MSK,
	.ifs_clm_tx_cnt_msk = B_IFS_CLM_TX_CNT_MSK,
	.ifs_clm_cca_addr = R_IFS_CLM_CCA_V1,
	.ifs_clm_ofdmcca_excl_fa_mask = B_IFS_CLM_OFDMCCA_EXCLUDE_FA_MSK,
	.ifs_clm_cckcca_excl_fa_mask = B_IFS_CLM_CCKCCA_EXCLUDE_FA_MSK,
	.ifs_clm_fa_addr = R_IFS_CLM_FA_V1,
	.ifs_clm_ofdm_fa_mask = B_IFS_CLM_OFDM_FA_MSK,
	.ifs_clm_cck_fa_mask = B_IFS_CLM_CCK_FA_MSK,
	.ifs_his_addr = R_IFS_HIS_V1,
	.ifs_t4_his_mask = B_IFS_T4_HIS_MSK,
	.ifs_t3_his_mask = B_IFS_T3_HIS_MSK,
	.ifs_t2_his_mask = B_IFS_T2_HIS_MSK,
	.ifs_t1_his_mask = B_IFS_T1_HIS_MSK,
	.ifs_avg_l_addr = R_IFS_AVG_L_V1,
	.ifs_t2_avg_mask = B_IFS_T2_AVG_MSK,
	.ifs_t1_avg_mask = B_IFS_T1_AVG_MSK,
	.ifs_avg_h_addr = R_IFS_AVG_H_V1,
	.ifs_t4_avg_mask = B_IFS_T4_AVG_MSK,
	.ifs_t3_avg_mask = B_IFS_T3_AVG_MSK,
	.ifs_cca_l_addr = R_IFS_CCA_L_V1,
	.ifs_t2_cca_mask = B_IFS_T2_CCA_MSK,
	.ifs_t1_cca_mask = B_IFS_T1_CCA_MSK,
	.ifs_cca_h_addr = R_IFS_CCA_H_V1,
	.ifs_t4_cca_mask = B_IFS_T4_CCA_MSK,
	.ifs_t3_cca_mask = B_IFS_T3_CCA_MSK,
	.ifs_total_addr = R_IFSCNT_V1,
	.ifs_cnt_done_mask = B_IFSCNT_DONE_MSK,
	.ifs_total_mask = B_IFSCNT_TOTAL_CNT_MSK,
	/* NHM (noise histogram measurement) */
	.nhm = R_NHM_BE,
	.nhm_ready = B_NHM_READY_BE_MSK,
	.nhm_config = R_NHM_CFG,
	.nhm_period_mask = B_NHM_PERIOD_MSK,
	.nhm_unit_mask = B_NHM_COUNTER_MSK,
	.nhm_include_cca_mask = B_NHM_INCLUDE_CCA_MSK,
	.nhm_en_mask = B_NHM_EN_MSK,
	.nhm_method = R_NHM_TH9,
	.nhm_pwr_method_msk = B_NHM_PWDB_METHOD_MSK,
};
76 
/* PHY status (PLCP histogram) register map for BE-generation chips. */
static const struct rtw89_physts_regs rtw89_physts_regs_be = {
	.setting_addr = R_PLCP_HISTOGRAM,
	.dis_trigger_fail_mask = B_STS_DIS_TRIG_BY_FAIL,
	.dis_trigger_brk_mask = B_STS_DIS_TRIG_BY_BRK,
};
82 
/* CFO (carrier frequency offset) compensation register map, BE generation. */
static const struct rtw89_cfo_regs rtw89_cfo_regs_be = {
	.comp = R_DCFO_WEIGHT_V1,
	.weighting_mask = B_DCFO_WEIGHT_MSK_V1,
	.comp_seg0 = R_DCFO_OPT_V1,
	.valid_0_mask = B_DCFO_OPT_EN_V1,
};
89 
90 static u32 rtw89_phy0_phy1_offset_be(struct rtw89_dev *rtwdev, u32 addr)
91 {
92 	u32 phy_page = addr >> 8;
93 	u32 ofst = 0;
94 
95 	if ((phy_page >= 0x4 && phy_page <= 0xF) ||
96 	    (phy_page >= 0x20 && phy_page <= 0x2B) ||
97 	    (phy_page >= 0x40 && phy_page <= 0x4f) ||
98 	    (phy_page >= 0x60 && phy_page <= 0x6f) ||
99 	    (phy_page >= 0xE4 && phy_page <= 0xE5) ||
100 	    (phy_page >= 0xE8 && phy_page <= 0xED))
101 		ofst = 0x1000;
102 	else
103 		ofst = 0x0;
104 
105 	return ofst;
106 }
107 
/* Decoded view of the "address" word of a BB gain table entry.
 * NOTE(review): the byte-wise overlay assumes little-endian layout of the
 * u32 -- confirm against the parameter file format.
 */
union rtw89_phy_bb_gain_arg_be {
	u32 addr;
	struct {
		u8 type; /* meaning depends on cfg_type; sub-fields below */
#define BB_GAIN_TYPE_SUB0_BE GENMASK(3, 0)
#define BB_GAIN_TYPE_SUB1_BE GENMASK(7, 4)
		u8 path_bw; /* RF path in low nibble, bandwidth in high nibble */
#define BB_GAIN_PATH_BE GENMASK(3, 0)
#define BB_GAIN_BW_BE GENMASK(7, 4)
		u8 gain_band; /* bounds-checked against RTW89_BB_GAIN_BAND_NR_BE */
		u8 cfg_type; /* dispatched in rtw89_phy_config_bb_gain_be() */
	} __packed;
} __packed;
121 
122 static void
123 rtw89_phy_cfg_bb_gain_error_be(struct rtw89_dev *rtwdev,
124 			       union rtw89_phy_bb_gain_arg_be arg, u32 data)
125 {
126 	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
127 	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
128 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
129 	u8 gband = arg.gain_band;
130 	u8 type = arg.type;
131 	int i;
132 
133 	switch (type) {
134 	case 0:
135 		for (i = 0; i < 4; i++, data >>= 8)
136 			gain->lna_gain[gband][bw_type][path][i] = data & 0xff;
137 		break;
138 	case 1:
139 		for (i = 4; i < 7; i++, data >>= 8)
140 			gain->lna_gain[gband][bw_type][path][i] = data & 0xff;
141 		break;
142 	case 2:
143 		for (i = 0; i < 2; i++, data >>= 8)
144 			gain->tia_gain[gband][bw_type][path][i] = data & 0xff;
145 		break;
146 	default:
147 		rtw89_warn(rtwdev,
148 			   "bb gain error {0x%x:0x%x} with unknown type: %d\n",
149 			   arg.addr, data, type);
150 		break;
151 	}
152 }
153 
154 static void
155 rtw89_phy_cfg_bb_rpl_ofst_be(struct rtw89_dev *rtwdev,
156 			     union rtw89_phy_bb_gain_arg_be arg, u32 data)
157 {
158 	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
159 	u8 type_sub0 = u8_get_bits(arg.type, BB_GAIN_TYPE_SUB0_BE);
160 	u8 type_sub1 = u8_get_bits(arg.type, BB_GAIN_TYPE_SUB1_BE);
161 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
162 	u8 gband = arg.gain_band;
163 	u8 ofst = 0;
164 	int i;
165 
166 	switch (type_sub1) {
167 	case RTW89_CMAC_BW_20M:
168 		gain->rpl_ofst_20[gband][path][0] = (s8)data;
169 		break;
170 	case RTW89_CMAC_BW_40M:
171 		for (i = 0; i < RTW89_BW20_SC_40M; i++, data >>= 8)
172 			gain->rpl_ofst_40[gband][path][i] = data & 0xff;
173 		break;
174 	case RTW89_CMAC_BW_80M:
175 		for (i = 0; i < RTW89_BW20_SC_80M; i++, data >>= 8)
176 			gain->rpl_ofst_80[gband][path][i] = data & 0xff;
177 		break;
178 	case RTW89_CMAC_BW_160M:
179 		if (type_sub0 == 0)
180 			ofst = 0;
181 		else
182 			ofst = RTW89_BW20_SC_80M;
183 
184 		for (i = 0; i < RTW89_BW20_SC_80M; i++, data >>= 8)
185 			gain->rpl_ofst_160[gband][path][i + ofst] = data & 0xff;
186 		break;
187 	default:
188 		rtw89_warn(rtwdev,
189 			   "bb rpl ofst {0x%x:0x%x} with unknown type_sub1: %d\n",
190 			   arg.addr, data, type_sub1);
191 		break;
192 	}
193 }
194 
195 static void
196 rtw89_phy_cfg_bb_gain_op1db_be(struct rtw89_dev *rtwdev,
197 			       union rtw89_phy_bb_gain_arg_be arg, u32 data)
198 {
199 	struct rtw89_phy_bb_gain_info_be *gain = &rtwdev->bb_gain.be;
200 	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
201 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
202 	u8 gband = arg.gain_band;
203 	u8 type = arg.type;
204 	int i;
205 
206 	switch (type) {
207 	case 0:
208 		for (i = 0; i < 4; i++, data >>= 8)
209 			gain->lna_op1db[gband][bw_type][path][i] = data & 0xff;
210 		break;
211 	case 1:
212 		for (i = 4; i < 7; i++, data >>= 8)
213 			gain->lna_op1db[gband][bw_type][path][i] = data & 0xff;
214 		break;
215 	case 2:
216 		for (i = 0; i < 4; i++, data >>= 8)
217 			gain->tia_lna_op1db[gband][bw_type][path][i] = data & 0xff;
218 		break;
219 	case 3:
220 		for (i = 4; i < 8; i++, data >>= 8)
221 			gain->tia_lna_op1db[gband][bw_type][path][i] = data & 0xff;
222 		break;
223 	default:
224 		rtw89_warn(rtwdev,
225 			   "bb gain op1db {0x%x:0x%x} with unknown type: %d\n",
226 			   arg.addr, data, type);
227 		break;
228 	}
229 }
230 
231 static void rtw89_phy_config_bb_gain_be(struct rtw89_dev *rtwdev,
232 					const struct rtw89_reg2_def *reg,
233 					enum rtw89_rf_path rf_path,
234 					void *extra_data)
235 {
236 	const struct rtw89_chip_info *chip = rtwdev->chip;
237 	union rtw89_phy_bb_gain_arg_be arg = { .addr = reg->addr };
238 	struct rtw89_efuse *efuse = &rtwdev->efuse;
239 	u8 bw_type = u8_get_bits(arg.path_bw, BB_GAIN_BW_BE);
240 	u8 path = u8_get_bits(arg.path_bw, BB_GAIN_PATH_BE);
241 
242 	if (bw_type >= RTW89_BB_BW_NR_BE)
243 		return;
244 
245 	if (arg.gain_band >= RTW89_BB_GAIN_BAND_NR_BE)
246 		return;
247 
248 	if (path >= chip->rf_path_num)
249 		return;
250 
251 	if (arg.addr >= 0xf9 && arg.addr <= 0xfe) {
252 		rtw89_warn(rtwdev, "bb gain table with flow ctrl\n");
253 		return;
254 	}
255 
256 	switch (arg.cfg_type) {
257 	case 0:
258 		rtw89_phy_cfg_bb_gain_error_be(rtwdev, arg, reg->data);
259 		break;
260 	case 1:
261 		rtw89_phy_cfg_bb_rpl_ofst_be(rtwdev, arg, reg->data);
262 		break;
263 	case 2:
264 		/* ignore BB gain bypass */
265 		break;
266 	case 3:
267 		rtw89_phy_cfg_bb_gain_op1db_be(rtwdev, arg, reg->data);
268 		break;
269 	case 4:
270 		/* This cfg_type is only used by rfe_type >= 50 with eFEM */
271 		if (efuse->rfe_type < 50)
272 			break;
273 		fallthrough;
274 	default:
275 		rtw89_warn(rtwdev,
276 			   "bb gain {0x%x:0x%x} with unknown cfg type: %d\n",
277 			   arg.addr, reg->data, arg.cfg_type);
278 		break;
279 	}
280 }
281 
/* One-time bring-up of the RF n-ctl (IQK/DPK) blocks before NCTL is used.
 * The write sequence is order-dependent register programming; the DBCC-only
 * CR copies (path/CMAC 1) are touched only when DBCC is enabled.
 */
static void rtw89_phy_preinit_rf_nctl_be(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C0, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_GOTX_IQKDPK_C1, B_GOTX_IQKDPK, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IQKDPK_HC, B_IQKDPK_HC, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_CLK_GCK, B_CLK_GCK, 0x00fffff);
	rtw89_phy_write32_mask(rtwdev, R_IOQ_IQK_DPK, B_IOQ_IQK_DPK_CLKEN, 0x3);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST, B_IQK_DPK_RST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_PRST_C1, B_IQK_DPK_PRST, 0x1);
	rtw89_phy_write32_mask(rtwdev, R_TXRFC, B_TXRFC_RST, 0x1);

	if (rtwdev->dbcc_en) {
		rtw89_phy_write32_mask(rtwdev, R_IQK_DPK_RST_C1, B_IQK_DPK_RST, 0x1);
		rtw89_phy_write32_mask(rtwdev, R_TXRFC_C1, B_TXRFC_RST, 0x1);
	}
}
299 
300 static
301 void rtw89_phy_bb_wrap_pwr_by_macid_init(struct rtw89_dev *rtwdev)
302 {
303 	u32 macid_idx, cr, base_macid_lmt, max_macid = 32;
304 
305 	base_macid_lmt = R_BE_PWR_MACID_LMT_BASE;
306 
307 	for (macid_idx = 0; macid_idx < 4 * max_macid; macid_idx += 4) {
308 		cr = base_macid_lmt + macid_idx;
309 		rtw89_write32(rtwdev, cr, 0x03007F7F);
310 	}
311 }
312 
313 static
314 void rtw89_phy_bb_wrap_tx_path_by_macid_init(struct rtw89_dev *rtwdev)
315 {
316 	int i, max_macid = 32;
317 	u32 cr = R_BE_PWR_MACID_PATH_BASE;
318 
319 	for (i = 0; i < max_macid; i++, cr += 4)
320 		rtw89_write32(rtwdev, cr, 0x03C86000);
321 }
322 
323 static void rtw89_phy_bb_wrap_tpu_set_all(struct rtw89_dev *rtwdev,
324 					  enum rtw89_mac_idx mac_idx)
325 {
326 	u32 addr;
327 
328 	for (addr = R_BE_PWR_BY_RATE; addr <= R_BE_PWR_BY_RATE_END; addr += 4)
329 		rtw89_write32(rtwdev, addr, 0);
330 	for (addr = R_BE_PWR_RULMT_START; addr <= R_BE_PWR_RULMT_END; addr += 4)
331 		rtw89_write32(rtwdev, addr, 0);
332 	for (addr = R_BE_PWR_RATE_OFST_CTRL; addr <= R_BE_PWR_RATE_OFST_END; addr += 4)
333 		rtw89_write32(rtwdev, addr, 0);
334 
335 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_REF_CTRL, mac_idx);
336 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_LMT_DB, 0);
337 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_LMTBF, mac_idx);
338 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_LMTBF_DB, 0);
339 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RATE_CTRL, mac_idx);
340 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_BYRATE_DB, 0);
341 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_RULMT, mac_idx);
342 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_RULMT_DB, 0);
343 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_SW, mac_idx);
344 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_OFST_SW_DB, 0);
345 }
346 
347 static
348 void rtw89_phy_bb_wrap_listen_path_en_init(struct rtw89_dev *rtwdev)
349 {
350 	u32 addr;
351 	int ret;
352 
353 	ret = rtw89_mac_check_mac_en(rtwdev, RTW89_MAC_1, RTW89_CMAC_SEL);
354 	if (ret)
355 		return;
356 
357 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_LISTEN_PATH, RTW89_MAC_1);
358 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_LISTEN_PATH_EN, 0x2);
359 }
360 
361 static void rtw89_phy_bb_wrap_force_cr_init(struct rtw89_dev *rtwdev,
362 					    enum rtw89_mac_idx mac_idx)
363 {
364 	u32 addr;
365 
366 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FORCE_LMT, mac_idx);
367 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_LMT_ON, 0);
368 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_BOOST, mac_idx);
369 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RATE_ON, 0);
370 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_OFST_RULMT, mac_idx);
371 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RU_ENON, 0);
372 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_RU_ON, 0);
373 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FORCE_MACID, mac_idx);
374 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_MACID_ALL, 0);
375 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_COEX_CTRL, mac_idx);
376 	rtw89_write32_mask(rtwdev, addr, B_BE_PWR_FORCE_COEX_ON, 0);
377 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RATE_CTRL, mac_idx);
378 	rtw89_write32_mask(rtwdev, addr, B_BE_FORCE_PWR_BY_RATE_EN, 0);
379 }
380 
381 static void rtw89_phy_bb_wrap_ftm_init(struct rtw89_dev *rtwdev,
382 				       enum rtw89_mac_idx mac_idx)
383 {
384 	u32 addr;
385 
386 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM, mac_idx);
387 	rtw89_write32(rtwdev, addr, 0xE4E431);
388 
389 	addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_FTM_SS, mac_idx);
390 	rtw89_write32_mask(rtwdev, addr, 0x7, 0);
391 }
392 
393 static void rtw89_phy_bb_wrap_ul_pwr(struct rtw89_dev *rtwdev)
394 {
395 	enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id;
396 	u8 mac_idx;
397 	u32 addr;
398 
399 	if (chip_id != RTL8922A)
400 		return;
401 
402 	for (mac_idx = 0; mac_idx < RTW89_MAC_NUM; mac_idx++) {
403 		addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_RSSI_TARGET_LMT, mac_idx);
404 		rtw89_write32(rtwdev, addr, 0x0201FE00);
405 		addr = rtw89_mac_reg_by_idx(rtwdev, R_BE_PWR_TH, mac_idx);
406 		rtw89_write32(rtwdev, addr, 0x00FFEC7E);
407 	}
408 }
409 
/* Initialize the BB wrapper (TX power unit) for one MAC.
 * The per-MACID and listen-path helpers program global CRs and are called
 * unconditionally; the remaining steps take @mac_idx.
 */
static void __rtw89_phy_bb_wrap_init_be(struct rtw89_dev *rtwdev,
					enum rtw89_mac_idx mac_idx)
{
	rtw89_phy_bb_wrap_pwr_by_macid_init(rtwdev);
	rtw89_phy_bb_wrap_tx_path_by_macid_init(rtwdev);
	rtw89_phy_bb_wrap_listen_path_en_init(rtwdev);
	rtw89_phy_bb_wrap_force_cr_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_ftm_init(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_tpu_set_all(rtwdev, mac_idx);
	rtw89_phy_bb_wrap_ul_pwr(rtwdev);
}
421 
/* BB wrapper init entry point: MAC0 always, MAC1 additionally under DBCC. */
static void rtw89_phy_bb_wrap_init_be(struct rtw89_dev *rtwdev)
{
	__rtw89_phy_bb_wrap_init_be(rtwdev, RTW89_MAC_0);
	if (rtwdev->dbcc_en)
		__rtw89_phy_bb_wrap_init_be(rtwdev, RTW89_MAC_1);
}
428 
/* Configure the channel-info reporting block (segment, bitmap, element
 * source, type and scale).  Order-dependent register programming.
 */
static void rtw89_phy_ch_info_init_be(struct rtw89_dev *rtwdev)
{
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG, B_CHINFO_SEG_LEN, 0x0);
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_SEG, B_CHINFO_SEG, 0xf);
	rtw89_phy_write32_mask(rtwdev, R_CHINFO_DATA, B_CHINFO_DATA_BITMAP, 0x1);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ELM_SRC, B_CHINFO_ELM_BITMAP, 0x40303);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_ELM_SRC, B_CHINFO_SRC, 0x0);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_TYPE_SCAL, B_CHINFO_TYPE, 0x3);
	rtw89_phy_set_phy_regs(rtwdev, R_CHINFO_TYPE_SCAL, B_CHINFO_SCAL, 0x0);
}
439 
/* One segment of the by-rate TX power CR layout. */
struct rtw89_byr_spec_ent_be {
	struct rtw89_rate_desc init; /* first rate descriptor of the segment */
	u8 num_of_idx;               /* number of consecutive rate indices */
	bool no_over_bw40;           /* segment absent for bandwidths > 40 MHz */
	bool no_multi_nss;           /* segment absent for NSS > 1 */
};
446 
/* By-rate table layout.  __phy_set_txpwr_byrate_be() walks this array in
 * order while advancing the CR address, so the entry order defines the
 * hardware register layout -- do not reorder.
 */
static const struct rtw89_byr_spec_ent_be rtw89_byr_spec_be[] = {
	{
		.init = { .rs = RTW89_RS_CCK },
		.num_of_idx = RTW89_RATE_CCK_NUM,
		.no_over_bw40 = true,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_OFDM },
		.num_of_idx = RTW89_RATE_OFDM_NUM,
		.no_multi_nss = true,
	},
	{
		/* MCS 14..15, non-OFDMA */
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		/* MCS 14..15, OFDMA */
		.init = { .rs = RTW89_RS_MCS, .idx = 14, .ofdma = RTW89_OFDMA },
		.num_of_idx = 2,
		.no_multi_nss = true,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = 14,
	},
	{
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_NON_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
	{
		.init = { .rs = RTW89_RS_MCS, .ofdma = RTW89_OFDMA },
		.num_of_idx = 14,
	},
	{
		.init = { .rs = RTW89_RS_HEDCM, .ofdma = RTW89_OFDMA },
		.num_of_idx = RTW89_RATE_HEDCM_NUM,
	},
};
486 
/* Write the by-rate TX power values for one (band, bw, nss) combination.
 * Values are read per rate descriptor, packed four s8s per 32-bit CR and
 * written at *addr, which is advanced for the caller.
 */
static
void __phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev, u8 band, u8 bw,
			       u8 nss, u32 *addr, enum rtw89_phy_idx phy_idx)
{
	const struct rtw89_byr_spec_ent_be *ent;
	struct rtw89_rate_desc desc;
	int pos = 0;
	int i, j;
	u32 val;
	s8 v[4];

	for (i = 0; i < ARRAY_SIZE(rtw89_byr_spec_be); i++) {
		ent = &rtw89_byr_spec_be[i];

		/* segments excluded for this bandwidth/NSS are skipped */
		if (bw > RTW89_CHANNEL_WIDTH_40 && ent->no_over_bw40)
			continue;
		if (nss > RTW89_NSS_1 && ent->no_multi_nss)
			continue;

		desc = ent->init;
		desc.nss = nss;
		for (j = 0; j < ent->num_of_idx; j++, desc.idx++) {
			v[pos] = rtw89_phy_read_txpwr_byrate(rtwdev, band, bw,
							     &desc);
			/* pos is deliberately not reset between segments:
			 * a partially filled word carries over so segments
			 * pack back-to-back into consecutive CR bytes.
			 */
			pos = (pos + 1) % 4;
			if (pos)
				continue;

			val = u32_encode_bits(v[0], GENMASK(7, 0)) |
			      u32_encode_bits(v[1], GENMASK(15, 8)) |
			      u32_encode_bits(v[2], GENMASK(23, 16)) |
			      u32_encode_bits(v[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, *addr, val);
			*addr += 4;
		}
	}
}
525 
/* Fill the complete by-rate TX power table for @chan's band.
 * The (bw, nss) iteration order matters: __phy_set_txpwr_byrate_be()
 * advances @addr as it writes, so the loop order defines the CR layout.
 */
static void rtw89_phy_set_txpwr_byrate_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	u32 addr = R_BE_PWR_BY_RATE;
	u8 band = chan->band_type;
	u8 bw, nss;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr byrate on band %d\n", band);

	for (bw = 0; bw <= RTW89_CHANNEL_WIDTH_320; bw++)
		for (nss = 0; nss <= RTW89_NSS_2; nss++)
			__phy_set_txpwr_byrate_be(rtwdev, band, bw, nss,
						  &addr, phy_idx);
}
542 
/* Write the per-rate-section TX power offsets (4 bits each) packed into a
 * single control word.
 */
static void rtw89_phy_set_txpwr_offset_be(struct rtw89_dev *rtwdev,
					  const struct rtw89_chan *chan,
					  enum rtw89_phy_idx phy_idx)
{
	struct rtw89_rate_desc desc = {
		.nss = RTW89_NSS_1,
		.rs = RTW89_RS_OFFSET,
	};
	u8 band = chan->band_type;
	s8 v[RTW89_RATE_OFFSET_NUM_BE] = {};
	u32 val;

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr offset on band %d\n", band);

	/* read one offset per rate section (bw argument unused: 0) */
	for (desc.idx = 0; desc.idx < RTW89_RATE_OFFSET_NUM_BE; desc.idx++)
		v[desc.idx] = rtw89_phy_read_txpwr_byrate(rtwdev, band, 0, &desc);

	val = u32_encode_bits(v[RTW89_RATE_OFFSET_CCK], GENMASK(3, 0)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_OFDM], GENMASK(7, 4)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HT], GENMASK(11, 8)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_VHT], GENMASK(15, 12)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_HE], GENMASK(19, 16)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_EHT], GENMASK(23, 20)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_HE], GENMASK(27, 24)) |
	      u32_encode_bits(v[RTW89_RATE_OFFSET_DLRU_EHT], GENMASK(31, 28));

	rtw89_mac_txpwr_write32(rtwdev, phy_idx, R_BE_PWR_RATE_OFST_CTRL, val);
}
572 
573 static void
574 fill_limit_nonbf_bf(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
575 		    u8 band, u8 bw, u8 ntx, u8 rs, u8 ch)
576 {
577 	int bf;
578 
579 	for (bf = 0; bf < RTW89_BF_NUM; bf++)
580 		(*ptr)[bf] = rtw89_phy_read_txpwr_limit(rtwdev, band, bw, ntx,
581 							rs, bf, ch);
582 }
583 
584 static void
585 fill_limit_nonbf_bf_min(struct rtw89_dev *rtwdev, s8 (*ptr)[RTW89_BF_NUM],
586 			u8 band, u8 bw, u8 ntx, u8 rs, u8 ch1, u8 ch2)
587 {
588 	s8 v1[RTW89_BF_NUM];
589 	s8 v2[RTW89_BF_NUM];
590 	int bf;
591 
592 	fill_limit_nonbf_bf(rtwdev, &v1, band, bw, ntx, rs, ch1);
593 	fill_limit_nonbf_bf(rtwdev, &v2, band, bw, ntx, rs, ch2);
594 
595 	for (bf = 0; bf < RTW89_BF_NUM; bf++)
596 		(*ptr)[bf] = min(v1[bf], v2[bf]);
597 }
598 
/* Fill TX power limits for a 20 MHz channel centered on @ch. */
static void phy_fill_limit_20m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, ch);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch);
}
612 
/* Fill TX power limits for a 40 MHz channel: the two 20 MHz subchannels
 * sit at ch +/- 2; CCK 20M uses the lower subchannel, OFDM follows the
 * primary channel.
 */
static void phy_fill_limit_40m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_20m, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_CCK, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->cck_40m, band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_CCK, ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch);
}
632 
/* Fill TX power limits for an 80 MHz channel: 20 MHz subchannels at
 * ch +/- {2,6}, 40 MHz halves at ch +/- 4, plus the stricter-of-both-40M
 * value used for the center ("0p5") position.
 */
static void phy_fill_limit_80m_be(struct rtw89_dev *rtwdev,
				  struct rtw89_txpwr_limit_be *lmt,
				  u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 4, ch + 4);
}
659 
/* Fill TX power limits for a 160 MHz channel: eight 20 MHz subchannels at
 * ch +/- {2,6,10,14}, four 40 MHz at ch +/- {4,12}, two 80 MHz at
 * ch +/- 8, plus the two min-of-adjacent-40M ("0p5"/"2p5") values.
 */
static void phy_fill_limit_160m_be(struct rtw89_dev *rtwdev,
				   struct rtw89_txpwr_limit_be *lmt,
				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 12, ch - 4);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 4, ch + 12);
}
708 
/* Fill TX power limits for a 320 MHz channel: sixteen 20 MHz subchannels
 * at ch +/- {2..30 step 4}, eight 40 MHz at ch +/- {4..28 step 8}, four
 * 80 MHz at ch +/- {8,24}, two 160 MHz at ch +/- 16, the full 320 MHz
 * value, and four min-of-adjacent-40M ("0p5".."6p5") values.
 */
static void phy_fill_limit_320m_be(struct rtw89_dev *rtwdev,
				   struct rtw89_txpwr_limit_be *lmt,
				   u8 band, u8 ntx, u8 ch, u8 pri_ch)
{
	fill_limit_nonbf_bf(rtwdev, &lmt->ofdm, band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_OFDM, pri_ch);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[0], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 30);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[1], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 26);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[2], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 22);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[3], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 18);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[4], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[5], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[6], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[7], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch - 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[8], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 2);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[9], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 6);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[10], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 10);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[11], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 14);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[12], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 18);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[13], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 22);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[14], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 26);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_20m[15], band,
			    RTW89_CHANNEL_WIDTH_20, ntx, RTW89_RS_MCS, ch + 30);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[0], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 28);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[1], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 20);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[2], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[3], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch - 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[4], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 4);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[5], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 12);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[6], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 20);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_40m[7], band,
			    RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS, ch + 28);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[0], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 24);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[1], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch - 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[2], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 8);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_80m[3], band,
			    RTW89_CHANNEL_WIDTH_80, ntx, RTW89_RS_MCS, ch + 24);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[0], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch - 16);
	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_160m[1], band,
			    RTW89_CHANNEL_WIDTH_160, ntx, RTW89_RS_MCS, ch + 16);

	fill_limit_nonbf_bf(rtwdev, &lmt->mcs_320m, band,
			    RTW89_CHANNEL_WIDTH_320, ntx, RTW89_RS_MCS, ch);

	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_0p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 28, ch - 20);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_2p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch - 12, ch - 4);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_4p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 4, ch + 12);
	fill_limit_nonbf_bf_min(rtwdev, &lmt->mcs_40m_6p5, band,
				RTW89_CHANNEL_WIDTH_40, ntx, RTW89_RS_MCS,
				ch + 20, ch + 28);
}
796 
797 static void rtw89_phy_fill_limit_be(struct rtw89_dev *rtwdev,
798 				    const struct rtw89_chan *chan,
799 				    struct rtw89_txpwr_limit_be *lmt,
800 				    u8 ntx)
801 {
802 	u8 band = chan->band_type;
803 	u8 pri_ch = chan->primary_channel;
804 	u8 ch = chan->channel;
805 	u8 bw = chan->band_width;
806 
807 	memset(lmt, 0, sizeof(*lmt));
808 
809 	switch (bw) {
810 	case RTW89_CHANNEL_WIDTH_20:
811 		phy_fill_limit_20m_be(rtwdev, lmt, band, ntx, ch);
812 		break;
813 	case RTW89_CHANNEL_WIDTH_40:
814 		phy_fill_limit_40m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
815 		break;
816 	case RTW89_CHANNEL_WIDTH_80:
817 		phy_fill_limit_80m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
818 		break;
819 	case RTW89_CHANNEL_WIDTH_160:
820 		phy_fill_limit_160m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
821 		break;
822 	case RTW89_CHANNEL_WIDTH_320:
823 		phy_fill_limit_320m_be(rtwdev, lmt, band, ntx, ch, pri_ch);
824 		break;
825 	}
826 }
827 
/* Write the TX power limit pages (one per NSS 0..2) to consecutive CRs
 * starting at R_BE_PWR_LMT, packing the page bytes four at a time.
 */
static void rtw89_phy_set_txpwr_limit_be(struct rtw89_dev *rtwdev,
					 const struct rtw89_chan *chan,
					 enum rtw89_phy_idx phy_idx)
{
	struct rtw89_txpwr_limit_be lmt;
	const s8 *ptr;
	u32 addr, val;
	u8 i, j;

	/* the struct is written to hardware verbatim; its size must match
	 * the CR page size exactly
	 */
	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_be) !=
		     RTW89_TXPWR_LMT_PAGE_SIZE_BE);

	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
		    "[TXPWR] set txpwr limit on band %d bw %d\n",
		    chan->band_type, chan->band_width);

	addr = R_BE_PWR_LMT;
	for (i = 0; i <= RTW89_NSS_2; i++) {
		rtw89_phy_fill_limit_be(rtwdev, chan, &lmt, i);

		/* walk the page as raw s8 bytes, four per 32-bit write */
		ptr = (s8 *)&lmt;
		for (j = 0; j < RTW89_TXPWR_LMT_PAGE_SIZE_BE;
		     j += 4, addr += 4, ptr += 4) {
			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
			      u32_encode_bits(ptr[3], GENMASK(31, 24));

			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
		}
	}
}
860 
861 static void fill_limit_ru_each(struct rtw89_dev *rtwdev, u8 index,
862 			       struct rtw89_txpwr_limit_ru_be *lmt_ru,
863 			       u8 band, u8 ntx, u8 ch)
864 {
865 	lmt_ru->ru26[index] =
866 		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU26, ntx, ch);
867 	lmt_ru->ru52[index] =
868 		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52, ntx, ch);
869 	lmt_ru->ru106[index] =
870 		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106, ntx, ch);
871 	lmt_ru->ru52_26[index] =
872 		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU52_26, ntx, ch);
873 	lmt_ru->ru106_26[index] =
874 		rtw89_phy_read_txpwr_limit_ru(rtwdev, band, RTW89_RU106_26, ntx, ch);
875 }
876 
/* 20MHz channel: a single RU position, centered on the channel itself. */
static void phy_fill_limit_ru_20m_be(struct rtw89_dev *rtwdev,
				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
				     u8 band, u8 ntx, u8 ch)
{
	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch);
}
883 
884 static void phy_fill_limit_ru_40m_be(struct rtw89_dev *rtwdev,
885 				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
886 				     u8 band, u8 ntx, u8 ch)
887 {
888 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 2);
889 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch + 2);
890 }
891 
892 static void phy_fill_limit_ru_80m_be(struct rtw89_dev *rtwdev,
893 				     struct rtw89_txpwr_limit_ru_be *lmt_ru,
894 				     u8 band, u8 ntx, u8 ch)
895 {
896 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 6);
897 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 2);
898 	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch + 2);
899 	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch + 6);
900 }
901 
902 static void phy_fill_limit_ru_160m_be(struct rtw89_dev *rtwdev,
903 				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
904 				      u8 band, u8 ntx, u8 ch)
905 {
906 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 14);
907 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 10);
908 	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 6);
909 	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 2);
910 	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch + 2);
911 	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch + 6);
912 	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch + 10);
913 	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch + 14);
914 }
915 
916 static void phy_fill_limit_ru_320m_be(struct rtw89_dev *rtwdev,
917 				      struct rtw89_txpwr_limit_ru_be *lmt_ru,
918 				      u8 band, u8 ntx, u8 ch)
919 {
920 	fill_limit_ru_each(rtwdev, 0, lmt_ru, band, ntx, ch - 30);
921 	fill_limit_ru_each(rtwdev, 1, lmt_ru, band, ntx, ch - 26);
922 	fill_limit_ru_each(rtwdev, 2, lmt_ru, band, ntx, ch - 22);
923 	fill_limit_ru_each(rtwdev, 3, lmt_ru, band, ntx, ch - 18);
924 	fill_limit_ru_each(rtwdev, 4, lmt_ru, band, ntx, ch - 14);
925 	fill_limit_ru_each(rtwdev, 5, lmt_ru, band, ntx, ch - 10);
926 	fill_limit_ru_each(rtwdev, 6, lmt_ru, band, ntx, ch - 6);
927 	fill_limit_ru_each(rtwdev, 7, lmt_ru, band, ntx, ch - 2);
928 	fill_limit_ru_each(rtwdev, 8, lmt_ru, band, ntx, ch + 2);
929 	fill_limit_ru_each(rtwdev, 9, lmt_ru, band, ntx, ch + 6);
930 	fill_limit_ru_each(rtwdev, 10, lmt_ru, band, ntx, ch + 10);
931 	fill_limit_ru_each(rtwdev, 11, lmt_ru, band, ntx, ch + 14);
932 	fill_limit_ru_each(rtwdev, 12, lmt_ru, band, ntx, ch + 18);
933 	fill_limit_ru_each(rtwdev, 13, lmt_ru, band, ntx, ch + 22);
934 	fill_limit_ru_each(rtwdev, 14, lmt_ru, band, ntx, ch + 26);
935 	fill_limit_ru_each(rtwdev, 15, lmt_ru, band, ntx, ch + 30);
936 }
937 
938 static void rtw89_phy_fill_limit_ru_be(struct rtw89_dev *rtwdev,
939 				       const struct rtw89_chan *chan,
940 				       struct rtw89_txpwr_limit_ru_be *lmt_ru,
941 				       u8 ntx)
942 {
943 	u8 band = chan->band_type;
944 	u8 ch = chan->channel;
945 	u8 bw = chan->band_width;
946 
947 	memset(lmt_ru, 0, sizeof(*lmt_ru));
948 
949 	switch (bw) {
950 	case RTW89_CHANNEL_WIDTH_20:
951 		phy_fill_limit_ru_20m_be(rtwdev, lmt_ru, band, ntx, ch);
952 		break;
953 	case RTW89_CHANNEL_WIDTH_40:
954 		phy_fill_limit_ru_40m_be(rtwdev, lmt_ru, band, ntx, ch);
955 		break;
956 	case RTW89_CHANNEL_WIDTH_80:
957 		phy_fill_limit_ru_80m_be(rtwdev, lmt_ru, band, ntx, ch);
958 		break;
959 	case RTW89_CHANNEL_WIDTH_160:
960 		phy_fill_limit_ru_160m_be(rtwdev, lmt_ru, band, ntx, ch);
961 		break;
962 	case RTW89_CHANNEL_WIDTH_320:
963 		phy_fill_limit_ru_320m_be(rtwdev, lmt_ru, band, ntx, ch);
964 		break;
965 	}
966 }
967 
968 static void rtw89_phy_set_txpwr_limit_ru_be(struct rtw89_dev *rtwdev,
969 					    const struct rtw89_chan *chan,
970 					    enum rtw89_phy_idx phy_idx)
971 {
972 	struct rtw89_txpwr_limit_ru_be lmt_ru;
973 	const s8 *ptr;
974 	u32 addr, val;
975 	u8 i, j;
976 
977 	BUILD_BUG_ON(sizeof(struct rtw89_txpwr_limit_ru_be) !=
978 		     RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE);
979 
980 	rtw89_debug(rtwdev, RTW89_DBG_TXPWR,
981 		    "[TXPWR] set txpwr limit ru on band %d bw %d\n",
982 		    chan->band_type, chan->band_width);
983 
984 	addr = R_BE_PWR_RU_LMT;
985 	for (i = 0; i <= RTW89_NSS_2; i++) {
986 		rtw89_phy_fill_limit_ru_be(rtwdev, chan, &lmt_ru, i);
987 
988 		ptr = (s8 *)&lmt_ru;
989 		for (j = 0; j < RTW89_TXPWR_LMT_RU_PAGE_SIZE_BE;
990 		     j += 4, addr += 4, ptr += 4) {
991 			val = u32_encode_bits(ptr[0], GENMASK(7, 0)) |
992 			      u32_encode_bits(ptr[1], GENMASK(15, 8)) |
993 			      u32_encode_bits(ptr[2], GENMASK(23, 16)) |
994 			      u32_encode_bits(ptr[3], GENMASK(31, 24));
995 
996 			rtw89_mac_txpwr_write32(rtwdev, phy_idx, addr, val);
997 		}
998 	}
999 }
1000 
/* Generation-specific PHY ops table for BE-generation chips: register
 * bases/layouts and the callbacks the common rtw89 PHY core dispatches to.
 */
const struct rtw89_phy_gen_def rtw89_phy_gen_be = {
	.cr_base = 0x20000,
	/* register layout descriptors consumed by the common PHY code */
	.ccx = &rtw89_ccx_regs_be,
	.physts = &rtw89_physts_regs_be,
	.cfo = &rtw89_cfo_regs_be,
	.phy0_phy1_offset = rtw89_phy0_phy1_offset_be,
	.config_bb_gain = rtw89_phy_config_bb_gain_be,
	.preinit_rf_nctl = rtw89_phy_preinit_rf_nctl_be,
	.bb_wrap_init = rtw89_phy_bb_wrap_init_be,
	.ch_info_init = rtw89_phy_ch_info_init_be,

	/* TX power programming callbacks (implemented in this file) */
	.set_txpwr_byrate = rtw89_phy_set_txpwr_byrate_be,
	.set_txpwr_offset = rtw89_phy_set_txpwr_offset_be,
	.set_txpwr_limit = rtw89_phy_set_txpwr_limit_be,
	.set_txpwr_limit_ru = rtw89_phy_set_txpwr_limit_ru_be,
};
EXPORT_SYMBOL(rtw89_phy_gen_be);
1018