// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/* Copyright(c) 2018-2019 Realtek Corporation
 */

#include <linux/module.h>
#include "main.h"
#include "coex.h"
#include "fw.h"
#include "tx.h"
#include "rx.h"
#include "phy.h"
#include "rtw8822c.h"
#include "rtw8822c_table.h"
#include "mac.h"
#include "reg.h"
#include "debug.h"
#include "util.h"
#include "bf.h"
#include "efuse.h"

#define IQK_DONE_8822C 0xaa

static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
				     u8 rx_path, bool is_tx2_path);

static void rtw8822ce_efuse_parsing(struct rtw_efuse *efuse,
				    struct rtw8822c_efuse *map)
{
	ether_addr_copy(efuse->addr, map->e.mac_addr);
}

static void rtw8822cu_efuse_parsing(struct rtw_efuse *efuse,
				    struct rtw8822c_efuse *map)
{
	ether_addr_copy(efuse->addr, map->u.mac_addr);
}

static void rtw8822cs_efuse_parsing(struct rtw_efuse *efuse,
				    struct rtw8822c_efuse *map)
{
	ether_addr_copy(efuse->addr, map->s.mac_addr);
}

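/* Parse the fields shared by all 8822c variants from the logical efuse map,
 * then let the bus-specific helper above copy the MAC address from its own
 * region of the map.
 */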
static int rtw8822c_read_efuse(struct rtw_dev *rtwdev, u8 *log_map)
{
	struct rtw_efuse *efuse = &rtwdev->efuse;
	struct rtw8822c_efuse *map;
	int i;

	map = (struct rtw8822c_efuse *)log_map;

	efuse->usb_mode_switch = u8_get_bits(map->usb_mode, BIT(7));
	efuse->rfe_option = map->rfe_option;
	efuse->rf_board_option = map->rf_board_option;
	efuse->crystal_cap = map->xtal_k & XCAP_MASK;
	efuse->channel_plan = map->channel_plan;
	efuse->country_code[0] = map->country_code[0];
	efuse->country_code[1] = map->country_code[1];
	efuse->bt_setting = map->rf_bt_setting;
	efuse->regd = map->rf_board_option & 0x7;
	efuse->thermal_meter[RF_PATH_A] = map->path_a_thermal;
	efuse->thermal_meter[RF_PATH_B] = map->path_b_thermal;
	efuse->thermal_meter_k =
		(map->path_a_thermal + map->path_b_thermal) >> 1;
	efuse->power_track_type = (map->tx_pwr_calibrate_rate >> 4) & 0xf;

	for (i = 0; i < 4; i++)
		efuse->txpwr_idx_table[i] = map->txpwr_idx_table[i];

	switch (rtw_hci_type(rtwdev)) {
	case RTW_HCI_TYPE_PCIE:
		rtw8822ce_efuse_parsing(efuse, map);
		break;
	case RTW_HCI_TYPE_USB:
		rtw8822cu_efuse_parsing(efuse, map);
		break;
	case RTW_HCI_TYPE_SDIO:
		rtw8822cs_efuse_parsing(efuse, map);
		break;
	default:
		/* unsupported now */
		return -ENOTSUPP;
	}

	return 0;
}

static void rtw8822c_header_file_init(struct rtw_dev *rtwdev, bool pre)
{
	rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
	rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_PI_ON);
	rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
	rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_PI_ON);

	if (pre)
		rtw_write32_clr(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
	else
		rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
}

static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
{
	rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
	rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
	rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
}

static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
				    struct rtw_backup_info *backup,
				    struct rtw_backup_info *backup_rf)
{
	u32 path, i;
	u32 val;
	u32 reg;
	u32 rf_addr[DACK_RF_8822C] = {0x8f};
	u32 addrs[DACK_REG_8822C] = {0x180c, 0x1810, 0x410c, 0x4110,
				     0x1c3c, 0x1c24, 0x1d70, 0x9b4,
				     0x1a00, 0x1a14, 0x1d58, 0x1c38,
				     0x1e24, 0x1e28, 0x1860, 0x4160};

	for (i = 0; i < DACK_REG_8822C; i++) {
		backup[i].len = 4;
		backup[i].reg = addrs[i];
		backup[i].val = rtw_read32(rtwdev, addrs[i]);
	}

	for (path = 0; path < DACK_PATH_8822C; path++) {
		for (i = 0; i < DACK_RF_8822C; i++) {
			reg = rf_addr[i];
			val = rtw_read_rf(rtwdev, path, reg, RFREG_MASK);
			backup_rf[path * i + i].reg = reg;
			backup_rf[path * i + i].val = val;
		}
	}
}

static void rtw8822c_dac_restore_reg(struct rtw_dev *rtwdev,
				     struct rtw_backup_info *backup,
				     struct rtw_backup_info *backup_rf)
{
	u32 path, i;
	u32 val;
	u32 reg;

	rtw_restore_reg(rtwdev, backup, DACK_REG_8822C);

	for (path = 0; path < DACK_PATH_8822C; path++) {
		for (i = 0; i < DACK_RF_8822C; i++) {
			val = backup_rf[path * i + i].val;
			reg = backup_rf[path * i + i].reg;
			rtw_write_rf(rtwdev, path, reg, RFREG_MASK, val);
		}
	}
}

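/* The I/Q samples handled below are 10-bit two's-complement values: 0x000 to
 * 0x1ff are positive and 0x200 to 0x3ff encode negatives as 0x400 - magnitude.
 * Track the signed minimum and maximum of a sample stream in that encoding.
 */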
static void rtw8822c_rf_minmax_cmp(struct rtw_dev *rtwdev, u32 value,
				   u32 *min, u32 *max)
{
	if (value >= 0x200) {
		if (*min >= 0x200) {
			if (*min > value)
				*min = value;
		} else {
			*min = value;
		}
		if (*max >= 0x200) {
			if (*max < value)
				*max = value;
		}
	} else {
		if (*min < 0x200) {
			if (*min > value)
				*min = value;
		}

		if (*max >= 0x200) {
			*max = value;
		} else {
			if (*max < value)
				*max = value;
		}
	}
}

static void __rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *v1, u32 *v2)
{
	if (*v1 >= 0x200 && *v2 >= 0x200) {
		if (*v1 > *v2)
			swap(*v1, *v2);
	} else if (*v1 < 0x200 && *v2 < 0x200) {
		if (*v1 > *v2)
			swap(*v1, *v2);
	} else if (*v1 < 0x200 && *v2 >= 0x200) {
		swap(*v1, *v2);
	}
}

static void rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
{
	u32 i, j;

	for (i = 0; i < DACK_SN_8822C - 1; i++) {
		for (j = 0; j < (DACK_SN_8822C - 1 - i); j++) {
			__rtw8822c_dac_iq_sort(rtwdev, &iv[j], &iv[j + 1]);
			__rtw8822c_dac_iq_sort(rtwdev, &qv[j], &qv[j + 1]);
		}
	}
}

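/* Average the sorted samples while skipping the 10 smallest and 10 largest
 * entries as outliers, and store the mean DC offset in the same 10-bit
 * signed encoding.
 */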
static void rtw8822c_dac_iq_offset(struct rtw_dev *rtwdev, u32 *vec, u32 *val)
{
	u32 p, m, t, i;

	m = 0;
	p = 0;
	for (i = 10; i < DACK_SN_8822C - 10; i++) {
		if (vec[i] > 0x200)
			m = (0x400 - vec[i]) + m;
		else
			p = vec[i] + p;
	}

	if (p > m) {
		t = p - m;
		t = t / (DACK_SN_8822C - 20);
	} else {
		t = m - p;
		t = t / (DACK_SN_8822C - 20);
		if (t != 0x0)
			t = 0x400 - t;
	}

	*val = t;
}

static u32 rtw8822c_get_path_write_addr(u8 path)
{
	u32 base_addr;

	switch (path) {
	case RF_PATH_A:
		base_addr = 0x1800;
		break;
	case RF_PATH_B:
		base_addr = 0x4100;
		break;
	default:
		WARN_ON(1);
		return -1;
	}

	return base_addr;
}

static u32 rtw8822c_get_path_read_addr(u8 path)
{
	u32 base_addr;

	switch (path) {
	case RF_PATH_A:
		base_addr = 0x2800;
		break;
	case RF_PATH_B:
		base_addr = 0x4500;
		break;
	default:
		WARN_ON(1);
		return -1;
	}

	return base_addr;
}

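/* A sample is usable only if its magnitude is at most 0x64 in the 10-bit
 * signed encoding; anything larger is treated as an overflow and discarded.
 */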
static bool rtw8822c_dac_iq_check(struct rtw_dev *rtwdev, u32 value)
{
	bool ret = true;

	if ((value >= 0x200 && (0x400 - value) > 0x64) ||
	    (value < 0x200 && value > 0x64)) {
		ret = false;
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] Error overflow\n");
	}

	return ret;
}

static void rtw8822c_dac_cal_iq_sample(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
{
	u32 temp;
	int i = 0, cnt = 0;

	while (i < DACK_SN_8822C && cnt < 10000) {
		cnt++;
		temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
		iv[i] = (temp & 0x3ff000) >> 12;
		qv[i] = temp & 0x3ff;

		if (rtw8822c_dac_iq_check(rtwdev, iv[i]) &&
		    rtw8822c_dac_iq_check(rtwdev, qv[i]))
			i++;
	}
}

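/* Re-sample the two extreme entries and re-evaluate until the spread between
 * the signed min and max of both I and Q is 5 LSB or less (or the retry
 * budget runs out), then derive the averaged I/Q offsets from the result.
 */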
static void rtw8822c_dac_cal_iq_search(struct rtw_dev *rtwdev,
				       u32 *iv, u32 *qv,
				       u32 *i_value, u32 *q_value)
{
	u32 i_max = 0, q_max = 0, i_min = 0, q_min = 0;
	u32 i_delta, q_delta;
	u32 temp;
	int i, cnt = 0;

	do {
		i_min = iv[0];
		i_max = iv[0];
		q_min = qv[0];
		q_max = qv[0];
		for (i = 0; i < DACK_SN_8822C; i++) {
			rtw8822c_rf_minmax_cmp(rtwdev, iv[i], &i_min, &i_max);
			rtw8822c_rf_minmax_cmp(rtwdev, qv[i], &q_min, &q_max);
		}

		if (i_max < 0x200 && i_min < 0x200)
			i_delta = i_max - i_min;
		else if (i_max >= 0x200 && i_min >= 0x200)
			i_delta = i_max - i_min;
		else
			i_delta = i_max + (0x400 - i_min);

		if (q_max < 0x200 && q_min < 0x200)
			q_delta = q_max - q_min;
		else if (q_max >= 0x200 && q_min >= 0x200)
			q_delta = q_max - q_min;
		else
			q_delta = q_max + (0x400 - q_min);

		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] i: min=0x%08x, max=0x%08x, delta=0x%08x\n",
			i_min, i_max, i_delta);
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] q: min=0x%08x, max=0x%08x, delta=0x%08x\n",
			q_min, q_max, q_delta);

		rtw8822c_dac_iq_sort(rtwdev, iv, qv);

		if (i_delta > 5 || q_delta > 5) {
			temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
			iv[0] = (temp & 0x3ff000) >> 12;
			qv[0] = temp & 0x3ff;
			temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
			iv[DACK_SN_8822C - 1] = (temp & 0x3ff000) >> 12;
			qv[DACK_SN_8822C - 1] = temp & 0x3ff;
		} else {
			break;
		}
	} while (cnt++ < 100);

	rtw8822c_dac_iq_offset(rtwdev, iv, i_value);
	rtw8822c_dac_iq_offset(rtwdev, qv, q_value);
}

static void rtw8822c_dac_cal_rf_mode(struct rtw_dev *rtwdev,
				     u32 *i_value, u32 *q_value)
{
	u32 iv[DACK_SN_8822C], qv[DACK_SN_8822C];
	u32 rf_a, rf_b;

	rf_a = rtw_read_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK);
	rf_b = rtw_read_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK);

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-A=0x%05x\n", rf_a);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-B=0x%05x\n", rf_b);

	rtw8822c_dac_cal_iq_sample(rtwdev, iv, qv);
	rtw8822c_dac_cal_iq_search(rtwdev, iv, qv, i_value, q_value);
}

static void rtw8822c_dac_bb_setting(struct rtw_dev *rtwdev)
{
	rtw_write32_mask(rtwdev, 0x1d58, 0xff8, 0x1ff);
	rtw_write32_mask(rtwdev, 0x1a00, 0x3, 0x2);
	rtw_write32_mask(rtwdev, 0x1a14, 0x300, 0x3);
	rtw_write32(rtwdev, 0x1d70, 0x7e7e7e7e);
	rtw_write32_mask(rtwdev, 0x180c, 0x3, 0x0);
	rtw_write32_mask(rtwdev, 0x410c, 0x3, 0x0);
	rtw_write32(rtwdev, 0x1b00, 0x00000008);
	rtw_write8(rtwdev, 0x1bcc, 0x3f);
	rtw_write32(rtwdev, 0x1b00, 0x0000000a);
	rtw_write8(rtwdev, 0x1bcc, 0x3f);
	rtw_write32_mask(rtwdev, 0x1e24, BIT(31), 0x0);
	rtw_write32_mask(rtwdev, 0x1e28, 0xf, 0x3);
}

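/* ADC DC-offset calibration (ADCK): measure the residual I/Q DC offset,
 * program the negated value as the compensation word, and repeat up to ten
 * times until the remaining offset is below 5 LSB on both I and Q.
 */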
static void rtw8822c_dac_cal_adc(struct rtw_dev *rtwdev,
				 u8 path, u32 *adc_ic, u32 *adc_qc)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 ic = 0, qc = 0, temp = 0;
	u32 base_addr;
	u32 path_sel;
	int i;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK path(%d)\n", path);

	base_addr = rtw8822c_get_path_write_addr(path);
	switch (path) {
	case RF_PATH_A:
		path_sel = 0xa0000;
		break;
	case RF_PATH_B:
		path_sel = 0x80000;
		break;
	default:
		WARN_ON(1);
		return;
	}

	/* ADCK step1 */
	rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x0);
	if (path == RF_PATH_B)
		rtw_write32(rtwdev, base_addr + 0x30, 0x30db8041);
	rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02dd08c4);
	rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
	rtw_write_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK, 0x10000);
	rtw_write_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK, 0x10000);
	for (i = 0; i < 10; i++) {
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK count=%d\n", i);
		rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8003);
		rtw_write32(rtwdev, 0x1c24, 0x00010002);
		rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] before: i=0x%x, q=0x%x\n", ic, qc);

		/* compensation value */
		if (ic != 0x0) {
			ic = 0x400 - ic;
			*adc_ic = ic;
		}
		if (qc != 0x0) {
			qc = 0x400 - qc;
			*adc_qc = qc;
		}
		temp = (ic & 0x3ff) | ((qc & 0x3ff) << 10);
		rtw_write32(rtwdev, base_addr + 0x68, temp);
		dm_info->dack_adck[path] = temp;
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK 0x%08x=0x%08x\n",
			base_addr + 0x68, temp);
		/* check ADC DC offset */
		rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8103);
		rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] after: i=0x%08x, q=0x%08x\n", ic, qc);
		if (ic >= 0x200)
			ic = 0x400 - ic;
		if (qc >= 0x200)
			qc = 0x400 - qc;
		if (ic < 5 && qc < 5)
			break;
	}

	/* ADCK step2 */
	rtw_write32(rtwdev, 0x1c3c, 0x00000003);
	rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);

	/* release pull low switch on IQ path */
	rtw_write_rf(rtwdev, path, 0x8f, BIT(13), 0x1);
}

static void rtw8822c_dac_cal_step1(struct rtw_dev *rtwdev, u8 path)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 base_addr;
	u32 read_addr;

	base_addr = rtw8822c_get_path_write_addr(path);
	read_addr = rtw8822c_get_path_read_addr(path);

	rtw_write32(rtwdev, base_addr + 0x68, dm_info->dack_adck[path]);
	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	if (path == RF_PATH_A) {
		rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
		rtw_write32(rtwdev, 0x1c38, 0xffffffff);
	}
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff81);
	rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xd8, 0x0008ff81);
	rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
	mdelay(2);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x000aff8d);
	mdelay(2);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
	mdelay(1);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
	rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
	mdelay(20);
	if (!check_hw_ready(rtwdev, read_addr + 0x08, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, read_addr + 0x34, 0x7fff80, 0xffff))
		rtw_err(rtwdev, "failed to wait for dack ready\n");
	rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
	mdelay(1);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
	rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
	rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
}

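/* Step 2: measure the remaining DAC DC offset and convert the 10-bit signed
 * result into an 8-bit compensation code (scaled by 2 * 6/5 and biased
 * around 0x80) that step 3 programs into the per-path registers.
 */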
static void rtw8822c_dac_cal_step2(struct rtw_dev *rtwdev,
				   u8 path, u32 *ic_out, u32 *qc_out)
{
	u32 base_addr;
	u32 ic, qc, ic_in, qc_in;

	base_addr = rtw8822c_get_path_write_addr(path);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, 0x0);
	rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, 0x8);
	rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, 0x0);
	rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, 0x8);

	rtw_write32(rtwdev, 0x1b00, 0x00000008);
	rtw_write8(rtwdev, 0x1bcc, 0x03f);
	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, 0x1c3c, 0x00088103);

	rtw8822c_dac_cal_rf_mode(rtwdev, &ic_in, &qc_in);
	ic = ic_in;
	qc = qc_in;

	/* compensation value */
	if (ic != 0x0)
		ic = 0x400 - ic;
	if (qc != 0x0)
		qc = 0x400 - qc;
	if (ic < 0x300) {
		ic = ic * 2 * 6 / 5;
		ic = ic + 0x80;
	} else {
		ic = (0x400 - ic) * 2 * 6 / 5;
		ic = 0x7f - ic;
	}
	if (qc < 0x300) {
		qc = qc * 2 * 6 / 5;
		qc = qc + 0x80;
	} else {
		qc = (0x400 - qc) * 2 * 6 / 5;
		qc = 0x7f - qc;
	}

	*ic_out = ic;
	*qc_out = qc;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] before i=0x%x, q=0x%x\n", ic_in, qc_in);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] after i=0x%x, q=0x%x\n", ic, qc);
}

static void rtw8822c_dac_cal_step3(struct rtw_dev *rtwdev, u8 path,
				   u32 adc_ic, u32 adc_qc,
				   u32 *ic_in, u32 *qc_in,
				   u32 *i_out, u32 *q_out)
{
	u32 base_addr;
	u32 read_addr;
	u32 ic, qc;
	u32 temp;

	base_addr = rtw8822c_get_path_write_addr(path);
	read_addr = rtw8822c_get_path_read_addr(path);
	ic = *ic_in;
	qc = *qc_in;

	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xbc, 0xc008ff81);
	rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, ic & 0xf);
	rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, (ic & 0xf0) >> 4);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xd8, 0xe008ff81);
	rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
	rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, qc & 0xf);
	rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, (qc & 0xf0) >> 4);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
	mdelay(2);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x6);
	mdelay(2);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
	mdelay(1);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
	rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
	mdelay(20);
	if (!check_hw_ready(rtwdev, read_addr + 0x24, 0x07f80000, ic) ||
	    !check_hw_ready(rtwdev, read_addr + 0x50, 0x07f80000, qc))
		rtw_err(rtwdev, "failed to write IQ vector to hardware\n");
	rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
	mdelay(1);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x3);
	rtw_write32(rtwdev, 0x9b4, 0xdb6db600);

	/* check DAC DC offset */
	temp = ((adc_ic + 0x10) & 0x3ff) | (((adc_qc + 0x10) & 0x3ff) << 10);
	rtw_write32(rtwdev, base_addr + 0x68, temp);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
	rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
	if (ic >= 0x10)
		ic = ic - 0x10;
	else
		ic = 0x400 - (0x10 - ic);

	if (qc >= 0x10)
		qc = qc - 0x10;
	else
		qc = 0x400 - (0x10 - qc);

	*i_out = ic;
	*q_out = qc;

	if (ic >= 0x200)
		ic = 0x400 - ic;
	if (qc >= 0x200)
		qc = 0x400 - qc;

	*ic_in = ic;
	*qc_in = qc;

	rtw_dbg(rtwdev, RTW_DBG_RFK,
		"[DACK] after DACK i=0x%x, q=0x%x\n", *i_out, *q_out);
}

static void rtw8822c_dac_cal_step4(struct rtw_dev *rtwdev, u8 path)
{
	u32 base_addr = rtw8822c_get_path_write_addr(path);

	rtw_write32(rtwdev, base_addr + 0x68, 0x0);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0x1, 0x0);
	rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x1);
}

static void rtw8822c_dac_cal_backup_vec(struct rtw_dev *rtwdev,
					u8 path, u8 vec, u32 w_addr, u32 r_addr)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u16 val;
	u32 i;

	if (WARN_ON(vec >= 2))
		return;

	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
		rtw_write32_mask(rtwdev, w_addr, 0xf0000000, i);
		val = (u16)rtw_read32_mask(rtwdev, r_addr, 0x7fc0000);
		dm_info->dack_msbk[path][vec][i] = val;
	}
}

static void rtw8822c_dac_cal_backup_path(struct rtw_dev *rtwdev, u8 path)
{
	u32 w_off = 0x1c;
	u32 r_off = 0x2c;
	u32 w_addr, r_addr;

	if (WARN_ON(path >= 2))
		return;

	/* backup I vector */
	w_addr = rtw8822c_get_path_write_addr(path) + 0xb0;
	r_addr = rtw8822c_get_path_read_addr(path) + 0x10;
	rtw8822c_dac_cal_backup_vec(rtwdev, path, 0, w_addr, r_addr);

	/* backup Q vector */
	w_addr = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
	r_addr = rtw8822c_get_path_read_addr(path) + 0x10 + r_off;
	rtw8822c_dac_cal_backup_vec(rtwdev, path, 1, w_addr, r_addr);
}

static void rtw8822c_dac_cal_backup_dck(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 val;

	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_A][0][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_1, 0xf);
	dm_info->dack_dck[RF_PATH_A][0][1] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_A][1][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_1, 0xf);
	dm_info->dack_dck[RF_PATH_A][1][1] = val;

	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_B][0][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_1, 0xf);
	dm_info->dack_dck[RF_PATH_B][1][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_B][0][1] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_1, 0xf);
	dm_info->dack_dck[RF_PATH_B][1][1] = val;
}

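/* Save the calibrated MSBK vectors and DCK codes into dm_info so that later
 * initializations can restore them instead of re-running the calibration.
 */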
static void rtw8822c_dac_cal_backup(struct rtw_dev *rtwdev)
{
	u32 temp[3];

	temp[0] = rtw_read32(rtwdev, 0x1860);
	temp[1] = rtw_read32(rtwdev, 0x4160);
	temp[2] = rtw_read32(rtwdev, 0x9b4);

	/* set clock */
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);

	/* backup path-A I/Q */
	rtw_write32_clr(rtwdev, 0x1830, BIT(30));
	rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
	rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_A);

	/* backup path-B I/Q */
	rtw_write32_clr(rtwdev, 0x4130, BIT(30));
	rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
	rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_B);

	rtw8822c_dac_cal_backup_dck(rtwdev);
	rtw_write32_set(rtwdev, 0x1830, BIT(30));
	rtw_write32_set(rtwdev, 0x4130, BIT(30));

	rtw_write32(rtwdev, 0x1860, temp[0]);
	rtw_write32(rtwdev, 0x4160, temp[1]);
	rtw_write32(rtwdev, 0x9b4, temp[2]);
}

static void rtw8822c_dac_cal_restore_dck(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 val;

	rtw_write32_set(rtwdev, REG_DCKA_I_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_A][0][0];
	rtw_write32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_A][0][1];
	rtw_write32_mask(rtwdev, REG_DCKA_I_1, 0xf, val);

	rtw_write32_set(rtwdev, REG_DCKA_Q_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_A][1][0];
	rtw_write32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_A][1][1];
	rtw_write32_mask(rtwdev, REG_DCKA_Q_1, 0xf, val);

	rtw_write32_set(rtwdev, REG_DCKB_I_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_B][0][0];
	rtw_write32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_B][0][1];
	rtw_write32_mask(rtwdev, REG_DCKB_I_1, 0xf, val);

	rtw_write32_set(rtwdev, REG_DCKB_Q_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_B][1][0];
	rtw_write32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_B][1][1];
	rtw_write32_mask(rtwdev, REG_DCKB_Q_1, 0xf, val);
}

static void rtw8822c_dac_cal_restore_prepare(struct rtw_dev *rtwdev)
{
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);

	rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x0);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x0);
	rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x0);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x0);

	rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x0);
	rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
	rtw_write32_mask(rtwdev, 0x18b4, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x18d0, BIT(0), 0x1);

	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x0);
	rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
	rtw_write32_mask(rtwdev, 0x41b4, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x41d0, BIT(0), 0x1);

	rtw_write32_mask(rtwdev, 0x18b0, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x18c0, BIT(14), 0x0);
	rtw_write32_mask(rtwdev, 0x18cc, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x18dc, BIT(14), 0x0);

	rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x1);

	rtw8822c_dac_cal_restore_dck(rtwdev);

	rtw_write32_mask(rtwdev, 0x18c0, 0x38000, 0x7);
	rtw_write32_mask(rtwdev, 0x18dc, 0x38000, 0x7);
	rtw_write32_mask(rtwdev, 0x41c0, 0x38000, 0x7);
	rtw_write32_mask(rtwdev, 0x41dc, 0x38000, 0x7);

	rtw_write32_mask(rtwdev, 0x18b8, BIT(26) | BIT(25), 0x1);
	rtw_write32_mask(rtwdev, 0x18d4, BIT(26) | BIT(25), 0x1);

	rtw_write32_mask(rtwdev, 0x41b0, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x41c0, BIT(14), 0x0);
	rtw_write32_mask(rtwdev, 0x41cc, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x41dc, BIT(14), 0x0);

	rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x1);

	rtw_write32_mask(rtwdev, 0x41b8, BIT(26) | BIT(25), 0x1);
	rtw_write32_mask(rtwdev, 0x41d4, BIT(26) | BIT(25), 0x1);
}

static bool rtw8822c_dac_cal_restore_wait(struct rtw_dev *rtwdev,
					  u32 target_addr, u32 toggle_addr)
{
	u32 cnt = 0;

	do {
		rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x0);
		rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x2);

		if (rtw_read32_mask(rtwdev, target_addr, 0xf) == 0x6)
			return true;

	} while (cnt++ < 100);

	return false;
}

static bool rtw8822c_dac_cal_restore_path(struct rtw_dev *rtwdev, u8 path)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 w_off = 0x1c;
	u32 r_off = 0x2c;
	u32 w_i, r_i, w_q, r_q;
	u32 value;
	u32 i;

	w_i = rtw8822c_get_path_write_addr(path) + 0xb0;
	r_i = rtw8822c_get_path_read_addr(path) + 0x08;
	w_q = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
	r_q = rtw8822c_get_path_read_addr(path) + 0x08 + r_off;

	if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_i, w_i + 0x8))
		return false;

	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
		rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
		value = dm_info->dack_msbk[path][0][i];
		rtw_write32_mask(rtwdev, w_i + 0x4, 0xff8, value);
		rtw_write32_mask(rtwdev, w_i, 0xf0000000, i);
		rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x1);
	}

	rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);

	if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_q, w_q + 0x8))
		return false;

	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
		rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
		value = dm_info->dack_msbk[path][1][i];
		rtw_write32_mask(rtwdev, w_q + 0x4, 0xff8, value);
		rtw_write32_mask(rtwdev, w_q, 0xf0000000, i);
		rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x1);
	}
	rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);

	rtw_write32_mask(rtwdev, w_i + 0x8, BIT(26) | BIT(25), 0x0);
	rtw_write32_mask(rtwdev, w_q + 0x8, BIT(26) | BIT(25), 0x0);
	rtw_write32_mask(rtwdev, w_i + 0x4, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, w_q + 0x4, BIT(0), 0x0);

	return true;
}

static bool __rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
{
	if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_A))
		return false;

	if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_B))
		return false;

	return true;
}

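/* Try to reuse a previous calibration: bail out if no backup exists yet,
 * otherwise reprogram the saved DCK codes and MSBK vectors and verify that
 * the hardware accepted them. Returns true only when the restore succeeded.
 */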
static bool rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 temp[3];

	/* sample the first element for both path's IQ vector */
	if (dm_info->dack_msbk[RF_PATH_A][0][0] == 0 &&
	    dm_info->dack_msbk[RF_PATH_A][1][0] == 0 &&
	    dm_info->dack_msbk[RF_PATH_B][0][0] == 0 &&
	    dm_info->dack_msbk[RF_PATH_B][1][0] == 0)
		return false;

	temp[0] = rtw_read32(rtwdev, 0x1860);
	temp[1] = rtw_read32(rtwdev, 0x4160);
	temp[2] = rtw_read32(rtwdev, 0x9b4);

	rtw8822c_dac_cal_restore_prepare(rtwdev);
	if (!check_hw_ready(rtwdev, 0x2808, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, 0x2834, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, 0x4508, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, 0x4534, 0x7fff80, 0xffff))
		return false;

	if (!__rtw8822c_dac_cal_restore(rtwdev)) {
		rtw_err(rtwdev, "failed to restore dack vectors\n");
		return false;
	}

	rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x1);
	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
	rtw_write32(rtwdev, 0x1860, temp[0]);
	rtw_write32(rtwdev, 0x4160, temp[1]);
	rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x1);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x1);
	rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x1);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x1);
	rtw_write32(rtwdev, 0x9b4, temp[2]);

	return true;
}

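/* DAC calibration (DACK) entry point: skip everything if saved results can be
 * restored; otherwise back up the affected BB/RF registers, run ADCK and the
 * per-path step1-4 sequence (up to ten iterations until the residual I/Q
 * offset drops below 5 LSB), restore the registers, and cache the results.
 */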
static void rtw8822c_rf_dac_cal(struct rtw_dev *rtwdev)
{
	struct rtw_backup_info backup_rf[DACK_RF_8822C * DACK_PATH_8822C];
	struct rtw_backup_info backup[DACK_REG_8822C];
	u32 ic = 0, qc = 0, i;
	u32 i_a = 0x0, q_a = 0x0, i_b = 0x0, q_b = 0x0;
	u32 ic_a = 0x0, qc_a = 0x0, ic_b = 0x0, qc_b = 0x0;
	u32 adc_ic_a = 0x0, adc_qc_a = 0x0, adc_ic_b = 0x0, adc_qc_b = 0x0;

	if (rtw8822c_dac_cal_restore(rtwdev))
		return;

	/* not able to restore, do it */

	rtw8822c_dac_backup_reg(rtwdev, backup, backup_rf);

	rtw8822c_dac_bb_setting(rtwdev);

	/* path-A */
	rtw8822c_dac_cal_adc(rtwdev, RF_PATH_A, &adc_ic_a, &adc_qc_a);
	for (i = 0; i < 10; i++) {
		rtw8822c_dac_cal_step1(rtwdev, RF_PATH_A);
		rtw8822c_dac_cal_step2(rtwdev, RF_PATH_A, &ic, &qc);
		ic_a = ic;
		qc_a = qc;

		rtw8822c_dac_cal_step3(rtwdev, RF_PATH_A, adc_ic_a, adc_qc_a,
				       &ic, &qc, &i_a, &q_a);

		if (ic < 5 && qc < 5)
			break;
	}
	rtw8822c_dac_cal_step4(rtwdev, RF_PATH_A);

	/* path-B */
	rtw8822c_dac_cal_adc(rtwdev, RF_PATH_B, &adc_ic_b, &adc_qc_b);
	for (i = 0; i < 10; i++) {
		rtw8822c_dac_cal_step1(rtwdev, RF_PATH_B);
		rtw8822c_dac_cal_step2(rtwdev, RF_PATH_B, &ic, &qc);
		ic_b = ic;
		qc_b = qc;

		rtw8822c_dac_cal_step3(rtwdev, RF_PATH_B, adc_ic_b, adc_qc_b,
				       &ic, &qc, &i_b, &q_b);

		if (ic < 5 && qc < 5)
			break;
	}
	rtw8822c_dac_cal_step4(rtwdev, RF_PATH_B);

	rtw_write32(rtwdev, 0x1b00, 0x00000008);
	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
	rtw_write8(rtwdev, 0x1bcc, 0x0);
	rtw_write32(rtwdev, 0x1b00, 0x0000000a);
	rtw_write8(rtwdev, 0x1bcc, 0x0);

	rtw8822c_dac_restore_reg(rtwdev, backup, backup_rf);

	/* backup results to restore, saving a lot of time */
	rtw8822c_dac_cal_backup(rtwdev);

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: ic=0x%x, qc=0x%x\n", ic_a, qc_a);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: ic=0x%x, qc=0x%x\n", ic_b, qc_b);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: i=0x%x, q=0x%x\n", i_a, q_a);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: i=0x%x, q=0x%x\n", i_b, q_b);
}

static void rtw8822c_rf_x2_check(struct rtw_dev *rtwdev)
{
	u8 x2k_busy;

	mdelay(1);
	x2k_busy = rtw_read_rf(rtwdev, RF_PATH_A, 0xb8, BIT(15));
	if (x2k_busy == 1) {
		rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0xC4440);
		rtw_write_rf(rtwdev, RF_PATH_A, 0xba, RFREG_MASK, 0x6840D);
		rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0x80440);
		mdelay(1);
	}
}

static void rtw8822c_set_power_trim(struct rtw_dev *rtwdev, s8 bb_gain[2][8])
{
#define RF_SET_POWER_TRIM(_path, _seq, _idx)				\
	do {								\
		rtw_write_rf(rtwdev, _path, 0x33, RFREG_MASK, _seq);	\
		rtw_write_rf(rtwdev, _path, 0x3f, RFREG_MASK,		\
			     bb_gain[_path][_idx]);			\
	} while (0)
	u8 path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_write_rf(rtwdev, path, 0xee, BIT(19), 1);
		RF_SET_POWER_TRIM(path, 0x0, 0);
		RF_SET_POWER_TRIM(path, 0x1, 1);
		RF_SET_POWER_TRIM(path, 0x2, 2);
		RF_SET_POWER_TRIM(path, 0x3, 2);
		RF_SET_POWER_TRIM(path, 0x4, 3);
		RF_SET_POWER_TRIM(path, 0x5, 4);
		RF_SET_POWER_TRIM(path, 0x6, 5);
		RF_SET_POWER_TRIM(path, 0x7, 6);
		RF_SET_POWER_TRIM(path, 0x8, 7);
		RF_SET_POWER_TRIM(path, 0x9, 3);
		RF_SET_POWER_TRIM(path, 0xa, 4);
		RF_SET_POWER_TRIM(path, 0xb, 5);
		RF_SET_POWER_TRIM(path, 0xc, 6);
		RF_SET_POWER_TRIM(path, 0xd, 7);
		RF_SET_POWER_TRIM(path, 0xe, 7);
		rtw_write_rf(rtwdev, path, 0xee, BIT(19), 0);
	}
#undef RF_SET_POWER_TRIM
}

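/* Read the per-band TX power trim values from the physical efuse (three 2 GHz
 * entries plus five 5 GHz entries per path) and, if any were programmed,
 * write them into the RF gain table via the helper above; DPD is then
 * disabled for all rates.
 */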
static void rtw8822c_power_trim(struct rtw_dev *rtwdev)
{
	u8 pg_pwr = 0xff, i, path, idx;
	s8 bb_gain[2][8] = {};
	u16 rf_efuse_2g[3] = {PPG_2GL_TXAB, PPG_2GM_TXAB, PPG_2GH_TXAB};
	u16 rf_efuse_5g[2][5] = {{PPG_5GL1_TXA, PPG_5GL2_TXA, PPG_5GM1_TXA,
				  PPG_5GM2_TXA, PPG_5GH1_TXA},
				 {PPG_5GL1_TXB, PPG_5GL2_TXB, PPG_5GM1_TXB,
				  PPG_5GM2_TXB, PPG_5GH1_TXB} };
	bool set = false;

	for (i = 0; i < ARRAY_SIZE(rf_efuse_2g); i++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[i], &pg_pwr);
		if (pg_pwr == EFUSE_READ_FAIL)
			continue;
		set = true;
		bb_gain[RF_PATH_A][i] = FIELD_GET(PPG_2G_A_MASK, pg_pwr);
		bb_gain[RF_PATH_B][i] = FIELD_GET(PPG_2G_B_MASK, pg_pwr);
	}

	for (i = 0; i < ARRAY_SIZE(rf_efuse_5g[0]); i++) {
		for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
			rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path][i],
						 &pg_pwr);
			if (pg_pwr == EFUSE_READ_FAIL)
				continue;
			set = true;
			idx = i + ARRAY_SIZE(rf_efuse_2g);
			bb_gain[path][idx] = FIELD_GET(PPG_5G_MASK, pg_pwr);
		}
	}
	if (set)
		rtw8822c_set_power_trim(rtwdev, bb_gain);

	rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
}

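/* Load the factory thermal meter trim from the physical efuse and program it
 * into RF register 0x43 for each path; give up silently if the efuse field
 * was never written.
 */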
static void rtw8822c_thermal_trim(struct rtw_dev *rtwdev)
{
	u16 rf_efuse[2] = {PPG_THERMAL_A, PPG_THERMAL_B};
	u8 pg_therm = 0xff, thermal[2] = {0}, path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse[path], &pg_therm);
		if (pg_therm == EFUSE_READ_FAIL)
			return;
		/* The efuse value of BIT(0) should be moved to BIT(3), and the
		 * value of BIT(1) to BIT(3) should be right shifted by 1 bit.
		 */
		thermal[path] = FIELD_GET(GENMASK(3, 1), pg_therm);
		thermal[path] |= FIELD_PREP(BIT(3), pg_therm & BIT(0));
		rtw_write_rf(rtwdev, path, 0x43, RF_THEMAL_MASK, thermal[path]);
	}
}

static void rtw8822c_pa_bias(struct rtw_dev *rtwdev)
{
	u16 rf_efuse_2g[2] = {PPG_PABIAS_2GA, PPG_PABIAS_2GB};
	u16 rf_efuse_5g[2] = {PPG_PABIAS_5GA, PPG_PABIAS_5GB};
	u8 pg_pa_bias = 0xff, path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[path],
					 &pg_pa_bias);
		if (pg_pa_bias == EFUSE_READ_FAIL)
			return;
		pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
		rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_2G_MASK, pg_pa_bias);
	}
	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path],
					 &pg_pa_bias);
		pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
		rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_5G_MASK, pg_pa_bias);
	}
}

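/* Coordinate RF calibration with BT through the firmware: before a
 * calibration, wait for any ongoing BT IQK to finish and tell the firmware
 * that WiFi RFK is starting; afterwards, tell it the calibration is done.
 * Failures are only logged and never abort the calibration.
 */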
static void rtw8822c_rfk_handshake(struct rtw_dev *rtwdev, bool is_before_k)
{
	struct rtw_dm_info *dm = &rtwdev->dm_info;
	u8 u1b_tmp;
	u8 u4b_tmp;
	int ret;

	if (is_before_k) {
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[RFK] WiFi / BT RFK handshake start!!\n");

		if (!dm->is_bt_iqk_timeout) {
			ret = read_poll_timeout(rtw_read32_mask, u4b_tmp,
						u4b_tmp == 0, 20, 600000, false,
						rtwdev, REG_PMC_DBG_CTRL1,
						BITS_PMC_BT_IQK_STS);
			if (ret) {
				rtw_dbg(rtwdev, RTW_DBG_RFK,
					"[RFK] Wait BT IQK finish timeout!!\n");
				dm->is_bt_iqk_timeout = true;
			}
		}

		rtw_fw_inform_rfk_status(rtwdev, true);

		ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
					u1b_tmp == 1, 20, 100000, false,
					rtwdev, REG_ARFR4, BIT_WL_RFK);
		if (ret)
			rtw_dbg(rtwdev, RTW_DBG_RFK,
				"[RFK] Send WiFi RFK start H2C cmd FAIL!!\n");
	} else {
		rtw_fw_inform_rfk_status(rtwdev, false);
		ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
					u1b_tmp == 1, 20, 100000, false,
					rtwdev, REG_ARFR4,
					BIT_WL_RFK);
		if (ret)
			rtw_dbg(rtwdev, RTW_DBG_RFK,
				"[RFK] Send WiFi RFK finish H2C cmd FAIL!!\n");

		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[RFK] WiFi / BT RFK handshake finish!!\n");
	}
}

static void rtw8822c_rfk_power_save(struct rtw_dev *rtwdev,
				    bool is_power_save)
{
	u8 path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_PS_EN,
				 is_power_save ? 0 : 1);
	}
}

static void rtw8822c_txgapk_backup_bb_reg(struct rtw_dev *rtwdev, const u32 reg[],
					  u32 reg_backup[], u32 reg_num)
{
	u32 i;

	for (i = 0; i < reg_num; i++) {
		reg_backup[i] = rtw_read32(rtwdev, reg[i]);

		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Backup BB 0x%x = 0x%x\n",
			reg[i], reg_backup[i]);
	}
}

static void rtw8822c_txgapk_reload_bb_reg(struct rtw_dev *rtwdev,
					  const u32 reg[], u32 reg_backup[],
					  u32 reg_num)
{
	u32 i;

	for (i = 0; i < reg_num; i++) {
		rtw_write32(rtwdev, reg[i], reg_backup[i]);
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Reload BB 0x%x = 0x%x\n",
			reg[i], reg_backup[i]);
	}
}

static bool check_rf_status(struct rtw_dev *rtwdev, u8 status)
{
	u8 reg_rf0_a, reg_rf0_b;

	reg_rf0_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A,
				    RF_MODE_TRXAGC, BIT_RF_MODE);
	reg_rf0_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B,
				    RF_MODE_TRXAGC, BIT_RF_MODE);

	if (reg_rf0_a == status || reg_rf0_b == status)
		return false;

	return true;
}

static void rtw8822c_txgapk_tx_pause(struct rtw_dev *rtwdev)
{
	bool status;
	int ret;

	rtw_write8(rtwdev, REG_TXPAUSE, BIT_AC_QUEUE);
	rtw_write32_mask(rtwdev, REG_TX_FIFO, BIT_STOP_TX, 0x2);

	ret = read_poll_timeout_atomic(check_rf_status, status, status,
				       2, 5000, false, rtwdev, 2);
	if (ret)
		rtw_warn(rtwdev, "failed to pause TX\n");

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Tx pause!!\n");
}

static void rtw8822c_txgapk_bb_dpk(struct rtw_dev *rtwdev, u8 path)
{
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	rtw_write32_mask(rtwdev, REG_ENFN, BIT_IQK_DPK_EN, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
			 BIT_IQK_DPK_CLOCK_SRC, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
			 BIT_IQK_DPK_RESET_SRC, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_EN_IOQ_IQK_DPK, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_TST_IQK2SET_SRC, 0x0);
	rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x1ff);

	if (path == RF_PATH_A) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x1);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
				 BIT_TX_SCALE_0DB, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x0);
	} else if (path == RF_PATH_B) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE2,
				 BIT_DIS_SHARERX_TXGAT, 0x1);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
				 BIT_TX_SCALE_0DB, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x0);
	}
	rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x2);
}

static void rtw8822c_txgapk_afe_dpk(struct rtw_dev *rtwdev, u8 path)
{
	u32 reg;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	if (path == RF_PATH_A) {
		reg = REG_ANAPAR_A;
	} else if (path == RF_PATH_B) {
		reg = REG_ANAPAR_B;
	} else {
		rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
		return;
	}

	rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, MASKDWORD);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x701f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x702f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x703f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x704f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x706f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70af0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bf0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cf0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70df0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ef0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
}

static void rtw8822c_txgapk_afe_dpk_restore(struct rtw_dev *rtwdev, u8 path)
{
	u32 reg;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	if (path == RF_PATH_A) {
		reg = REG_ANAPAR_A;
	} else if (path == RF_PATH_B) {
		reg = REG_ANAPAR_B;
	} else {
		rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
		return;
	}
	rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, 0xffa1005e);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70144041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70244041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70344041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70444041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70644041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ab8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bb8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cb8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70db8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70eb8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70fb8041);
}

static void rtw8822c_txgapk_bb_dpk_restore(struct rtw_dev *rtwdev, u8 path)
{
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x0);
	rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TIA_BYPASS, 0x0);
	rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TXBB, 0x0);

	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
	rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
	rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x1);
	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
	rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
	rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
	rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x0);

	if (path == RF_PATH_A) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x0);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
				 BIT_TX_SCALE_0DB, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x3);
	} else if (path == RF_PATH_B) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE2,
				 BIT_DIS_SHARERX_TXGAT, 0x0);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
				 BIT_TX_SCALE_0DB, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x3);
	}

	rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x0);
	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_CFIR_EN, 0x5);
}

static bool _rtw8822c_txgapk_gain_valid(struct rtw_dev *rtwdev, u32 gain)
{
	if ((FIELD_GET(BIT_GAIN_TX_PAD_H, gain) >= 0xc) &&
	    (FIELD_GET(BIT_GAIN_TX_PAD_L, gain) >= 0xe))
		return true;

	return false;
}

static void _rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev,
						 u8 band, u8 path)
{
	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
	u32 v, tmp_3f = 0;
	u8 gain, check_txgain;

	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);

	switch (band) {
	case RF_BAND_2G_OFDM:
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
		break;
	case RF_BAND_5G_L:
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
		break;
	case RF_BAND_5G_M:
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
		break;
	case RF_BAND_5G_H:
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
		break;
	default:
		break;
	}

	rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, MASKBYTE0, 0x88);

	check_txgain = 0;
	for (gain = 0; gain < RF_GAIN_NUM; gain++) {
		v = txgapk->rf3f_bp[band][gain][path];
		if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
			if (!check_txgain) {
				tmp_3f = txgapk->rf3f_bp[band][gain][path];
				check_txgain = 1;
			}
			rtw_dbg(rtwdev, RTW_DBG_RFK,
				"[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
				txgapk->rf3f_bp[band][gain][path]);
		} else {
			tmp_3f = txgapk->rf3f_bp[band][gain][path];
		}

		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN, tmp_3f);
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_I_GAIN, gain);
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x1);
		rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x0);

		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[TXGAPK] Band=%d 0x1b98[11:0]=0x%03X path=%d\n",
			band, tmp_3f, path);
	}
}

static void rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev)
{
	u8 path, band;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
		__func__, rtwdev->dm_info.gapk.channel);

	for (band = 0; band < RF_BAND_MAX; band++) {
		for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
			_rtw8822c_txgapk_write_gain_bb_table(rtwdev,
							     band, path);
		}
	}
}

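/* Trigger the TX gap-K measurement on the selected path and read back the
 * ten per-gain-step offsets from the report registers; each 4-bit report
 * value is sign-extended into an s8 before being stored.
 */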
static void rtw8822c_txgapk_read_offset(struct rtw_dev *rtwdev, u8 path)
{
	static const u32 cfg1_1b00[2] = {0x00000d18, 0x00000d2a};
	static const u32 cfg2_1b00[2] = {0x00000d19, 0x00000d2b};
	static const u32 set_pi[2] = {REG_RSV_CTRL, REG_WLRF1};
	static const u32 path_setting[2] = {REG_ORITXCODE, REG_ORITXCODE2};
	struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
	u8 channel = txgapk->channel;
	u32 val;
	int i;

	if (path >= ARRAY_SIZE(cfg1_1b00) ||
	    path >= ARRAY_SIZE(cfg2_1b00) ||
	    path >= ARRAY_SIZE(set_pi) ||
	    path >= ARRAY_SIZE(path_setting)) {
		rtw_warn(rtwdev, "[TXGAPK] wrong path %d\n", path);
		return;
	}

	rtw_write32_mask(rtwdev, REG_ANTMAP0, BIT_ANT_PATH, path + 1);
	rtw_write32_mask(rtwdev, REG_TXLGMAP, MASKDWORD, 0xe4e40000);
	rtw_write32_mask(rtwdev, REG_TXANTSEG, BIT_ANTSEG, 0x3);
	rtw_write32_mask(rtwdev, path_setting[path], MASK20BITS, 0x33312);
	rtw_write32_mask(rtwdev, path_setting[path], BIT_PATH_EN, 0x1);
	rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x0);
	rtw_write_rf(rtwdev, path, RF_LUTDBG, BIT_TXA_TANK, 0x1);
	rtw_write_rf(rtwdev, path, RF_IDAC, BIT_TX_MODE, 0x820);
	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
	rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);

	rtw_write32_mask(rtwdev, REG_TX_TONE_IDX, MASKBYTE0, 0x018);
	fsleep(1000);
	if (channel >= 1 && channel <= 14)
		rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_2G_SWING);
	else
		rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_5G_SWING);
	fsleep(1000);

	rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg1_1b00[path]);
	rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg2_1b00[path]);

	read_poll_timeout(rtw_read32_mask, val,
			  val == 0x55, 1000, 100000, false,
			  rtwdev, REG_RPT_CIP, BIT_RPT_CIP_STATUS);

	rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x2);
	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_EN, 0x1);
	rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x12);
	rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x3);
	val = rtw_read32(rtwdev, REG_STAT_RPT);

	txgapk->offset[0][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
	txgapk->offset[1][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
	txgapk->offset[2][path] = (s8)FIELD_GET(BIT_GAPK_RPT2, val);
	txgapk->offset[3][path] = (s8)FIELD_GET(BIT_GAPK_RPT3, val);
	txgapk->offset[4][path] = (s8)FIELD_GET(BIT_GAPK_RPT4, val);
	txgapk->offset[5][path] = (s8)FIELD_GET(BIT_GAPK_RPT5, val);
	txgapk->offset[6][path] = (s8)FIELD_GET(BIT_GAPK_RPT6, val);
	txgapk->offset[7][path] = (s8)FIELD_GET(BIT_GAPK_RPT7, val);

	rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x4);
	val = rtw_read32(rtwdev, REG_STAT_RPT);

	txgapk->offset[8][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
	txgapk->offset[9][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);

	for (i = 0; i < RF_HW_OFFSET_NUM; i++)
		if (txgapk->offset[i][path] & BIT(3))
			txgapk->offset[i][path] = txgapk->offset[i][path] |
						  0xf0;
	for (i = 0; i < RF_HW_OFFSET_NUM; i++)
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[TXGAPK] offset %d %d path=%d\n",
			txgapk->offset[i][path], i, path);
}

rtw8822c_txgapk_calculate_offset(struct rtw_dev * rtwdev,u8 path)1544 static void rtw8822c_txgapk_calculate_offset(struct rtw_dev *rtwdev, u8 path)
1545 {
1546 static const u32 bb_reg[] = {REG_ANTMAP0, REG_TXLGMAP, REG_TXANTSEG,
1547 REG_ORITXCODE, REG_ORITXCODE2};
1548 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1549 u8 channel = txgapk->channel;
1550 u32 reg_backup[ARRAY_SIZE(bb_reg)] = {0};
1551
1552 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1553 __func__, channel);
1554
1555 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1556 reg_backup, ARRAY_SIZE(bb_reg));
1557
1558 if (channel >= 1 && channel <= 14) {
1559 rtw_write32_mask(rtwdev,
1560 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1561 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1562 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1563 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1564 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1565 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x5000f);
1566 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x0);
1567 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x1);
1568 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0f);
1569 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1570 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
1571 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1572 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1573
1574 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x00);
1575 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1576
1577 rtw8822c_txgapk_read_offset(rtwdev, path);
1578 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1579
1580 } else {
1581 rtw_write32_mask(rtwdev,
1582 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1583 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1584 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1585 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1586 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1587 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50011);
1588 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x3);
1589 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x3);
1590 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
1591 rtw_write_rf(rtwdev, path,
1592 RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0x2);
1593 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x12);
1594 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1595 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1596 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1597 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x5);
1598
1599 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1600
1601 if (channel >= 36 && channel <= 64)
1602 rtw_write32_mask(rtwdev,
1603 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1604 else if (channel >= 100 && channel <= 144)
1605 rtw_write32_mask(rtwdev,
1606 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1607 else if (channel >= 149 && channel <= 177)
1608 rtw_write32_mask(rtwdev,
1609 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1610
1611 rtw8822c_txgapk_read_offset(rtwdev, path);
1612 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1613 }
1614 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1615 reg_backup, ARRAY_SIZE(bb_reg));
1616 }
1617
1618 static void rtw8822c_txgapk_rf_restore(struct rtw_dev *rtwdev, u8 path)
1619 {
1620 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1621
1622 if (path >= rtwdev->hal.rf_path_num)
1623 return;
1624
1625 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x3);
1626 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x0);
1627 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x0);
1628 }
1629
1630 static u32 rtw8822c_txgapk_cal_gain(struct rtw_dev *rtwdev, u32 gain, s8 offset)
1631 {
1632 u32 gain_x2, new_gain;
1633
1634 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1635
1636 if (_rtw8822c_txgapk_gain_valid(rtwdev, gain)) {
1637 new_gain = gain;
1638 rtw_dbg(rtwdev, RTW_DBG_RFK,
1639 "[TXGAPK] gain=0x%03X(>=0xCEX) offset=%d new_gain=0x%03X\n",
1640 gain, offset, new_gain);
1641 return new_gain;
1642 }
1643
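	/* Apply the offset in half-step units: double the gain, add the
	 * offset, then halve again; an odd intermediate value keeps its
	 * extra half step in BIT_GAIN_EXT.
	 */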
1644 gain_x2 = (gain << 1) + offset;
1645 new_gain = (gain_x2 >> 1) | (gain_x2 & BIT(0) ? BIT_GAIN_EXT : 0);
1646
1647 rtw_dbg(rtwdev, RTW_DBG_RFK,
1648 "[TXGAPK] gain=0x%X offset=%d new_gain=0x%X\n",
1649 gain, offset, new_gain);
1650
1651 return new_gain;
1652 }
1653
1654 static void rtw8822c_txgapk_write_tx_gain(struct rtw_dev *rtwdev)
1655 {
1656 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1657 u32 i, j, tmp = 0x20, tmp_3f, v;
1658 s8 offset_tmp[RF_GAIN_NUM] = {0};
1659 u8 path, band = RF_BAND_2G_OFDM, channel = txgapk->channel;
1660
1661 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1662
1663 if (channel >= 1 && channel <= 14) {
1664 tmp = 0x20;
1665 band = RF_BAND_2G_OFDM;
1666 } else if (channel >= 36 && channel <= 64) {
1667 tmp = 0x200;
1668 band = RF_BAND_5G_L;
1669 } else if (channel >= 100 && channel <= 144) {
1670 tmp = 0x280;
1671 band = RF_BAND_5G_M;
1672 } else if (channel >= 149 && channel <= 177) {
1673 tmp = 0x300;
1674 band = RF_BAND_5G_H;
1675 } else {
1676 rtw_err(rtwdev, "[TXGAPK] unknown channel %d!!\n", channel);
1677 return;
1678 }
1679
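	/* For each gain index, accumulate the gap offsets of that index and
	 * all higher indexes (skipping entries already at a valid gain), so
	 * every rewritten TX gain entry absorbs the corrections above it.
	 */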
1680 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1681 for (i = 0; i < RF_GAIN_NUM; i++) {
1682 offset_tmp[i] = 0;
1683 for (j = i; j < RF_GAIN_NUM; j++) {
1684 v = txgapk->rf3f_bp[band][j][path];
1685 if (_rtw8822c_txgapk_gain_valid(rtwdev, v))
1686 continue;
1687
1688 offset_tmp[i] += txgapk->offset[j][path];
1689 txgapk->fianl_offset[i][path] = offset_tmp[i];
1690 }
1691
1692 v = txgapk->rf3f_bp[band][i][path];
1693 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1694 rtw_dbg(rtwdev, RTW_DBG_RFK,
1695 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1696 txgapk->rf3f_bp[band][i][path]);
1697 } else {
1698 txgapk->rf3f_fs[path][i] = offset_tmp[i];
1699 rtw_dbg(rtwdev, RTW_DBG_RFK,
1700 "[TXGAPK] offset %d %d\n",
1701 offset_tmp[i], i);
1702 }
1703 }
1704
1705 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x10000);
1706 for (i = 0; i < RF_GAIN_NUM; i++) {
1707 rtw_write_rf(rtwdev, path,
1708 RF_LUTWA, RFREG_MASK, tmp + i);
1709
1710 tmp_3f = rtw8822c_txgapk_cal_gain(rtwdev,
1711 txgapk->rf3f_bp[band][i][path],
1712 offset_tmp[i]);
1713 rtw_write_rf(rtwdev, path, RF_LUTWD0,
1714 BIT_GAIN_EXT | BIT_DATA_L, tmp_3f);
1715
1716 rtw_dbg(rtwdev, RTW_DBG_RFK,
1717 "[TXGAPK] 0x33=0x%05X 0x3f=0x%04X\n",
1718 tmp + i, tmp_3f);
1719 }
1720 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x0);
1721 }
1722 }
1723
1724 static void rtw8822c_txgapk_save_all_tx_gain_table(struct rtw_dev *rtwdev)
1725 {
1726 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1727 static const u32 three_wire[2] = {REG_3WIRE, REG_3WIRE2};
1728 static const u8 ch_num[RF_BAND_MAX] = {1, 1, 36, 100, 149};
1729 static const u8 band_num[RF_BAND_MAX] = {0x0, 0x0, 0x1, 0x3, 0x5};
1730 static const u8 cck[RF_BAND_MAX] = {0x1, 0x0, 0x0, 0x0, 0x0};
1731 u8 path, band, gain, rf0_idx;
1732 u32 rf18, v;
1733
1734 if (rtwdev->dm_info.dm_flags & BIT(RTW_DM_CAP_TXGAPK))
1735 return;
1736
1737 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1738
1739 if (txgapk->read_txgain == 1) {
1740 rtw_dbg(rtwdev, RTW_DBG_RFK,
1741 "[TXGAPK] Already Read txgapk->read_txgain return!!!\n");
1742 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1743 return;
1744 }
1745
1746 for (band = 0; band < RF_BAND_MAX; band++) {
1747 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1748 rf18 = rtw_read_rf(rtwdev, path, RF_CFGCH, RFREG_MASK);
1749
1750 rtw_write32_mask(rtwdev,
1751 three_wire[path], BIT_3WIRE_EN, 0x0);
1752 rtw_write_rf(rtwdev, path,
1753 RF_CFGCH, MASKBYTE0, ch_num[band]);
1754 rtw_write_rf(rtwdev, path,
1755 RF_CFGCH, BIT_BAND, band_num[band]);
1756 rtw_write_rf(rtwdev, path,
1757 RF_BW_TRXBB, BIT_DBG_CCK_CCA, cck[band]);
1758 rtw_write_rf(rtwdev, path,
1759 RF_BW_TRXBB, BIT_TX_CCK_IND, cck[band]);
1760 gain = 0;
1761 for (rf0_idx = 1; rf0_idx < 32; rf0_idx += 3) {
1762 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC,
1763 MASKBYTE0, rf0_idx);
1764 v = rtw_read_rf(rtwdev, path,
1765 RF_TX_RESULT, RFREG_MASK);
1766 txgapk->rf3f_bp[band][gain][path] = v & BIT_DATA_L;
1767
1768 rtw_dbg(rtwdev, RTW_DBG_RFK,
1769 "[TXGAPK] 0x5f=0x%03X band=%d path=%d\n",
1770 txgapk->rf3f_bp[band][gain][path],
1771 band, path);
1772 gain++;
1773 }
1774 rtw_write_rf(rtwdev, path, RF_CFGCH, RFREG_MASK, rf18);
1775 rtw_write32_mask(rtwdev,
1776 three_wire[path], BIT_3WIRE_EN, 0x3);
1777 }
1778 }
1779 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1780 txgapk->read_txgain = 1;
1781 }
1782
1783 static void rtw8822c_txgapk(struct rtw_dev *rtwdev)
1784 {
1785 static const u32 bb_reg[2] = {REG_TX_PTCL_CTRL, REG_TX_FIFO};
1786 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1787 u32 bb_reg_backup[2];
1788 u8 path;
1789
1790 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1791
1792 rtw8822c_txgapk_save_all_tx_gain_table(rtwdev);
1793
1794 if (txgapk->read_txgain == 0) {
1795 rtw_dbg(rtwdev, RTW_DBG_RFK,
1796 "[TXGAPK] txgapk->read_txgain == 0 return!!!\n");
1797 return;
1798 }
1799
1800 if (rtwdev->efuse.power_track_type >= 4 &&
1801 rtwdev->efuse.power_track_type <= 7) {
1802 rtw_dbg(rtwdev, RTW_DBG_RFK,
1803 "[TXGAPK] Normal Mode in TSSI mode. return!!!\n");
1804 return;
1805 }
1806
1807 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1808 bb_reg_backup, ARRAY_SIZE(bb_reg));
1809 rtw8822c_txgapk_tx_pause(rtwdev);
1810 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1811 txgapk->channel = rtw_read_rf(rtwdev, path,
1812 RF_CFGCH, RFREG_MASK) & MASKBYTE0;
1813 rtw8822c_txgapk_bb_dpk(rtwdev, path);
1814 rtw8822c_txgapk_afe_dpk(rtwdev, path);
1815 rtw8822c_txgapk_calculate_offset(rtwdev, path);
1816 rtw8822c_txgapk_rf_restore(rtwdev, path);
1817 rtw8822c_txgapk_afe_dpk_restore(rtwdev, path);
1818 rtw8822c_txgapk_bb_dpk_restore(rtwdev, path);
1819 }
1820 rtw8822c_txgapk_write_tx_gain(rtwdev);
1821 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1822 bb_reg_backup, ARRAY_SIZE(bb_reg));
1823 }
1824
1825 static void rtw8822c_do_gapk(struct rtw_dev *rtwdev)
1826 {
1827 struct rtw_dm_info *dm = &rtwdev->dm_info;
1828
1829 if (dm->dm_flags & BIT(RTW_DM_CAP_TXGAPK)) {
1830 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] feature disable!!!\n");
1831 return;
1832 }
1833 rtw8822c_rfk_handshake(rtwdev, true);
1834 rtw8822c_txgapk(rtwdev);
1835 rtw8822c_rfk_handshake(rtwdev, false);
1836 }
1837
1838 static void rtw8822c_rf_init(struct rtw_dev *rtwdev)
1839 {
1840 rtw8822c_rf_dac_cal(rtwdev);
1841 rtw8822c_rf_x2_check(rtwdev);
1842 rtw8822c_thermal_trim(rtwdev);
1843 rtw8822c_power_trim(rtwdev);
1844 rtw8822c_pa_bias(rtwdev);
1845 }
1846
1847 static void rtw8822c_pwrtrack_init(struct rtw_dev *rtwdev)
1848 {
1849 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1850 u8 path;
1851
1852 for (path = RF_PATH_A; path < RTW_RF_PATH_MAX; path++) {
1853 dm_info->delta_power_index[path] = 0;
1854 ewma_thermal_init(&dm_info->avg_thermal[path]);
1855 dm_info->thermal_avg[path] = 0xff;
1856 }
1857
1858 dm_info->pwr_trk_triggered = false;
1859 dm_info->thermal_meter_k = rtwdev->efuse.thermal_meter_k;
1860 dm_info->thermal_meter_lck = rtwdev->efuse.thermal_meter_k;
1861 }
1862
1863 static void rtw8822c_phy_set_param(struct rtw_dev *rtwdev)
1864 {
1865 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1866 struct rtw_hal *hal = &rtwdev->hal;
1867 u8 crystal_cap;
1868 u8 cck_gi_u_bnd_msb = 0;
1869 u8 cck_gi_u_bnd_lsb = 0;
1870 u8 cck_gi_l_bnd_msb = 0;
1871 u8 cck_gi_l_bnd_lsb = 0;
1872 bool is_tx2_path;
1873
1874 /* power on BB/RF domain */
1875 rtw_write8_set(rtwdev, REG_SYS_FUNC_EN,
1876 BIT_FEN_BB_GLB_RST | BIT_FEN_BB_RSTB);
1877 rtw_write8_set(rtwdev, REG_RF_CTRL,
1878 BIT_RF_EN | BIT_RF_RSTB | BIT_RF_SDM_RSTB);
1879 rtw_write32_set(rtwdev, REG_WLRF1, BIT_WLRF1_BBRF_EN);
1880
1881 /* disable low rate DPD */
1882 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1883
1884 /* pre init before header files config */
1885 rtw8822c_header_file_init(rtwdev, true);
1886
1887 rtw_phy_load_tables(rtwdev);
1888
1889 crystal_cap = rtwdev->efuse.crystal_cap & 0x7f;
1890 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, 0xfffc00,
1891 crystal_cap | (crystal_cap << 7));
1892
1893 /* post init after header files config */
1894 rtw8822c_header_file_init(rtwdev, false);
1895
1896 is_tx2_path = false;
1897 rtw8822c_config_trx_mode(rtwdev, hal->antenna_tx, hal->antenna_rx,
1898 is_tx2_path);
1899 rtw_phy_init(rtwdev);
1900
1901 cck_gi_u_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc000);
1902 cck_gi_u_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1aa8, 0xf0000);
1903 cck_gi_l_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc0);
1904 cck_gi_l_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1a70, 0x0f000000);
1905
1906 dm_info->cck_gi_u_bnd = ((cck_gi_u_bnd_msb << 4) | (cck_gi_u_bnd_lsb));
1907 dm_info->cck_gi_l_bnd = ((cck_gi_l_bnd_msb << 4) | (cck_gi_l_bnd_lsb));
1908
1909 rtw8822c_rf_init(rtwdev);
1910 rtw8822c_pwrtrack_init(rtwdev);
1911
1912 rtw_bf_phy_init(rtwdev);
1913 }
1914
1915 #define WLAN_TXQ_RPT_EN 0x1F
1916 #define WLAN_SLOT_TIME 0x09
1917 #define WLAN_PIFS_TIME 0x1C
1918 #define WLAN_SIFS_CCK_CONT_TX 0x0A
1919 #define WLAN_SIFS_OFDM_CONT_TX 0x0E
1920 #define WLAN_SIFS_CCK_TRX 0x0A
1921 #define WLAN_SIFS_OFDM_TRX 0x10
1922 #define WLAN_NAV_MAX 0xC8
1923 #define WLAN_RDG_NAV 0x05
1924 #define WLAN_TXOP_NAV 0x1B
1925 #define WLAN_CCK_RX_TSF 0x30
1926 #define WLAN_OFDM_RX_TSF 0x30
1927 #define WLAN_TBTT_PROHIBIT 0x04 /* unit : 32us */
1928 #define WLAN_TBTT_HOLD_TIME 0x064 /* unit : 32us */
1929 #define WLAN_DRV_EARLY_INT 0x04
1930 #define WLAN_BCN_CTRL_CLT0 0x10
1931 #define WLAN_BCN_DMA_TIME 0x02
1932 #define WLAN_BCN_MAX_ERR 0xFF
1933 #define WLAN_SIFS_CCK_DUR_TUNE 0x0A
1934 #define WLAN_SIFS_OFDM_DUR_TUNE 0x10
1935 #define WLAN_SIFS_CCK_CTX 0x0A
1936 #define WLAN_SIFS_CCK_IRX 0x0A
1937 #define WLAN_SIFS_OFDM_CTX 0x0E
1938 #define WLAN_SIFS_OFDM_IRX 0x0E
1939 #define WLAN_EIFS_DUR_TUNE 0x40
1940 #define WLAN_EDCA_VO_PARAM 0x002FA226
1941 #define WLAN_EDCA_VI_PARAM 0x005EA328
1942 #define WLAN_EDCA_BE_PARAM 0x005EA42B
1943 #define WLAN_EDCA_BK_PARAM 0x0000A44F
1944
1945 #define WLAN_RX_FILTER0 0xFFFFFFFF
1946 #define WLAN_RX_FILTER2 0xFFFF
1947 #define WLAN_RCR_CFG 0xE400220E
1948 #define WLAN_RXPKT_MAX_SZ 12288
1949 #define WLAN_RXPKT_MAX_SZ_512 (WLAN_RXPKT_MAX_SZ >> 9)
1950
1951 #define WLAN_AMPDU_MAX_TIME 0x70
1952 #define WLAN_RTS_LEN_TH 0xFF
1953 #define WLAN_RTS_TX_TIME_TH 0x08
1954 #define WLAN_MAX_AGG_PKT_LIMIT 0x3f
1955 #define WLAN_RTS_MAX_AGG_PKT_LIMIT 0x3f
1956 #define WLAN_PRE_TXCNT_TIME_TH 0x1E0
1957 #define FAST_EDCA_VO_TH 0x06
1958 #define FAST_EDCA_VI_TH 0x06
1959 #define FAST_EDCA_BE_TH 0x06
1960 #define FAST_EDCA_BK_TH 0x06
1961 #define WLAN_BAR_RETRY_LIMIT 0x01
1962 #define WLAN_BAR_ACK_TYPE 0x05
1963 #define WLAN_RA_TRY_RATE_AGG_LIMIT 0x08
1964 #define WLAN_RESP_TXRATE 0x84
1965 #define WLAN_ACK_TO 0x21
1966 #define WLAN_ACK_TO_CCK 0x6A
1967 #define WLAN_DATA_RATE_FB_CNT_1_4 0x01000000
1968 #define WLAN_DATA_RATE_FB_CNT_5_8 0x08070504
1969 #define WLAN_RTS_RATE_FB_CNT_5_8 0x08070504
1970 #define WLAN_DATA_RATE_FB_RATE0 0xFE01F010
1971 #define WLAN_DATA_RATE_FB_RATE0_H 0x40000000
1972 #define WLAN_RTS_RATE_FB_RATE1 0x003FF010
1973 #define WLAN_RTS_RATE_FB_RATE1_H 0x40000000
1974 #define WLAN_RTS_RATE_FB_RATE4 0x0600F010
1975 #define WLAN_RTS_RATE_FB_RATE4_H 0x400003E0
1976 #define WLAN_RTS_RATE_FB_RATE5 0x0600F015
1977 #define WLAN_RTS_RATE_FB_RATE5_H 0x000000E0
1978 #define WLAN_MULTI_ADDR 0xFFFFFFFF
1979
1980 #define WLAN_TX_FUNC_CFG1 0x30
1981 #define WLAN_TX_FUNC_CFG2 0x30
1982 #define WLAN_MAC_OPT_NORM_FUNC1 0x98
1983 #define WLAN_MAC_OPT_LB_FUNC1 0x80
1984 #define WLAN_MAC_OPT_FUNC2 0xb0810041
1985 #define WLAN_MAC_INT_MIG_CFG 0x33330000
1986
1987 #define WLAN_SIFS_CFG (WLAN_SIFS_CCK_CONT_TX | \
1988 (WLAN_SIFS_OFDM_CONT_TX << BIT_SHIFT_SIFS_OFDM_CTX) | \
1989 (WLAN_SIFS_CCK_TRX << BIT_SHIFT_SIFS_CCK_TRX) | \
1990 (WLAN_SIFS_OFDM_TRX << BIT_SHIFT_SIFS_OFDM_TRX))
1991
1992 #define WLAN_SIFS_DUR_TUNE (WLAN_SIFS_CCK_DUR_TUNE | \
1993 (WLAN_SIFS_OFDM_DUR_TUNE << 8))
1994
1995 #define WLAN_TBTT_TIME (WLAN_TBTT_PROHIBIT |\
1996 (WLAN_TBTT_HOLD_TIME << BIT_SHIFT_TBTT_HOLD_TIME_AP))
1997
1998 #define WLAN_NAV_CFG (WLAN_RDG_NAV | (WLAN_TXOP_NAV << 16))
1999 #define WLAN_RX_TSF_CFG (WLAN_CCK_RX_TSF | (WLAN_OFDM_RX_TSF) << 8)
2000
2001 #define MAC_CLK_SPEED 80 /* 80M */
2002 #define EFUSE_PCB_INFO_OFFSET 0xCA
2003
2004 static int rtw8822c_mac_init(struct rtw_dev *rtwdev)
2005 {
2006 u8 value8;
2007 u16 value16;
2008 u32 value32;
2009 u16 pre_txcnt;
2010
2011 /* txq control */
2012 value8 = rtw_read8(rtwdev, REG_FWHW_TXQ_CTRL);
2013 value8 |= (BIT(7) & ~BIT(1) & ~BIT(2));
2014 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL, value8);
2015 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL + 1, WLAN_TXQ_RPT_EN);
2016 /* sifs control */
2017 rtw_write16(rtwdev, REG_SPEC_SIFS, WLAN_SIFS_DUR_TUNE);
2018 rtw_write32(rtwdev, REG_SIFS, WLAN_SIFS_CFG);
2019 rtw_write16(rtwdev, REG_RESP_SIFS_CCK,
2020 WLAN_SIFS_CCK_CTX | WLAN_SIFS_CCK_IRX << 8);
2021 rtw_write16(rtwdev, REG_RESP_SIFS_OFDM,
2022 WLAN_SIFS_OFDM_CTX | WLAN_SIFS_OFDM_IRX << 8);
2023 /* rate fallback control */
2024 rtw_write32(rtwdev, REG_DARFRC, WLAN_DATA_RATE_FB_CNT_1_4);
2025 rtw_write32(rtwdev, REG_DARFRCH, WLAN_DATA_RATE_FB_CNT_5_8);
2026 rtw_write32(rtwdev, REG_RARFRCH, WLAN_RTS_RATE_FB_CNT_5_8);
2027 rtw_write32(rtwdev, REG_ARFR0, WLAN_DATA_RATE_FB_RATE0);
2028 rtw_write32(rtwdev, REG_ARFRH0, WLAN_DATA_RATE_FB_RATE0_H);
2029 rtw_write32(rtwdev, REG_ARFR1_V1, WLAN_RTS_RATE_FB_RATE1);
2030 rtw_write32(rtwdev, REG_ARFRH1_V1, WLAN_RTS_RATE_FB_RATE1_H);
2031 rtw_write32(rtwdev, REG_ARFR4, WLAN_RTS_RATE_FB_RATE4);
2032 rtw_write32(rtwdev, REG_ARFRH4, WLAN_RTS_RATE_FB_RATE4_H);
2033 rtw_write32(rtwdev, REG_ARFR5, WLAN_RTS_RATE_FB_RATE5);
2034 rtw_write32(rtwdev, REG_ARFRH5, WLAN_RTS_RATE_FB_RATE5_H);
2035 /* protocol configuration */
2036 rtw_write8(rtwdev, REG_AMPDU_MAX_TIME_V1, WLAN_AMPDU_MAX_TIME);
2037 rtw_write8_set(rtwdev, REG_TX_HANG_CTRL, BIT_EN_EOF_V1);
2038 pre_txcnt = WLAN_PRE_TXCNT_TIME_TH | BIT_EN_PRECNT;
2039 rtw_write8(rtwdev, REG_PRECNT_CTRL, (u8)(pre_txcnt & 0xFF));
2040 rtw_write8(rtwdev, REG_PRECNT_CTRL + 1, (u8)(pre_txcnt >> 8));
2041 value32 = WLAN_RTS_LEN_TH | (WLAN_RTS_TX_TIME_TH << 8) |
2042 (WLAN_MAX_AGG_PKT_LIMIT << 16) |
2043 (WLAN_RTS_MAX_AGG_PKT_LIMIT << 24);
2044 rtw_write32(rtwdev, REG_PROT_MODE_CTRL, value32);
2045 rtw_write16(rtwdev, REG_BAR_MODE_CTRL + 2,
2046 WLAN_BAR_RETRY_LIMIT | WLAN_RA_TRY_RATE_AGG_LIMIT << 8);
2047 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING, FAST_EDCA_VO_TH);
2048 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING + 2, FAST_EDCA_VI_TH);
2049 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING, FAST_EDCA_BE_TH);
2050 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING + 2, FAST_EDCA_BK_TH);
2051 /* close BA parser */
2052 rtw_write8_clr(rtwdev, REG_LIFETIME_EN, BIT_BA_PARSER_EN);
2053 rtw_write32_clr(rtwdev, REG_RRSR, BITS_RRSR_RSC);
2054
2055 /* EDCA configuration */
2056 rtw_write32(rtwdev, REG_EDCA_VO_PARAM, WLAN_EDCA_VO_PARAM);
2057 rtw_write32(rtwdev, REG_EDCA_VI_PARAM, WLAN_EDCA_VI_PARAM);
2058 rtw_write32(rtwdev, REG_EDCA_BE_PARAM, WLAN_EDCA_BE_PARAM);
2059 rtw_write32(rtwdev, REG_EDCA_BK_PARAM, WLAN_EDCA_BK_PARAM);
2060 rtw_write8(rtwdev, REG_PIFS, WLAN_PIFS_TIME);
2061 rtw_write8_clr(rtwdev, REG_TX_PTCL_CTRL + 1, BIT_SIFS_BK_EN >> 8);
2062 rtw_write8_set(rtwdev, REG_RD_CTRL + 1,
2063 (BIT_DIS_TXOP_CFE | BIT_DIS_LSIG_CFE |
2064 BIT_DIS_STBC_CFE) >> 8);
2065
2066 /* MAC clock configuration */
2067 rtw_write32_clr(rtwdev, REG_AFE_CTRL1, BIT_MAC_CLK_SEL);
2068 rtw_write8(rtwdev, REG_USTIME_TSF, MAC_CLK_SPEED);
2069 rtw_write8(rtwdev, REG_USTIME_EDCA, MAC_CLK_SPEED);
2070
2071 rtw_write8_set(rtwdev, REG_MISC_CTRL,
2072 BIT_EN_FREE_CNT | BIT_DIS_SECOND_CCA);
2073 rtw_write8_clr(rtwdev, REG_TIMER0_SRC_SEL, BIT_TSFT_SEL_TIMER0);
2074 rtw_write16(rtwdev, REG_TXPAUSE, 0x0000);
2075 rtw_write8(rtwdev, REG_SLOT, WLAN_SLOT_TIME);
2076 rtw_write32(rtwdev, REG_RD_NAV_NXT, WLAN_NAV_CFG);
2077 rtw_write16(rtwdev, REG_RXTSF_OFFSET_CCK, WLAN_RX_TSF_CFG);
2078 	/* Set beacon control - enable TSF and other related functions */
2079 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2080 /* Set send beacon related registers */
2081 rtw_write32(rtwdev, REG_TBTT_PROHIBIT, WLAN_TBTT_TIME);
2082 rtw_write8(rtwdev, REG_DRVERLYINT, WLAN_DRV_EARLY_INT);
2083 rtw_write8(rtwdev, REG_BCN_CTRL_CLINT0, WLAN_BCN_CTRL_CLT0);
2084 rtw_write8(rtwdev, REG_BCNDMATIM, WLAN_BCN_DMA_TIME);
2085 rtw_write8(rtwdev, REG_BCN_MAX_ERR, WLAN_BCN_MAX_ERR);
2086
2087 /* WMAC configuration */
2088 rtw_write32(rtwdev, REG_MAR, WLAN_MULTI_ADDR);
2089 rtw_write32(rtwdev, REG_MAR + 4, WLAN_MULTI_ADDR);
2090 rtw_write8(rtwdev, REG_BBPSF_CTRL + 2, WLAN_RESP_TXRATE);
2091 rtw_write8(rtwdev, REG_ACKTO, WLAN_ACK_TO);
2092 rtw_write8(rtwdev, REG_ACKTO_CCK, WLAN_ACK_TO_CCK);
2093 rtw_write16(rtwdev, REG_EIFS, WLAN_EIFS_DUR_TUNE);
2094 rtw_write8(rtwdev, REG_NAV_CTRL + 2, WLAN_NAV_MAX);
2095 rtw_write8(rtwdev, REG_WMAC_TRXPTCL_CTL_H + 2, WLAN_BAR_ACK_TYPE);
2096 rtw_write32(rtwdev, REG_RXFLTMAP0, WLAN_RX_FILTER0);
2097 rtw_write16(rtwdev, REG_RXFLTMAP2, WLAN_RX_FILTER2);
2098 rtw_write32(rtwdev, REG_RCR, WLAN_RCR_CFG);
2099 rtw_write8(rtwdev, REG_RX_PKT_LIMIT, WLAN_RXPKT_MAX_SZ_512);
2100 rtw_write8(rtwdev, REG_TCR + 2, WLAN_TX_FUNC_CFG2);
2101 rtw_write8(rtwdev, REG_TCR + 1, WLAN_TX_FUNC_CFG1);
2102 rtw_write32_set(rtwdev, REG_GENERAL_OPTION, BIT_DUMMY_FCS_READY_MASK_EN);
2103 rtw_write32(rtwdev, REG_WMAC_OPTION_FUNCTION + 8, WLAN_MAC_OPT_FUNC2);
2104 rtw_write8(rtwdev, REG_WMAC_OPTION_FUNCTION_1, WLAN_MAC_OPT_NORM_FUNC1);
2105
2106 /* init low power */
2107 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL + 2) & 0xF00F;
2108 value16 |= (BIT_RXGCK_VHT_FIFOTHR(1) | BIT_RXGCK_HT_FIFOTHR(1) |
2109 BIT_RXGCK_OFDM_FIFOTHR(1) | BIT_RXGCK_CCK_FIFOTHR(1)) >> 16;
2110 rtw_write16(rtwdev, REG_RXPSF_CTRL + 2, value16);
2111 value16 = 0;
2112 value16 = BIT_SET_RXPSF_PKTLENTHR(value16, 1);
2113 value16 |= BIT_RXPSF_CTRLEN | BIT_RXPSF_VHTCHKEN | BIT_RXPSF_HTCHKEN
2114 | BIT_RXPSF_OFDMCHKEN | BIT_RXPSF_CCKCHKEN
2115 | BIT_RXPSF_OFDMRST;
2116 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2117 rtw_write32(rtwdev, REG_RXPSF_TYPE_CTRL, 0xFFFFFFFF);
2118 /* rx ignore configuration */
2119 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL);
2120 value16 &= ~(BIT_RXPSF_MHCHKEN | BIT_RXPSF_CCKRST |
2121 BIT_RXPSF_CONT_ERRCHKEN);
2122 value16 = BIT_SET_RXPSF_ERRTHR(value16, 0x07);
2123 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2124 rtw_write8_set(rtwdev, REG_SND_PTCL_CTRL,
2125 BIT_DIS_CHK_VHTSIGB_CRC);
2126
2127 /* Interrupt migration configuration */
2128 rtw_write32(rtwdev, REG_INT_MIG, WLAN_MAC_INT_MIG_CFG);
2129
2130 return 0;
2131 }
2132
2133 #define FWCD_SIZE_REG_8822C 0x2000
2134 #define FWCD_SIZE_DMEM_8822C 0x10000
2135 #define FWCD_SIZE_IMEM_8822C 0x10000
2136 #define FWCD_SIZE_EMEM_8822C 0x20000
2137 #define FWCD_SIZE_ROM_8822C 0x10000
2138
2139 static const u32 __fwcd_segs_8822c[] = {
2140 FWCD_SIZE_REG_8822C,
2141 FWCD_SIZE_DMEM_8822C,
2142 FWCD_SIZE_IMEM_8822C,
2143 FWCD_SIZE_EMEM_8822C,
2144 FWCD_SIZE_ROM_8822C,
2145 };
2146
2147 static const struct rtw_fwcd_segs rtw8822c_fwcd_segs = {
2148 .segs = __fwcd_segs_8822c,
2149 .num = ARRAY_SIZE(__fwcd_segs_8822c),
2150 };
2151
2152 static int rtw8822c_dump_fw_crash(struct rtw_dev *rtwdev)
2153 {
2154 #define __dump_fw_8822c(_dev, _mem) \
2155 rtw_dump_fw(_dev, OCPBASE_ ## _mem ## _88XX, \
2156 FWCD_SIZE_ ## _mem ## _8822C, RTW_FWCD_ ## _mem)
2157 int ret;
2158
2159 ret = rtw_dump_reg(rtwdev, 0x0, FWCD_SIZE_REG_8822C);
2160 if (ret)
2161 return ret;
2162 ret = __dump_fw_8822c(rtwdev, DMEM);
2163 if (ret)
2164 return ret;
2165 ret = __dump_fw_8822c(rtwdev, IMEM);
2166 if (ret)
2167 return ret;
2168 ret = __dump_fw_8822c(rtwdev, EMEM);
2169 if (ret)
2170 return ret;
2171 ret = __dump_fw_8822c(rtwdev, ROM);
2172 if (ret)
2173 return ret;
2174
2175 return 0;
2176
2177 #undef __dump_fw_8822c
2178 }
2179
2180 static void rtw8822c_rstb_3wire(struct rtw_dev *rtwdev, bool enable)
2181 {
2182 if (enable) {
2183 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x1);
2184 rtw_write32_mask(rtwdev, REG_ANAPAR_A, BIT_ANAPAR_UPDATE, 0x1);
2185 rtw_write32_mask(rtwdev, REG_ANAPAR_B, BIT_ANAPAR_UPDATE, 0x1);
2186 } else {
2187 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x0);
2188 }
2189 }
2190
2191 static void rtw8822c_set_channel_rf(struct rtw_dev *rtwdev, u8 channel, u8 bw)
2192 {
2193 #define RF18_BAND_MASK (BIT(16) | BIT(9) | BIT(8))
2194 #define RF18_BAND_2G (0)
2195 #define RF18_BAND_5G (BIT(16) | BIT(8))
2196 #define RF18_CHANNEL_MASK (MASKBYTE0)
2197 #define RF18_RFSI_MASK (BIT(18) | BIT(17))
2198 #define RF18_RFSI_GE_CH80 (BIT(17))
2199 #define RF18_RFSI_GT_CH140 (BIT(18))
2200 #define RF18_BW_MASK (BIT(13) | BIT(12))
2201 #define RF18_BW_20M (BIT(13) | BIT(12))
2202 #define RF18_BW_40M (BIT(13))
2203 #define RF18_BW_80M (BIT(12))
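/* RF register 0x18 packs band select, channel number, RFSI selection and
 * bandwidth into a single word; the masks above pick out each field.
 */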
2204
2205 u32 rf_reg18 = 0;
2206 u32 rf_rxbb = 0;
2207
2208 rf_reg18 = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
2209
2210 rf_reg18 &= ~(RF18_BAND_MASK | RF18_CHANNEL_MASK | RF18_RFSI_MASK |
2211 RF18_BW_MASK);
2212
2213 rf_reg18 |= (IS_CH_2G_BAND(channel) ? RF18_BAND_2G : RF18_BAND_5G);
2214 rf_reg18 |= (channel & RF18_CHANNEL_MASK);
2215 if (IS_CH_5G_BAND_4(channel))
2216 rf_reg18 |= RF18_RFSI_GT_CH140;
2217 else if (IS_CH_5G_BAND_3(channel))
2218 rf_reg18 |= RF18_RFSI_GE_CH80;
2219
2220 switch (bw) {
2221 case RTW_CHANNEL_WIDTH_5:
2222 case RTW_CHANNEL_WIDTH_10:
2223 case RTW_CHANNEL_WIDTH_20:
2224 default:
2225 rf_reg18 |= RF18_BW_20M;
2226 rf_rxbb = 0x18;
2227 break;
2228 case RTW_CHANNEL_WIDTH_40:
2229 /* RF bandwidth */
2230 rf_reg18 |= RF18_BW_40M;
2231 rf_rxbb = 0x10;
2232 break;
2233 case RTW_CHANNEL_WIDTH_80:
2234 rf_reg18 |= RF18_BW_80M;
2235 rf_rxbb = 0x8;
2236 break;
2237 }
2238
2239 rtw8822c_rstb_3wire(rtwdev, false);
2240
2241 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x01);
2242 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWA, 0x1f, 0x12);
2243 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWD0, 0xfffff, rf_rxbb);
2244 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x00);
2245
2246 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x01);
2247 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWA, 0x1f, 0x12);
2248 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWD0, 0xfffff, rf_rxbb);
2249 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x00);
2250
2251 rtw_write_rf(rtwdev, RF_PATH_A, RF_CFGCH, RFREG_MASK, rf_reg18);
2252 rtw_write_rf(rtwdev, RF_PATH_B, RF_CFGCH, RFREG_MASK, rf_reg18);
2253
2254 rtw8822c_rstb_3wire(rtwdev, true);
2255 }
2256
2257 static void rtw8822c_toggle_igi(struct rtw_dev *rtwdev)
2258 {
2259 u32 igi;
2260
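	/* Nudge the initial gain index down by 2 on both paths and restore
	 * it, which appears to force the AGC to re-settle after a channel
	 * or path change.
	 */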
2261 igi = rtw_read32_mask(rtwdev, REG_RXIGI, 0x7f);
2262 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi - 2);
2263 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi - 2);
2264 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi);
2265 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi);
2266 }
2267
2268 static void rtw8822c_set_channel_bb(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2269 u8 primary_ch_idx)
2270 {
2271 if (IS_CH_2G_BAND(channel)) {
2272 rtw_write32_clr(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2273 rtw_write32_set(rtwdev, REG_TXF4, BIT(20));
2274 rtw_write32_clr(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2275 rtw_write32_clr(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2276 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0xF);
2277
2278 switch (bw) {
2279 case RTW_CHANNEL_WIDTH_20:
2280 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2281 0x5);
2282 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2283 0x5);
2284 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2285 0x6);
2286 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2287 0x6);
2288 break;
2289 case RTW_CHANNEL_WIDTH_40:
2290 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2291 0x4);
2292 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2293 0x4);
2294 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2295 0x0);
2296 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2297 0x0);
2298 break;
2299 }
2300 if (channel == 13 || channel == 14)
2301 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x969);
2302 else if (channel == 11 || channel == 12)
2303 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x96a);
2304 else
2305 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x9aa);
2306 if (channel == 14) {
2307 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x3da0);
2308 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2309 0x4962c931);
2310 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x6aa3);
2311 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xaa7b);
2312 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xf3d7);
2313 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD, 0x0);
2314 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2315 0xff012455);
2316 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD, 0xffff);
2317 } else {
2318 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x5284);
2319 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2320 0x3e18fec8);
2321 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x0a88);
2322 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xacc4);
2323 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xc8b2);
2324 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD,
2325 0x00faf0de);
2326 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2327 0x00122344);
2328 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD,
2329 0x0fffffff);
2330 }
2331 if (channel == 13)
2332 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2333 else
2334 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x1);
2335 } else if (IS_CH_5G_BAND(channel)) {
2336 rtw_write32_set(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2337 rtw_write32_set(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2338 rtw_write32_set(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2339 rtw_write32_clr(rtwdev, REG_TXF4, BIT(20));
2340 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0x22);
2341 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2342 if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
2343 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2344 0x1);
2345 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2346 0x1);
2347 } else if (IS_CH_5G_BAND_3(channel)) {
2348 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2349 0x2);
2350 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2351 0x2);
2352 } else if (IS_CH_5G_BAND_4(channel)) {
2353 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2354 0x3);
2355 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2356 0x3);
2357 }
2358
2359 if (channel >= 36 && channel <= 51)
2360 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x494);
2361 else if (channel >= 52 && channel <= 55)
2362 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x493);
2363 else if (channel >= 56 && channel <= 111)
2364 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x453);
2365 else if (channel >= 112 && channel <= 119)
2366 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x452);
2367 else if (channel >= 120 && channel <= 172)
2368 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x412);
2369 else if (channel >= 173 && channel <= 177)
2370 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x411);
2371 }
2372
2373 switch (bw) {
2374 case RTW_CHANNEL_WIDTH_20:
2375 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x19B);
2376 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2377 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x0);
2378 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x7);
2379 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x6);
2380 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2381 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2382 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2383 break;
2384 case RTW_CHANNEL_WIDTH_40:
2385 rtw_write32_mask(rtwdev, REG_CCKSB, BIT(4),
2386 (primary_ch_idx == RTW_SC_20_UPPER ? 1 : 0));
2387 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x5);
2388 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2389 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2390 (primary_ch_idx | (primary_ch_idx << 4)));
2391 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x1);
2392 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2393 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2394 break;
2395 case RTW_CHANNEL_WIDTH_80:
2396 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0xa);
2397 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2398 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2399 (primary_ch_idx | (primary_ch_idx << 4)));
2400 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x6);
2401 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2402 break;
2403 case RTW_CHANNEL_WIDTH_5:
2404 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2405 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2406 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x1);
2407 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x4);
2408 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x4);
2409 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2410 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2411 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2412 break;
2413 case RTW_CHANNEL_WIDTH_10:
2414 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2415 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2416 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x2);
2417 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x6);
2418 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x5);
2419 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2420 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2421 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2422 break;
2423 }
2424 }
2425
2426 static void rtw8822c_set_channel(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2427 u8 primary_chan_idx)
2428 {
2429 rtw8822c_set_channel_bb(rtwdev, channel, bw, primary_chan_idx);
2430 rtw_set_channel_mac(rtwdev, channel, bw, primary_chan_idx);
2431 rtw8822c_set_channel_rf(rtwdev, channel, bw);
2432 rtw8822c_toggle_igi(rtwdev);
2433 }
2434
2435 static void rtw8822c_config_cck_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2436 {
2437 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2438 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x0);
2439 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x0);
2440 } else if (rx_path == BB_PATH_AB) {
2441 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x1);
2442 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x1);
2443 }
2444
2445 if (rx_path == BB_PATH_A)
2446 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x0);
2447 else if (rx_path == BB_PATH_B)
2448 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x5);
2449 else if (rx_path == BB_PATH_AB)
2450 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x1);
2451 }
2452
2453 static void rtw8822c_config_ofdm_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2454 {
2455 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2456 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x0);
2457 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x0);
2458 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x0);
2459 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x0);
2460 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x0);
2461 } else if (rx_path == BB_PATH_AB) {
2462 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x1);
2463 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x1);
2464 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x1);
2465 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x1);
2466 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x1);
2467 }
2468
2469 rtw_write32_mask(rtwdev, 0x824, 0x0f000000, rx_path);
2470 rtw_write32_mask(rtwdev, 0x824, 0x000f0000, rx_path);
2471 }
2472
2473 static void rtw8822c_config_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2474 {
2475 rtw8822c_config_cck_rx_path(rtwdev, rx_path);
2476 rtw8822c_config_ofdm_rx_path(rtwdev, rx_path);
2477 }
2478
2479 static void rtw8822c_config_cck_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2480 bool is_tx2_path)
2481 {
2482 if (tx_path == BB_PATH_A) {
2483 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2484 } else if (tx_path == BB_PATH_B) {
2485 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x4);
2486 } else {
2487 if (is_tx2_path)
2488 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0xc);
2489 else
2490 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2491 }
2492 rtw8822c_bb_reset(rtwdev);
2493 }
2494
2495 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2496 enum rtw_bb_path tx_path_sel_1ss)
2497 {
2498 if (tx_path == BB_PATH_A) {
2499 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
2500 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2501 } else if (tx_path == BB_PATH_B) {
2502 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
2503 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2504 } else {
2505 if (tx_path_sel_1ss == BB_PATH_AB) {
2506 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
2507 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
2508 } else if (tx_path_sel_1ss == BB_PATH_B) {
2509 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
2510 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2511 } else if (tx_path_sel_1ss == BB_PATH_A) {
2512 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
2513 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2514 }
2515 }
2516 rtw8822c_bb_reset(rtwdev);
2517 }
2518
2519 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2520 enum rtw_bb_path tx_path_sel_1ss,
2521 enum rtw_bb_path tx_path_cck,
2522 bool is_tx2_path)
2523 {
2524 rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
2525 rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
2526 rtw8822c_bb_reset(rtwdev);
2527 }
2528
2529 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
2530 u8 rx_path, bool is_tx2_path)
2531 {
2532 if ((tx_path | rx_path) & BB_PATH_A)
2533 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x33312);
2534 else
2535 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x11111);
2536 if ((tx_path | rx_path) & BB_PATH_B)
2537 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x33312);
2538 else
2539 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
2540
2541 rtw8822c_config_rx_path(rtwdev, rx_path);
2542 rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
2543 is_tx2_path);
2544
2545 rtw8822c_toggle_igi(rtwdev);
2546 }
2547
2548 static void query_phy_status_page0(struct rtw_dev *rtwdev, u8 *phy_status,
2549 struct rtw_rx_pkt_stat *pkt_stat)
2550 {
2551 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2552 u8 l_bnd, u_bnd;
2553 u8 gain_a, gain_b;
2554 s8 rx_power[RTW_RF_PATH_MAX];
2555 s8 min_rx_power = -120;
2556 u8 rssi;
2557 u8 channel;
2558 int path;
2559
2560 rx_power[RF_PATH_A] = GET_PHY_STAT_P0_PWDB_A(phy_status);
2561 rx_power[RF_PATH_B] = GET_PHY_STAT_P0_PWDB_B(phy_status);
2562 l_bnd = dm_info->cck_gi_l_bnd;
2563 u_bnd = dm_info->cck_gi_u_bnd;
2564 gain_a = GET_PHY_STAT_P0_GAIN_A(phy_status);
2565 gain_b = GET_PHY_STAT_P0_GAIN_B(phy_status);
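	/* Compensate the reported power when the AGC gain lies outside the
	 * calibrated CCK guard-interval bounds (two units per gain step),
	 * then rebase the 110-offset hardware scale to dBm.
	 */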
2566 if (gain_a < l_bnd)
2567 rx_power[RF_PATH_A] += (l_bnd - gain_a) << 1;
2568 else if (gain_a > u_bnd)
2569 rx_power[RF_PATH_A] -= (gain_a - u_bnd) << 1;
2570 if (gain_b < l_bnd)
2571 rx_power[RF_PATH_B] += (l_bnd - gain_b) << 1;
2572 else if (gain_b > u_bnd)
2573 rx_power[RF_PATH_B] -= (gain_b - u_bnd) << 1;
2574
2575 rx_power[RF_PATH_A] -= 110;
2576 rx_power[RF_PATH_B] -= 110;
2577
2578 channel = GET_PHY_STAT_P0_CHANNEL(phy_status);
2579 if (channel != 0)
2580 rtw_set_rx_freq_band(pkt_stat, channel);
2581 else
2582 pkt_stat->channel_invalid = true;
2583
2584 pkt_stat->rx_power[RF_PATH_A] = rx_power[RF_PATH_A];
2585 pkt_stat->rx_power[RF_PATH_B] = rx_power[RF_PATH_B];
2586
2587 for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2588 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2589 dm_info->rssi[path] = rssi;
2590 }
2591
2592 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 1);
2593 pkt_stat->bw = RTW_CHANNEL_WIDTH_20;
2594 pkt_stat->signal_power = max(pkt_stat->rx_power[RF_PATH_A],
2595 min_rx_power);
2596 }
2597
2598 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
2599 struct rtw_rx_pkt_stat *pkt_stat)
2600 {
2601 struct rtw_path_div *p_div = &rtwdev->dm_path_div;
2602 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2603 u8 rxsc, bw;
2604 s8 min_rx_power = -120;
2605 s8 rx_evm;
2606 u8 evm_dbm = 0;
2607 u8 rssi;
2608 int path;
2609 u8 channel;
2610
2611 if (pkt_stat->rate > DESC_RATE11M && pkt_stat->rate < DESC_RATEMCS0)
2612 rxsc = GET_PHY_STAT_P1_L_RXSC(phy_status);
2613 else
2614 rxsc = GET_PHY_STAT_P1_HT_RXSC(phy_status);
2615
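	/* Map the RX subchannel index to the received bandwidth: 0 means the
	 * full current channel width, 1-8 a 20 MHz subchannel, 9-12 a 40 MHz
	 * subchannel, anything larger 80 MHz.
	 */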
2616 if (rxsc == 0)
2617 bw = rtwdev->hal.current_band_width;
2618 else if (rxsc >= 1 && rxsc <= 8)
2619 bw = RTW_CHANNEL_WIDTH_20;
2620 else if (rxsc >= 9 && rxsc <= 12)
2621 bw = RTW_CHANNEL_WIDTH_40;
2622 else
2623 bw = RTW_CHANNEL_WIDTH_80;
2624
2625 channel = GET_PHY_STAT_P1_CHANNEL(phy_status);
2626 rtw_set_rx_freq_band(pkt_stat, channel);
2627
2628 pkt_stat->rx_power[RF_PATH_A] = GET_PHY_STAT_P1_PWDB_A(phy_status) - 110;
2629 pkt_stat->rx_power[RF_PATH_B] = GET_PHY_STAT_P1_PWDB_B(phy_status) - 110;
2630 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 2);
2631 pkt_stat->bw = bw;
2632 pkt_stat->signal_power = max3(pkt_stat->rx_power[RF_PATH_A],
2633 pkt_stat->rx_power[RF_PATH_B],
2634 min_rx_power);
2635
2636 dm_info->curr_rx_rate = pkt_stat->rate;
2637
2638 pkt_stat->rx_evm[RF_PATH_A] = GET_PHY_STAT_P1_RXEVM_A(phy_status);
2639 pkt_stat->rx_evm[RF_PATH_B] = GET_PHY_STAT_P1_RXEVM_B(phy_status);
2640
2641 pkt_stat->rx_snr[RF_PATH_A] = GET_PHY_STAT_P1_RXSNR_A(phy_status);
2642 pkt_stat->rx_snr[RF_PATH_B] = GET_PHY_STAT_P1_RXSNR_B(phy_status);
2643
2644 pkt_stat->cfo_tail[RF_PATH_A] = GET_PHY_STAT_P1_CFO_TAIL_A(phy_status);
2645 pkt_stat->cfo_tail[RF_PATH_B] = GET_PHY_STAT_P1_CFO_TAIL_B(phy_status);
2646
2647 for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2648 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2649 dm_info->rssi[path] = rssi;
2650 if (path == RF_PATH_A) {
2651 p_div->path_a_sum += rssi;
2652 p_div->path_a_cnt++;
2653 } else if (path == RF_PATH_B) {
2654 p_div->path_b_sum += rssi;
2655 p_div->path_b_cnt++;
2656 }
2657 dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
2658 dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
2659
2660 rx_evm = pkt_stat->rx_evm[path];
2661
2662 if (rx_evm < 0) {
2663 if (rx_evm == S8_MIN)
2664 evm_dbm = 0;
2665 else
2666 evm_dbm = ((u8)-rx_evm >> 1);
2667 }
2668 dm_info->rx_evm_dbm[path] = evm_dbm;
2669 }
2670 rtw_phy_parsing_cfo(rtwdev, pkt_stat);
2671 }
2672
2673 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
2674 struct rtw_rx_pkt_stat *pkt_stat)
2675 {
2676 u8 page;
2677
2678 page = *phy_status & 0xf;
2679
2680 switch (page) {
2681 case 0:
2682 query_phy_status_page0(rtwdev, phy_status, pkt_stat);
2683 break;
2684 case 1:
2685 query_phy_status_page1(rtwdev, phy_status, pkt_stat);
2686 break;
2687 default:
2688 rtw_warn(rtwdev, "unused phy status page (%d)\n", page);
2689 return;
2690 }
2691 }
2692
2693 static void rtw8822c_query_rx_desc(struct rtw_dev *rtwdev, u8 *rx_desc,
2694 struct rtw_rx_pkt_stat *pkt_stat,
2695 struct ieee80211_rx_status *rx_status)
2696 {
2697 struct ieee80211_hdr *hdr;
2698 u32 desc_sz = rtwdev->chip->rx_pkt_desc_sz;
2699 u8 *phy_status = NULL;
2700
2701 memset(pkt_stat, 0, sizeof(*pkt_stat));
2702
2703 pkt_stat->phy_status = GET_RX_DESC_PHYST(rx_desc);
2704 pkt_stat->icv_err = GET_RX_DESC_ICV_ERR(rx_desc);
2705 pkt_stat->crc_err = GET_RX_DESC_CRC32(rx_desc);
2706 pkt_stat->decrypted = !GET_RX_DESC_SWDEC(rx_desc) &&
2707 GET_RX_DESC_ENC_TYPE(rx_desc) != RX_DESC_ENC_NONE;
2708 pkt_stat->is_c2h = GET_RX_DESC_C2H(rx_desc);
2709 pkt_stat->pkt_len = GET_RX_DESC_PKT_LEN(rx_desc);
2710 pkt_stat->drv_info_sz = GET_RX_DESC_DRV_INFO_SIZE(rx_desc);
2711 pkt_stat->shift = GET_RX_DESC_SHIFT(rx_desc);
2712 pkt_stat->rate = GET_RX_DESC_RX_RATE(rx_desc);
2713 pkt_stat->cam_id = GET_RX_DESC_MACID(rx_desc);
2714 pkt_stat->ppdu_cnt = GET_RX_DESC_PPDU_CNT(rx_desc);
2715 pkt_stat->tsf_low = GET_RX_DESC_TSFL(rx_desc);
2716
2717 	/* drv_info_sz is in units of 8 bytes */
2718 pkt_stat->drv_info_sz *= 8;
2719
2720 	/* we are not interested in the rx/phy status of c2h cmd packets */
2721 if (pkt_stat->is_c2h)
2722 return;
2723
2724 hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
2725 pkt_stat->drv_info_sz);
2726 pkt_stat->hdr = hdr;
2727 if (pkt_stat->phy_status) {
2728 phy_status = rx_desc + desc_sz + pkt_stat->shift;
2729 query_phy_status(rtwdev, phy_status, pkt_stat);
2730 }
2731
2732 rtw_rx_fill_rx_status(rtwdev, pkt_stat, hdr, rx_status, phy_status);
2733 }
2734
2735 static void
2736 rtw8822c_set_write_tx_power_ref(struct rtw_dev *rtwdev, u8 *tx_pwr_ref_cck,
2737 u8 *tx_pwr_ref_ofdm)
2738 {
2739 struct rtw_hal *hal = &rtwdev->hal;
2740 u32 txref_cck[2] = {0x18a0, 0x41a0};
2741 u32 txref_ofdm[2] = {0x18e8, 0x41e8};
2742 u8 path;
2743
2744 for (path = 0; path < hal->rf_path_num; path++) {
2745 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2746 rtw_write32_mask(rtwdev, txref_cck[path], 0x7f0000,
2747 tx_pwr_ref_cck[path]);
2748 }
2749 for (path = 0; path < hal->rf_path_num; path++) {
2750 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2751 rtw_write32_mask(rtwdev, txref_ofdm[path], 0x1fc00,
2752 tx_pwr_ref_ofdm[path]);
2753 }
2754 }
2755
2756 static void rtw8822c_set_tx_power_diff(struct rtw_dev *rtwdev, u8 rate,
2757 s8 *diff_idx)
2758 {
2759 u32 offset_txagc = 0x3a00;
2760 u8 rate_idx = rate & 0xfc;
2761 u8 pwr_idx[4];
2762 u32 phy_pwr_idx;
2763 int i;
2764
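	/* One TX AGC register covers four consecutive rates; keep each
	 * per-rate diff as a 7-bit two's-complement field and pack all four
	 * into a single 32-bit write.
	 */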
2765 for (i = 0; i < 4; i++)
2766 pwr_idx[i] = diff_idx[i] & 0x7f;
2767
2768 phy_pwr_idx = pwr_idx[0] |
2769 (pwr_idx[1] << 8) |
2770 (pwr_idx[2] << 16) |
2771 (pwr_idx[3] << 24);
2772
2773 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0x0);
2774 rtw_write32_mask(rtwdev, offset_txagc + rate_idx, MASKDWORD,
2775 phy_pwr_idx);
2776 }
2777
2778 static void rtw8822c_set_tx_power_index(struct rtw_dev *rtwdev)
2779 {
2780 struct rtw_hal *hal = &rtwdev->hal;
2781 u8 rs, rate, j;
2782 u8 pwr_ref_cck[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATE11M],
2783 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATE11M]};
2784 u8 pwr_ref_ofdm[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATEMCS7],
2785 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATEMCS7]};
2786 s8 diff_a, diff_b;
2787 u8 pwr_a, pwr_b;
2788 s8 diff_idx[4];
2789
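	/* CCK 11M and OFDM MCS7 act as the per-path reference powers; every
	 * other rate is programmed as a signed diff against the reference of
	 * its own section, taking the smaller diff of the two paths.
	 */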
2790 rtw8822c_set_write_tx_power_ref(rtwdev, pwr_ref_cck, pwr_ref_ofdm);
2791 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++) {
2792 for (j = 0; j < rtw_rate_size[rs]; j++) {
2793 rate = rtw_rate_section[rs][j];
2794 pwr_a = hal->tx_pwr_tbl[RF_PATH_A][rate];
2795 pwr_b = hal->tx_pwr_tbl[RF_PATH_B][rate];
2796 if (rs == 0) {
2797 diff_a = (s8)pwr_a - (s8)pwr_ref_cck[0];
2798 diff_b = (s8)pwr_b - (s8)pwr_ref_cck[1];
2799 } else {
2800 diff_a = (s8)pwr_a - (s8)pwr_ref_ofdm[0];
2801 diff_b = (s8)pwr_b - (s8)pwr_ref_ofdm[1];
2802 }
2803 diff_idx[rate % 4] = min(diff_a, diff_b);
2804 if (rate % 4 == 3)
2805 rtw8822c_set_tx_power_diff(rtwdev, rate - 3,
2806 diff_idx);
2807 }
2808 }
2809 }
2810
2811 static int rtw8822c_set_antenna(struct rtw_dev *rtwdev,
2812 u32 antenna_tx,
2813 u32 antenna_rx)
2814 {
2815 struct rtw_hal *hal = &rtwdev->hal;
2816
2817 switch (antenna_tx) {
2818 case BB_PATH_A:
2819 case BB_PATH_B:
2820 case BB_PATH_AB:
2821 break;
2822 default:
2823 rtw_warn(rtwdev, "unsupported tx path 0x%x\n", antenna_tx);
2824 return -EINVAL;
2825 }
2826
2827 	/* RX on path B alone is not supported */
2828 switch (antenna_rx) {
2829 case BB_PATH_A:
2830 case BB_PATH_AB:
2831 break;
2832 default:
2833 rtw_warn(rtwdev, "unsupported rx path 0x%x\n", antenna_rx);
2834 return -EINVAL;
2835 }
2836
2837 hal->antenna_tx = antenna_tx;
2838 hal->antenna_rx = antenna_rx;
2839
2840 rtw8822c_config_trx_mode(rtwdev, antenna_tx, antenna_rx, false);
2841
2842 return 0;
2843 }
2844
2845 static void rtw8822c_cfg_ldo25(struct rtw_dev *rtwdev, bool enable)
2846 {
2847 u8 ldo_pwr;
2848
2849 ldo_pwr = rtw_read8(rtwdev, REG_ANAPARLDO_POW_MAC);
2850 ldo_pwr = enable ? ldo_pwr | BIT_LDOE25_PON : ldo_pwr & ~BIT_LDOE25_PON;
2851 rtw_write8(rtwdev, REG_ANAPARLDO_POW_MAC, ldo_pwr);
2852 }
2853
2854 static void rtw8822c_false_alarm_statistics(struct rtw_dev *rtwdev)
2855 {
2856 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2857 u32 cck_enable;
2858 u32 cck_fa_cnt;
2859 u32 crc32_cnt;
2860 u32 cca32_cnt;
2861 u32 ofdm_fa_cnt;
2862 u32 ofdm_fa_cnt1, ofdm_fa_cnt2, ofdm_fa_cnt3, ofdm_fa_cnt4, ofdm_fa_cnt5;
2863 u16 parity_fail, rate_illegal, crc8_fail, mcs_fail, sb_search_fail,
2864 fast_fsync, crc8_fail_vhta, mcs_fail_vht;
2865
2866 cck_enable = rtw_read32(rtwdev, REG_ENCCK) & BIT_CCK_BLK_EN;
2867 cck_fa_cnt = rtw_read16(rtwdev, REG_CCK_FACNT);
2868
2869 ofdm_fa_cnt1 = rtw_read32(rtwdev, REG_OFDM_FACNT1);
2870 ofdm_fa_cnt2 = rtw_read32(rtwdev, REG_OFDM_FACNT2);
2871 ofdm_fa_cnt3 = rtw_read32(rtwdev, REG_OFDM_FACNT3);
2872 ofdm_fa_cnt4 = rtw_read32(rtwdev, REG_OFDM_FACNT4);
2873 ofdm_fa_cnt5 = rtw_read32(rtwdev, REG_OFDM_FACNT5);
2874
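	/* Each counter register packs two 16-bit false-alarm sub-counters;
	 * pick out the relevant fields and sum them into the OFDM total.
	 */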
2875 parity_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt1);
2876 rate_illegal = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt2);
2877 crc8_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt2);
2878 crc8_fail_vhta = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt3);
2879 mcs_fail = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt4);
2880 mcs_fail_vht = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt4);
2881 fast_fsync = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt5);
2882 sb_search_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt5);
2883
2884 ofdm_fa_cnt = parity_fail + rate_illegal + crc8_fail + crc8_fail_vhta +
2885 mcs_fail + mcs_fail_vht + fast_fsync + sb_search_fail;
2886
2887 dm_info->cck_fa_cnt = cck_fa_cnt;
2888 dm_info->ofdm_fa_cnt = ofdm_fa_cnt;
2889 dm_info->total_fa_cnt = ofdm_fa_cnt;
2890 dm_info->total_fa_cnt += cck_enable ? cck_fa_cnt : 0;
2891
2892 crc32_cnt = rtw_read32(rtwdev, 0x2c04);
2893 dm_info->cck_ok_cnt = crc32_cnt & 0xffff;
2894 dm_info->cck_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2895 crc32_cnt = rtw_read32(rtwdev, 0x2c14);
2896 dm_info->ofdm_ok_cnt = crc32_cnt & 0xffff;
2897 dm_info->ofdm_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2898 crc32_cnt = rtw_read32(rtwdev, 0x2c10);
2899 dm_info->ht_ok_cnt = crc32_cnt & 0xffff;
2900 dm_info->ht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2901 crc32_cnt = rtw_read32(rtwdev, 0x2c0c);
2902 dm_info->vht_ok_cnt = crc32_cnt & 0xffff;
2903 dm_info->vht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2904
2905 cca32_cnt = rtw_read32(rtwdev, 0x2c08);
2906 dm_info->ofdm_cca_cnt = ((cca32_cnt & 0xffff0000) >> 16);
2907 dm_info->cck_cca_cnt = cca32_cnt & 0xffff;
2908 dm_info->total_cca_cnt = dm_info->ofdm_cca_cnt;
2909 if (cck_enable)
2910 dm_info->total_cca_cnt += dm_info->cck_cca_cnt;
2911
2912 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 0);
2913 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 2);
2914 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 0);
2915 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 2);
2916
2917 /* disable rx clk gating to reset counters */
2918 rtw_write32_clr(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2919 rtw_write32_set(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2920 rtw_write32_clr(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2921 rtw_write32_set(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2922 }
2923
2924 static void rtw8822c_do_lck(struct rtw_dev *rtwdev)
2925 {
2926 u32 val;
2927
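	/* LCK: enable the synthesizer, start the AAC calibration and poll
	 * (up to 100ms) until the RF reports completion, then toggle the
	 * fast-lock control to finish.
	 */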
2928 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2929 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0FA);
2930 fsleep(1);
2931 rtw_write_rf(rtwdev, RF_PATH_A, RF_AAC_CTRL, RFREG_MASK, 0x80000);
2932 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_AAC, RFREG_MASK, 0x80001);
2933 read_poll_timeout(rtw_read_rf, val, val != 0x1, 1000, 100000,
2934 true, rtwdev, RF_PATH_A, RF_AAC_CTRL, 0x1000);
2935 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0F8);
2936 rtw_write_rf(rtwdev, RF_PATH_B, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2937
2938 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2939 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x4f000);
2940 fsleep(1);
2941 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2942 }
2943
2944 static void rtw8822c_do_iqk(struct rtw_dev *rtwdev)
2945 {
2946 struct rtw_iqk_para para = {0};
2947 u8 iqk_chk;
2948 int ret;
2949
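	/* IQK is carried out by the firmware: request it with the result
	 * buffer cleared, poll the report register (up to 300ms) for
	 * IQK_DONE_8822C, then clear the IQK status.
	 */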
2950 para.clear = 1;
2951 	rtw_fw_do_iqk(rtwdev, &para);
2952
2953 ret = read_poll_timeout(rtw_read8, iqk_chk, iqk_chk == IQK_DONE_8822C,
2954 20000, 300000, false, rtwdev, REG_RPT_CIP);
2955 if (ret)
2956 rtw_warn(rtwdev, "failed to poll iqk status bit\n");
2957
2958 rtw_write8(rtwdev, REG_IQKSTAT, 0x0);
2959 }
2960
2961 /* for coex */
2962 static void rtw8822c_coex_cfg_init(struct rtw_dev *rtwdev)
2963 {
2964 	/* enable TBTT interrupt */
2965 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2966
2967 /* BT report packet sample rate */
2968 /* 0x790[5:0]=0x5 */
2969 rtw_write8_mask(rtwdev, REG_BT_TDMA_TIME, BIT_MASK_SAMPLE_RATE, 0x5);
2970
2971 /* enable BT counter statistics */
2972 rtw_write8(rtwdev, REG_BT_STAT_CTRL, 0x1);
2973
2974 	/* enable PTA (3-wire function from BT side) */
2975 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_BT_PTA_EN);
2976 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_PO_BT_PTA_PINS);
2977
2978 	/* enable PTA (tx/rx signal from WiFi side) */
2979 rtw_write8_set(rtwdev, REG_QUEUE_CTRL, BIT_PTA_WL_TX_EN);
2980 	/* WL tx signal to PTA does not take EDCCA into account */
2981 rtw_write8_clr(rtwdev, REG_QUEUE_CTRL, BIT_PTA_EDCCA_EN);
2982 	/* GNT_BT = 1 while both are selected */
2983 rtw_write16_set(rtwdev, REG_BT_COEX_V2, BIT_GNT_BT_POLARITY);
2984 /* BT_CCA = ~GNT_WL_BB, not or GNT_BT_BB, LTE_Rx */
2985 rtw_write8_clr(rtwdev, REG_DUMMY_PAGE4_V1, BIT_BTCCA_CTRL);
2986
2987 /* to avoid RF parameter error */
2988 rtw_write_rf(rtwdev, RF_PATH_B, RF_MODOPT, 0xfffff, 0x40000);
2989 }
2990
2991 static void rtw8822c_coex_cfg_gnt_fix(struct rtw_dev *rtwdev)
2992 {
2993 struct rtw_coex *coex = &rtwdev->coex;
2994 struct rtw_coex_stat *coex_stat = &coex->stat;
2995 struct rtw_efuse *efuse = &rtwdev->efuse;
2996 u32 rf_0x1;
2997
2998 if (coex_stat->gnt_workaround_state == coex_stat->wl_coex_mode)
2999 return;
3000
3001 coex_stat->gnt_workaround_state = coex_stat->wl_coex_mode;
3002
3003 if ((coex_stat->kt_ver == 0 && coex->under_5g) || coex->freerun)
3004 rf_0x1 = 0x40021;
3005 else
3006 rf_0x1 = 0x40000;
3007
3008 /* BT at S1 for Shared-Ant */
3009 if (efuse->share_ant)
3010 rf_0x1 |= BIT(13);
3011
3012 rtw_write_rf(rtwdev, RF_PATH_B, 0x1, 0xfffff, rf_0x1);
3013
3014 /* WL-S0 2G RF TRX cannot be masked by GNT_BT
3015 	 * enable "WLS0 BB change RF mode if GNT_BT = 1" for shared-antenna type
3016 * disable:0x1860[3] = 1, enable:0x1860[3] = 0
3017 *
3018 * enable "DAC off if GNT_WL = 0" for non-shared-antenna
3019 * disable 0x1c30[22] = 0,
3020 * enable: 0x1c30[22] = 1, 0x1c38[12] = 0, 0x1c38[28] = 1
3021 */
3022 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3023 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3024 BIT_ANAPAR_BTPS >> 16, 0);
3025 } else {
3026 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3027 BIT_ANAPAR_BTPS >> 16, 1);
3028 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 1,
3029 BIT_DAC_OFF_ENABLE, 0);
3030 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 3,
3031 BIT_DAC_OFF_ENABLE, 1);
3032 }
3033
3034 	/* disable WL-S1 BB change RF mode if GNT_BT
3035 * since RF TRx mask can do it
3036 */
3037 rtw_write8_mask(rtwdev, REG_IGN_GNTBT4,
3038 BIT_PI_IGNORE_GNT_BT, 1);
3039
3040 	/* disable WL-S0 BB change RF mode if wifi is at 5G,
3041 * or antenna path is separated
3042 */
3043 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3044 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3045 BIT_PI_IGNORE_GNT_BT, 1);
3046 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3047 BIT_NOMASK_TXBT_ENABLE, 1);
3048 } else if (coex_stat->wl_coex_mode == COEX_WLINK_5G ||
3049 coex->under_5g || !efuse->share_ant) {
3050 if (coex_stat->kt_ver >= 3) {
3051 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3052 BIT_PI_IGNORE_GNT_BT, 0);
3053 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3054 BIT_NOMASK_TXBT_ENABLE, 1);
3055 } else {
3056 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3057 BIT_PI_IGNORE_GNT_BT, 1);
3058 }
3059 } else {
3060 /* shared-antenna */
3061 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3062 BIT_PI_IGNORE_GNT_BT, 0);
3063 if (coex_stat->kt_ver >= 3) {
3064 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3065 BIT_NOMASK_TXBT_ENABLE, 0);
3066 }
3067 }
3068 }
3069
3070 static void rtw8822c_coex_cfg_gnt_debug(struct rtw_dev *rtwdev)
3071 {
3072 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 2, BIT_BTGP_SPI_EN >> 16, 0);
3073 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 3, BIT_BTGP_JTAG_EN >> 24, 0);
3074 rtw_write8_mask(rtwdev, REG_GPIO_MUXCFG + 2, BIT_FSPI_EN >> 16, 0);
3075 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 1, BIT_LED1DIS >> 8, 0);
3076 rtw_write8_mask(rtwdev, REG_SYS_SDIO_CTRL + 3, BIT_DBG_GNT_WL_BT >> 24, 0);
3077 }
3078
rtw8822c_coex_cfg_rfe_type(struct rtw_dev * rtwdev)3079 static void rtw8822c_coex_cfg_rfe_type(struct rtw_dev *rtwdev)
3080 {
3081 struct rtw_coex *coex = &rtwdev->coex;
3082 struct rtw_coex_rfe *coex_rfe = &coex->rfe;
3083 struct rtw_efuse *efuse = &rtwdev->efuse;
3084
3085 coex_rfe->rfe_module_type = efuse->rfe_option;
3086 coex_rfe->ant_switch_polarity = 0;
3087 coex_rfe->ant_switch_exist = false;
3088 coex_rfe->ant_switch_with_bt = false;
3089 coex_rfe->ant_switch_diversity = false;
3090
3091 if (efuse->share_ant)
3092 coex_rfe->wlg_at_btg = true;
3093 else
3094 coex_rfe->wlg_at_btg = false;
3095
3096 /* disable LTE coex on the WiFi side */
3097 rtw_coex_write_indirect_reg(rtwdev, LTE_COEX_CTRL, BIT_LTE_COEX_EN, 0x0);
3098 rtw_coex_write_indirect_reg(rtwdev, LTE_WL_TRX_CTRL, MASKLWORD, 0xffff);
3099 rtw_coex_write_indirect_reg(rtwdev, LTE_BT_TRX_CTRL, MASKLWORD, 0xffff);
3100 }
3101
rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev * rtwdev,u8 wl_pwr)3102 static void rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev *rtwdev, u8 wl_pwr)
3103 {
3104 struct rtw_coex *coex = &rtwdev->coex;
3105 struct rtw_coex_dm *coex_dm = &coex->dm;
3106
3107 if (wl_pwr == coex_dm->cur_wl_pwr_lvl)
3108 return;
3109
3110 coex_dm->cur_wl_pwr_lvl = wl_pwr;
3111 }
3112
rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev * rtwdev,bool low_gain)3113 static void rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev *rtwdev, bool low_gain)
3114 {
3115 struct rtw_coex *coex = &rtwdev->coex;
3116 struct rtw_coex_dm *coex_dm = &coex->dm;
3117
3118 if (low_gain == coex_dm->cur_wl_rx_low_gain_en)
3119 return;
3120
3121 coex_dm->cur_wl_rx_low_gain_en = low_gain;
3122
3123 if (coex_dm->cur_wl_rx_low_gain_en) {
3124 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table On!\n");
3125
3126 /* set Rx filter corner RCK offset */
3127 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x22);
3128 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x36);
3129 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x22);
3130 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x36);
3131
3132 } else {
3133 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table Off!\n");
3134
3135 /* set Rx filter corner RCK offset */
3136 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x20);
3137 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x0);
3138 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x20);
3139 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x0);
3140 }
3141 }
3142
rtw8822c_bf_enable_bfee_su(struct rtw_dev * rtwdev,struct rtw_vif * vif,struct rtw_bfee * bfee)3143 static void rtw8822c_bf_enable_bfee_su(struct rtw_dev *rtwdev,
3144 struct rtw_vif *vif,
3145 struct rtw_bfee *bfee)
3146 {
3147 u8 csi_rsc = 0;
3148 u32 tmp6dc;
3149
3150 rtw_bf_enable_bfee_su(rtwdev, vif, bfee);
3151
3152 tmp6dc = rtw_read32(rtwdev, REG_BBPSF_CTRL) |
3153 BIT_WMAC_USE_NDPARATE |
3154 (csi_rsc << 13);
3155 if (vif->net_type == RTW_NET_AP_MODE)
3156 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc | BIT(12));
3157 else
3158 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc & ~BIT(12));
3159
3160 rtw_write32(rtwdev, REG_CSI_RRSR, 0x550);
3161 }
3162
rtw8822c_bf_config_bfee_su(struct rtw_dev * rtwdev,struct rtw_vif * vif,struct rtw_bfee * bfee,bool enable)3163 static void rtw8822c_bf_config_bfee_su(struct rtw_dev *rtwdev,
3164 struct rtw_vif *vif,
3165 struct rtw_bfee *bfee, bool enable)
3166 {
3167 if (enable)
3168 rtw8822c_bf_enable_bfee_su(rtwdev, vif, bfee);
3169 else
3170 rtw_bf_remove_bfee_su(rtwdev, bfee);
3171 }
3172
rtw8822c_bf_config_bfee_mu(struct rtw_dev * rtwdev,struct rtw_vif * vif,struct rtw_bfee * bfee,bool enable)3173 static void rtw8822c_bf_config_bfee_mu(struct rtw_dev *rtwdev,
3174 struct rtw_vif *vif,
3175 struct rtw_bfee *bfee, bool enable)
3176 {
3177 if (enable)
3178 rtw_bf_enable_bfee_mu(rtwdev, vif, bfee);
3179 else
3180 rtw_bf_remove_bfee_mu(rtwdev, bfee);
3181 }
3182
rtw8822c_bf_config_bfee(struct rtw_dev * rtwdev,struct rtw_vif * vif,struct rtw_bfee * bfee,bool enable)3183 static void rtw8822c_bf_config_bfee(struct rtw_dev *rtwdev, struct rtw_vif *vif,
3184 struct rtw_bfee *bfee, bool enable)
3185 {
3186 if (bfee->role == RTW_BFEE_SU)
3187 rtw8822c_bf_config_bfee_su(rtwdev, vif, bfee, enable);
3188 else if (bfee->role == RTW_BFEE_MU)
3189 rtw8822c_bf_config_bfee_mu(rtwdev, vif, bfee, enable);
3190 else
3191 rtw_warn(rtwdev, "wrong bfee role\n");
3192 }
3193
3194 struct dpk_cfg_pair {
3195 u32 addr;
3196 u32 bitmask;
3197 u32 data;
3198 };
3199
rtw8822c_parse_tbl_dpk(struct rtw_dev * rtwdev,const struct rtw_table * tbl)3200 void rtw8822c_parse_tbl_dpk(struct rtw_dev *rtwdev,
3201 const struct rtw_table *tbl)
3202 {
3203 const struct dpk_cfg_pair *p = tbl->data;
3204 const struct dpk_cfg_pair *end = p + tbl->size / 3;
3205
3206 BUILD_BUG_ON(sizeof(struct dpk_cfg_pair) != sizeof(u32) * 3);
3207
3208 for (; p < end; p++)
3209 rtw_write32_mask(rtwdev, p->addr, p->bitmask, p->data);
3210 }
3211
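/* Before a DPK one-shot, force the grant control so that WL owns the RF
 * front-end: back up 0x70 and LTE coex indirect register 0x38, set 0x70[26]
 * for software control, and write what appears to be a software-grant
 * pattern (0x77) to 0x38.  The saved values are restored after calibration.
 */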
rtw8822c_dpk_set_gnt_wl(struct rtw_dev * rtwdev,bool is_before_k)3212 static void rtw8822c_dpk_set_gnt_wl(struct rtw_dev *rtwdev, bool is_before_k)
3213 {
3214 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3215
3216 if (is_before_k) {
3217 dpk_info->gnt_control = rtw_read32(rtwdev, 0x70);
3218 dpk_info->gnt_value = rtw_coex_read_indirect_reg(rtwdev, 0x38);
3219 rtw_write32_mask(rtwdev, 0x70, BIT(26), 0x1);
3220 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKBYTE1, 0x77);
3221 } else {
3222 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKDWORD,
3223 dpk_info->gnt_value);
3224 rtw_write32(rtwdev, 0x70, dpk_info->gnt_control);
3225 }
3226 }
3227
3228 static void
rtw8822c_dpk_restore_registers(struct rtw_dev * rtwdev,u32 reg_num,struct rtw_backup_info * bckp)3229 rtw8822c_dpk_restore_registers(struct rtw_dev *rtwdev, u32 reg_num,
3230 struct rtw_backup_info *bckp)
3231 {
3232 rtw_restore_reg(rtwdev, bckp, reg_num);
3233 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3234 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0x4);
3235 }
3236
3237 static void
rtw8822c_dpk_backup_registers(struct rtw_dev * rtwdev,u32 * reg,u32 reg_num,struct rtw_backup_info * bckp)3238 rtw8822c_dpk_backup_registers(struct rtw_dev *rtwdev, u32 *reg,
3239 u32 reg_num, struct rtw_backup_info *bckp)
3240 {
3241 u32 i;
3242
3243 for (i = 0; i < reg_num; i++) {
3244 bckp[i].len = 4;
3245 bckp[i].reg = reg[i];
3246 bckp[i].val = rtw_read32(rtwdev, reg[i]);
3247 }
3248 }
3249
rtw8822c_dpk_backup_rf_registers(struct rtw_dev * rtwdev,u32 * rf_reg,u32 rf_reg_bak[][2])3250 static void rtw8822c_dpk_backup_rf_registers(struct rtw_dev *rtwdev,
3251 u32 *rf_reg,
3252 u32 rf_reg_bak[][2])
3253 {
3254 u32 i;
3255
3256 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3257 rf_reg_bak[i][RF_PATH_A] = rtw_read_rf(rtwdev, RF_PATH_A,
3258 rf_reg[i], RFREG_MASK);
3259 rf_reg_bak[i][RF_PATH_B] = rtw_read_rf(rtwdev, RF_PATH_B,
3260 rf_reg[i], RFREG_MASK);
3261 }
3262 }
3263
rtw8822c_dpk_reload_rf_registers(struct rtw_dev * rtwdev,u32 * rf_reg,u32 rf_reg_bak[][2])3264 static void rtw8822c_dpk_reload_rf_registers(struct rtw_dev *rtwdev,
3265 u32 *rf_reg,
3266 u32 rf_reg_bak[][2])
3267 {
3268 u32 i;
3269
3270 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3271 rtw_write_rf(rtwdev, RF_PATH_A, rf_reg[i], RFREG_MASK,
3272 rf_reg_bak[i][RF_PATH_A]);
3273 rtw_write_rf(rtwdev, RF_PATH_B, rf_reg[i], RFREG_MASK,
3274 rf_reg_bak[i][RF_PATH_B]);
3275 }
3276 }
3277
rtw8822c_dpk_information(struct rtw_dev * rtwdev)3278 static void rtw8822c_dpk_information(struct rtw_dev *rtwdev)
3279 {
3280 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3281 u32 reg;
3282 u8 band_shift;
3283
3284 reg = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
3285
3286 band_shift = FIELD_GET(BIT(16), reg);
3287 dpk_info->dpk_band = 1 << band_shift;
3288 dpk_info->dpk_ch = FIELD_GET(0xff, reg);
3289 dpk_info->dpk_bw = FIELD_GET(0x3000, reg);
3290 }
3291
rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev * rtwdev,u8 path)3292 static void rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev *rtwdev, u8 path)
3293 {
3294 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3295 udelay(5);
3296 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84801);
3297 usleep_range(600, 610);
3298 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3299 }
3300
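/* Read back the DC offset (I/Q) and the correlation index from the DPK
 * report SRAM.  Returns 1 (caller retries the RXBB DC calibration) when the
 * DC offset exceeds 200 LSB on either rail or the correlation index falls
 * outside 40..65, and 0 otherwise.
 */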
rtw8822c_dpk_dc_corr_check(struct rtw_dev * rtwdev,u8 path)3301 static u8 rtw8822c_dpk_dc_corr_check(struct rtw_dev *rtwdev, u8 path)
3302 {
3303 u16 dc_i, dc_q;
3304 u8 corr_idx;
3305
3306 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000900f0);
3307 dc_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3308 dc_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(11, 0));
3309
3310 if (dc_i & BIT(11))
3311 dc_i = 0x1000 - dc_i;
3312 if (dc_q & BIT(11))
3313 dc_q = 0x1000 - dc_q;
3314
3315 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3316 corr_idx = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(7, 0));
3317 rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(15, 8));
3318
3319 if (dc_i > 200 || dc_q > 200 || corr_idx < 40 || corr_idx > 65)
3320 return 1;
3321 else
3322 return 0;
3323
3324 }
3325
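/* Pause MAC TX (0x522 = 0xff) and wait for both RF paths to leave TX mode.
 * RF register 0x00[19:16] presumably reports the path mode, with 2 meaning
 * TX; the loop polls every 2 us and gives up after 2500 iterations (~5 ms).
 */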
rtw8822c_dpk_tx_pause(struct rtw_dev * rtwdev)3326 static void rtw8822c_dpk_tx_pause(struct rtw_dev *rtwdev)
3327 {
3328 u8 reg_a, reg_b;
3329 u16 count = 0;
3330
3331 rtw_write8(rtwdev, 0x522, 0xff);
3332 rtw_write32_mask(rtwdev, 0x1e70, 0xf, 0x2);
3333
3334 do {
3335 reg_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A, 0x00, 0xf0000);
3336 reg_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B, 0x00, 0xf0000);
3337 udelay(2);
3338 count++;
3339 } while ((reg_a == 2 || reg_b == 2) && count < 2500);
3340 }
3341
rtw8822c_dpk_mac_bb_setting(struct rtw_dev * rtwdev)3342 static void rtw8822c_dpk_mac_bb_setting(struct rtw_dev *rtwdev)
3343 {
3344 rtw8822c_dpk_tx_pause(rtwdev);
3345 rtw_load_table(rtwdev, &rtw8822c_dpk_mac_bb_tbl);
3346 }
3347
rtw8822c_dpk_afe_setting(struct rtw_dev * rtwdev,bool is_do_dpk)3348 static void rtw8822c_dpk_afe_setting(struct rtw_dev *rtwdev, bool is_do_dpk)
3349 {
3350 if (is_do_dpk)
3351 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_is_dpk_tbl);
3352 else
3353 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_no_dpk_tbl);
3354 }
3355
rtw8822c_dpk_pre_setting(struct rtw_dev * rtwdev)3356 static void rtw8822c_dpk_pre_setting(struct rtw_dev *rtwdev)
3357 {
3358 u8 path;
3359
3360 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3361 rtw_write_rf(rtwdev, path, RF_RXAGC_OFFSET, RFREG_MASK, 0x0);
3362 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3363 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G)
3364 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
3365 else
3366 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
3367 rtw_write32_mask(rtwdev, REG_DPD_LUT0, BIT_GLOSS_DB, 0x4);
3368 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x3);
3369 }
3370 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3371 rtw_write32(rtwdev, REG_DPD_CTL11, 0x3b23170b);
3372 rtw_write32(rtwdev, REG_DPD_CTL12, 0x775f5347);
3373 }
3374
rtw8822c_dpk_rf_setting(struct rtw_dev * rtwdev,u8 path)3375 static u32 rtw8822c_dpk_rf_setting(struct rtw_dev *rtwdev, u8 path)
3376 {
3377 u32 ori_txbb;
3378
3379 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50017);
3380 ori_txbb = rtw_read_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK);
3381
3382 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
3383 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_PWR_TRIM, 0x1);
3384 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_BB_GAIN, 0x0);
3385 rtw_write_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK, ori_txbb);
3386
3387 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G) {
3388 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x1);
3389 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x0);
3390 } else {
3391 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x0);
3392 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x6);
3393 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
3394 rtw_write_rf(rtwdev, path, RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0);
3395 }
3396
3397 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3398 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
3399 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
3400
3401 if (rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80)
3402 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x2);
3403 else
3404 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
3405
3406 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT(1), 0x1);
3407
3408 usleep_range(100, 110);
3409
3410 return ori_txbb & 0x1f;
3411 }
3412
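/* Build the one-shot command word written to REG_NCTL0: the opcode (high
 * byte) selects the calibration action and path, with +2 for 80 MHz
 * bandwidth on the DO_DPK/DAGC actions, and the low byte is fixed at 0x48.
 */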
rtw8822c_dpk_get_cmd(struct rtw_dev * rtwdev,u8 action,u8 path)3413 static u16 rtw8822c_dpk_get_cmd(struct rtw_dev *rtwdev, u8 action, u8 path)
3414 {
3415 u16 cmd;
3416 u8 bw = rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80 ? 2 : 0;
3417
3418 switch (action) {
3419 case RTW_DPK_GAIN_LOSS:
3420 cmd = 0x14 + path;
3421 break;
3422 case RTW_DPK_DO_DPK:
3423 cmd = 0x16 + path + bw;
3424 break;
3425 case RTW_DPK_DPK_ON:
3426 cmd = 0x1a + path;
3427 break;
3428 case RTW_DPK_DAGC:
3429 cmd = 0x1c + path + bw;
3430 break;
3431 default:
3432 return 0;
3433 }
3434
3435 return (cmd << 8) | 0x48;
3436 }
3437
rtw8822c_dpk_one_shot(struct rtw_dev * rtwdev,u8 path,u8 action)3438 static u8 rtw8822c_dpk_one_shot(struct rtw_dev *rtwdev, u8 path, u8 action)
3439 {
3440 u16 dpk_cmd;
3441 u8 result = 0;
3442
3443 rtw8822c_dpk_set_gnt_wl(rtwdev, true);
3444
3445 if (action == RTW_DPK_CAL_PWR) {
3446 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x1);
3447 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x0);
3448 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3449 msleep(10);
3450 if (!check_hw_ready(rtwdev, REG_STAT_RPT, BIT(31), 0x1)) {
3451 result = 1;
3452 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3453 }
3454 } else {
3455 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3456 0x8 | (path << 1));
3457 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3458
3459 dpk_cmd = rtw8822c_dpk_get_cmd(rtwdev, action, path);
3460 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd);
3461 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd + 1);
3462 msleep(10);
3463 if (!check_hw_ready(rtwdev, 0x2d9c, 0xff, 0x55)) {
3464 result = 1;
3465 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3466 }
3467 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3468 0x8 | (path << 1));
3469 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3470 }
3471
3472 rtw8822c_dpk_set_gnt_wl(rtwdev, false);
3473
3474 rtw_write8(rtwdev, 0x1b10, 0x0);
3475
3476 return result;
3477 }
3478
rtw8822c_dpk_dgain_read(struct rtw_dev * rtwdev,u8 path)3479 static u16 rtw8822c_dpk_dgain_read(struct rtw_dev *rtwdev, u8 path)
3480 {
3481 u16 dgain;
3482
3483 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3484 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, 0x00ff0000, 0x0);
3485
3486 dgain = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3487
3488 return dgain;
3489 }
3490
rtw8822c_dpk_thermal_read(struct rtw_dev * rtwdev,u8 path)3491 static u8 rtw8822c_dpk_thermal_read(struct rtw_dev *rtwdev, u8 path)
3492 {
3493 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3494 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x0);
3495 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3496 udelay(15);
3497
3498 return (u8)rtw_read_rf(rtwdev, path, RF_T_METER, 0x0007e);
3499 }
3500
rtw8822c_dpk_pas_read(struct rtw_dev * rtwdev,u8 path)3501 static u32 rtw8822c_dpk_pas_read(struct rtw_dev *rtwdev, u8 path)
3502 {
3503 u32 i_val, q_val;
3504
3505 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3506 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3507 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060001);
3508 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3509 rtw_write32(rtwdev, 0x1b4c, 0x00080000);
3510
3511 q_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD);
3512 i_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD);
3513
3514 if (i_val & BIT(15))
3515 i_val = 0x10000 - i_val;
3516 if (q_val & BIT(15))
3517 q_val = 0x10000 - q_val;
3518
3519 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3520
3521 return i_val * i_val + q_val * q_val;
3522 }
3523
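/* Integer approximation of 100 * log2(val): the integer part comes from the
 * MSB position and the fractional part from a small lookup table.  For
 * example, val = 3 gives __fls(3) + 1 = 2, tmp = 75, tindex = 15, and
 * 200 - 42 = 158, close to 100 * log2(3) = 158.5.
 */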
rtw8822c_psd_log2base(u32 val)3524 static u32 rtw8822c_psd_log2base(u32 val)
3525 {
3526 u32 tmp, val_integerd_b, tindex;
3527 u32 result, val_fractiond_b;
3528 u32 table_fraction[21] = {0, 432, 332, 274, 232, 200, 174,
3529 151, 132, 115, 100, 86, 74, 62, 51,
3530 42, 32, 23, 15, 7, 0};
3531
3532 if (val == 0)
3533 return 0;
3534
3535 val_integerd_b = __fls(val) + 1;
3536
3537 tmp = (val * 100) / (1 << val_integerd_b);
3538 tindex = tmp / 5;
3539
3540 if (tindex >= ARRAY_SIZE(table_fraction))
3541 tindex = ARRAY_SIZE(table_fraction) - 1;
3542
3543 val_fractiond_b = table_fraction[tindex];
3544
3545 result = val_integerd_b * 100 - val_fractiond_b;
3546
3547 return result;
3548 }
3549
rtw8822c_dpk_gainloss_result(struct rtw_dev * rtwdev,u8 path)3550 static u8 rtw8822c_dpk_gainloss_result(struct rtw_dev *rtwdev, u8 path)
3551 {
3552 u8 result;
3553
3554 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3555 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x1);
3556 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060000);
3557
3558 result = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, 0x000000f0);
3559
3560 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3561
3562 return result;
3563 }
3564
rtw8822c_dpk_agc_gain_chk(struct rtw_dev * rtwdev,u8 path,u8 limited_pga)3565 static u8 rtw8822c_dpk_agc_gain_chk(struct rtw_dev *rtwdev, u8 path,
3566 u8 limited_pga)
3567 {
3568 u8 result = 0;
3569 u16 dgain;
3570
3571 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3572 dgain = rtw8822c_dpk_dgain_read(rtwdev, path);
3573
3574 if (dgain > 1535 && !limited_pga)
3575 return RTW_DPK_GAIN_LESS;
3576 else if (dgain < 768 && !limited_pga)
3577 return RTW_DPK_GAIN_LARGE;
3578 else
3579 return result;
3580 }
3581
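/* Convert the PA-scan power report into a gain-loss figure.  With
 * rtw8822c_psd_log2base() returning 100 * log2(x), loss_db is roughly the
 * loss in units of 0.01 dB, so the 1000/250 thresholds correspond to about
 * 10 dB and 2.5 dB of gain loss respectively (assumed scaling).
 */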
rtw8822c_dpk_agc_loss_chk(struct rtw_dev * rtwdev,u8 path)3582 static u8 rtw8822c_dpk_agc_loss_chk(struct rtw_dev *rtwdev, u8 path)
3583 {
3584 u32 loss, loss_db;
3585
3586 loss = rtw8822c_dpk_pas_read(rtwdev, path);
3587 if (loss < 0x4000000)
3588 return RTW_DPK_GL_LESS;
3589 loss_db = 3 * rtw8822c_psd_log2base(loss >> 13) - 3870;
3590
3591 if (loss_db > 1000)
3592 return RTW_DPK_GL_LARGE;
3593 else if (loss_db < 250)
3594 return RTW_DPK_GL_LESS;
3595 else
3596 return RTW_DPK_AGC_OUT;
3597 }
3598
3599 struct rtw8822c_dpk_data {
3600 u8 txbb;
3601 u8 pga;
3602 u8 limited_pga;
3603 u8 agc_cnt;
3604 bool loss_only;
3605 bool gain_only;
3606 u8 path;
3607 };
3608
rtw8822c_gain_check_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data)3609 static u8 rtw8822c_gain_check_state(struct rtw_dev *rtwdev,
3610 struct rtw8822c_dpk_data *data)
3611 {
3612 u8 state;
3613
3614 data->txbb = (u8)rtw_read_rf(rtwdev, data->path, RF_TX_GAIN,
3615 BIT_GAIN_TXBB);
3616 data->pga = (u8)rtw_read_rf(rtwdev, data->path, RF_MODE_TRXAGC,
3617 BIT_RXAGC);
3618
3619 if (data->loss_only) {
3620 state = RTW_DPK_LOSS_CHECK;
3621 goto check_end;
3622 }
3623
3624 state = rtw8822c_dpk_agc_gain_chk(rtwdev, data->path,
3625 data->limited_pga);
3626 if (state == RTW_DPK_GAIN_CHECK && data->gain_only)
3627 state = RTW_DPK_AGC_OUT;
3628 else if (state == RTW_DPK_GAIN_CHECK)
3629 state = RTW_DPK_LOSS_CHECK;
3630
3631 check_end:
3632 data->agc_cnt++;
3633 if (data->agc_cnt >= 6)
3634 state = RTW_DPK_AGC_OUT;
3635
3636 return state;
3637 }
3638
rtw8822c_gain_large_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data)3639 static u8 rtw8822c_gain_large_state(struct rtw_dev *rtwdev,
3640 struct rtw8822c_dpk_data *data)
3641 {
3642 u8 pga = data->pga;
3643
3644 if (pga > 0xe)
3645 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3646 else if (pga > 0xb && pga < 0xf)
3647 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0);
3648 else if (pga < 0xc)
3649 data->limited_pga = 1;
3650
3651 return RTW_DPK_GAIN_CHECK;
3652 }
3653
rtw8822c_gain_less_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data)3654 static u8 rtw8822c_gain_less_state(struct rtw_dev *rtwdev,
3655 struct rtw8822c_dpk_data *data)
3656 {
3657 u8 pga = data->pga;
3658
3659 if (pga < 0xc)
3660 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3661 else if (pga > 0xb && pga < 0xf)
3662 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3663 else if (pga > 0xe)
3664 data->limited_pga = 1;
3665
3666 return RTW_DPK_GAIN_CHECK;
3667 }
3668
rtw8822c_gl_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data,u8 is_large)3669 static u8 rtw8822c_gl_state(struct rtw_dev *rtwdev,
3670 struct rtw8822c_dpk_data *data, u8 is_large)
3671 {
3672 u8 txbb_bound[] = {0x1f, 0};
3673
3674 if (data->txbb == txbb_bound[is_large])
3675 return RTW_DPK_AGC_OUT;
3676
3677 if (is_large == 1)
3678 data->txbb -= 2;
3679 else
3680 data->txbb += 3;
3681
3682 rtw_write_rf(rtwdev, data->path, RF_TX_GAIN, BIT_GAIN_TXBB, data->txbb);
3683 data->limited_pga = 0;
3684
3685 return RTW_DPK_GAIN_CHECK;
3686 }
3687
rtw8822c_gl_large_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data)3688 static u8 rtw8822c_gl_large_state(struct rtw_dev *rtwdev,
3689 struct rtw8822c_dpk_data *data)
3690 {
3691 return rtw8822c_gl_state(rtwdev, data, 1);
3692 }
3693
rtw8822c_gl_less_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data)3694 static u8 rtw8822c_gl_less_state(struct rtw_dev *rtwdev,
3695 struct rtw8822c_dpk_data *data)
3696 {
3697 return rtw8822c_gl_state(rtwdev, data, 0);
3698 }
3699
rtw8822c_loss_check_state(struct rtw_dev * rtwdev,struct rtw8822c_dpk_data * data)3700 static u8 rtw8822c_loss_check_state(struct rtw_dev *rtwdev,
3701 struct rtw8822c_dpk_data *data)
3702 {
3703 u8 path = data->path;
3704 u8 state;
3705
3706 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_GAIN_LOSS);
3707 state = rtw8822c_dpk_agc_loss_chk(rtwdev, path);
3708
3709 return state;
3710 }
3711
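/* Handlers for the DPK TX AGC search state machine.  The array is indexed
 * by the state value returned from the previous handler, so its order must
 * match the RTW_DPK_* state values; rtw8822c_dpk_pas_agc() keeps calling
 * handlers until one of them returns RTW_DPK_AGC_OUT.
 */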
3712 static u8 (*dpk_state[])(struct rtw_dev *rtwdev,
3713 struct rtw8822c_dpk_data *data) = {
3714 rtw8822c_gain_check_state, rtw8822c_gain_large_state,
3715 rtw8822c_gain_less_state, rtw8822c_gl_large_state,
3716 rtw8822c_gl_less_state, rtw8822c_loss_check_state };
3717
rtw8822c_dpk_pas_agc(struct rtw_dev * rtwdev,u8 path,bool gain_only,bool loss_only)3718 static u8 rtw8822c_dpk_pas_agc(struct rtw_dev *rtwdev, u8 path,
3719 bool gain_only, bool loss_only)
3720 {
3721 struct rtw8822c_dpk_data data = {0};
3722 u8 (*func)(struct rtw_dev *rtwdev, struct rtw8822c_dpk_data *data);
3723 u8 state = RTW_DPK_GAIN_CHECK;
3724
3725 data.loss_only = loss_only;
3726 data.gain_only = gain_only;
3727 data.path = path;
3728
3729 for (;;) {
3730 func = dpk_state[state];
3731 state = func(rtwdev, &data);
3732 if (state == RTW_DPK_AGC_OUT)
3733 break;
3734 }
3735
3736 return data.txbb;
3737 }
3738
rtw8822c_dpk_coef_iq_check(struct rtw_dev * rtwdev,u16 coef_i,u16 coef_q)3739 static bool rtw8822c_dpk_coef_iq_check(struct rtw_dev *rtwdev,
3740 u16 coef_i, u16 coef_q)
3741 {
3742 if (coef_i == 0x1000 || coef_i == 0x0fff ||
3743 coef_q == 0x1000 || coef_q == 0x0fff)
3744 return true;
3745
3746 return false;
3747 }
3748
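/* Read one DPD coefficient pair from the report register and repack it as
 * (I << 16) | Q, where I/Q are 13-bit values and Q is negated
 * (two's complement within 13 bits, minus one) before being stored.
 */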
rtw8822c_dpk_coef_transfer(struct rtw_dev * rtwdev)3749 static u32 rtw8822c_dpk_coef_transfer(struct rtw_dev *rtwdev)
3750 {
3751 u32 reg = 0;
3752 u16 coef_i = 0, coef_q = 0;
3753
3754 reg = rtw_read32(rtwdev, REG_STAT_RPT);
3755
3756 coef_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD) & 0x1fff;
3757 coef_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD) & 0x1fff;
3758
3759 coef_q = ((0x2000 - coef_q) & 0x1fff) - 1;
3760
3761 reg = (coef_i << 16) | coef_q;
3762
3763 return reg;
3764 }
3765
3766 static const u32 rtw8822c_dpk_get_coef_tbl[] = {
3767 0x000400f0, 0x040400f0, 0x080400f0, 0x010400f0, 0x050400f0,
3768 0x090400f0, 0x020400f0, 0x060400f0, 0x0a0400f0, 0x030400f0,
3769 0x070400f0, 0x0b0400f0, 0x0c0400f0, 0x100400f0, 0x0d0400f0,
3770 0x110400f0, 0x0e0400f0, 0x120400f0, 0x0f0400f0, 0x130400f0,
3771 };
3772
rtw8822c_dpk_coef_tbl_apply(struct rtw_dev * rtwdev,u8 path)3773 static void rtw8822c_dpk_coef_tbl_apply(struct rtw_dev *rtwdev, u8 path)
3774 {
3775 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3776 int i;
3777
3778 for (i = 0; i < 20; i++) {
3779 rtw_write32(rtwdev, REG_RXSRAM_CTL,
3780 rtw8822c_dpk_get_coef_tbl[i]);
3781 dpk_info->coef[path][i] = rtw8822c_dpk_coef_transfer(rtwdev);
3782 }
3783 }
3784
rtw8822c_dpk_get_coef(struct rtw_dev * rtwdev,u8 path)3785 static void rtw8822c_dpk_get_coef(struct rtw_dev *rtwdev, u8 path)
3786 {
3787 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3788
3789 if (path == RF_PATH_A) {
3790 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x0);
3791 rtw_write32(rtwdev, REG_DPD_CTL0_S0, 0x30000080);
3792 } else if (path == RF_PATH_B) {
3793 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x1);
3794 rtw_write32(rtwdev, REG_DPD_CTL0_S1, 0x30000080);
3795 }
3796
3797 rtw8822c_dpk_coef_tbl_apply(rtwdev, path);
3798 }
3799
rtw8822c_dpk_coef_read(struct rtw_dev * rtwdev,u8 path)3800 static u8 rtw8822c_dpk_coef_read(struct rtw_dev *rtwdev, u8 path)
3801 {
3802 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3803 u8 addr, result = 1;
3804 u16 coef_i, coef_q;
3805
3806 for (addr = 0; addr < 20; addr++) {
3807 coef_i = FIELD_GET(0x1fff0000, dpk_info->coef[path][addr]);
3808 coef_q = FIELD_GET(0x1fff, dpk_info->coef[path][addr]);
3809
3810 if (rtw8822c_dpk_coef_iq_check(rtwdev, coef_i, coef_q)) {
3811 result = 0;
3812 break;
3813 }
3814 }
3815 return result;
3816 }
3817
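/* Write the 20 DPD coefficient entries for a path.  When calibration failed
 * (result == 0) a neutral coefficient set is written instead, with entry 3
 * presumably holding the unity tap (0x04001fff).
 */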
rtw8822c_dpk_coef_write(struct rtw_dev * rtwdev,u8 path,u8 result)3818 static void rtw8822c_dpk_coef_write(struct rtw_dev *rtwdev, u8 path, u8 result)
3819 {
3820 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3821 u16 reg[DPK_RF_PATH_NUM] = {0x1b0c, 0x1b64};
3822 u32 coef;
3823 u8 addr;
3824
3825 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3826 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3827
3828 for (addr = 0; addr < 20; addr++) {
3829 if (result == 0) {
3830 if (addr == 3)
3831 coef = 0x04001fff;
3832 else
3833 coef = 0x00001fff;
3834 } else {
3835 coef = dpk_info->coef[path][addr];
3836 }
3837 rtw_write32(rtwdev, reg[path] + addr * 4, coef);
3838 }
3839 }
3840
rtw8822c_dpk_fill_result(struct rtw_dev * rtwdev,u32 dpk_txagc,u8 path,u8 result)3841 static void rtw8822c_dpk_fill_result(struct rtw_dev *rtwdev, u32 dpk_txagc,
3842 u8 path, u8 result)
3843 {
3844 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3845
3846 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3847
3848 if (result)
3849 rtw_write8(rtwdev, REG_DPD_AGC, (u8)(dpk_txagc - 6));
3850 else
3851 rtw_write8(rtwdev, REG_DPD_AGC, 0x00);
3852
3853 dpk_info->result[path] = result;
3854 dpk_info->dpk_txagc[path] = rtw_read8(rtwdev, REG_DPD_AGC);
3855
3856 rtw8822c_dpk_coef_write(rtwdev, path, result);
3857 }
3858
rtw8822c_dpk_gainloss(struct rtw_dev * rtwdev,u8 path)3859 static u32 rtw8822c_dpk_gainloss(struct rtw_dev *rtwdev, u8 path)
3860 {
3861 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3862 u8 tx_agc, tx_bb, ori_txbb, ori_txagc, tx_agc_search, t1, t2;
3863
3864 ori_txbb = rtw8822c_dpk_rf_setting(rtwdev, path);
3865 ori_txagc = (u8)rtw_read_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_TXAGC);
3866
3867 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3868 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3869 rtw8822c_dpk_dgain_read(rtwdev, path);
3870
3871 if (rtw8822c_dpk_dc_corr_check(rtwdev, path)) {
3872 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3873 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3874 rtw8822c_dpk_dc_corr_check(rtwdev, path);
3875 }
3876
3877 t1 = rtw8822c_dpk_thermal_read(rtwdev, path);
3878 tx_bb = rtw8822c_dpk_pas_agc(rtwdev, path, false, true);
3879 tx_agc_search = rtw8822c_dpk_gainloss_result(rtwdev, path);
3880
3881 if (tx_bb < tx_agc_search)
3882 tx_bb = 0;
3883 else
3884 tx_bb = tx_bb - tx_agc_search;
3885
3886 rtw_write_rf(rtwdev, path, RF_TX_GAIN, BIT_GAIN_TXBB, tx_bb);
3887
3888 tx_agc = ori_txagc - (ori_txbb - tx_bb);
3889
3890 t2 = rtw8822c_dpk_thermal_read(rtwdev, path);
3891
3892 dpk_info->thermal_dpk_delta[path] = abs(t2 - t1);
3893
3894 return tx_agc;
3895 }
3896
rtw8822c_dpk_by_path(struct rtw_dev * rtwdev,u32 tx_agc,u8 path)3897 static u8 rtw8822c_dpk_by_path(struct rtw_dev *rtwdev, u32 tx_agc, u8 path)
3898 {
3899 u8 result;
3900
3901 result = rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DO_DPK);
3902
3903 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3904
3905 result = result | (u8)rtw_read32_mask(rtwdev, REG_DPD_CTL1_S0, BIT(26));
3906
3907 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x33e14);
3908
3909 rtw8822c_dpk_get_coef(rtwdev, path);
3910
3911 return result;
3912 }
3913
rtw8822c_dpk_cal_gs(struct rtw_dev * rtwdev,u8 path)3914 static void rtw8822c_dpk_cal_gs(struct rtw_dev *rtwdev, u8 path)
3915 {
3916 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3917 u32 tmp_gs = 0;
3918
3919 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3920 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_BYPASS_DPD, 0x0);
3921 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3922 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3923 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x1);
3924 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3925 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0xf);
3926
3927 if (path == RF_PATH_A) {
3928 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
3929 0x1066680);
3930 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN, 0x1);
3931 } else {
3932 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
3933 0x1066680);
3934 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN, 0x1);
3935 }
3936
3937 if (dpk_info->dpk_bw == DPK_CHANNEL_WIDTH_80) {
3938 rtw_write32(rtwdev, REG_DPD_CTL16, 0x80001310);
3939 rtw_write32(rtwdev, REG_DPD_CTL16, 0x00001310);
3940 rtw_write32(rtwdev, REG_DPD_CTL16, 0x810000db);
3941 rtw_write32(rtwdev, REG_DPD_CTL16, 0x010000db);
3942 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3943 rtw_write32(rtwdev, REG_DPD_CTL15,
3944 0x05020000 | (BIT(path) << 28));
3945 } else {
3946 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8200190c);
3947 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0200190c);
3948 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8301ee14);
3949 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0301ee14);
3950 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3951 rtw_write32(rtwdev, REG_DPD_CTL15,
3952 0x05020008 | (BIT(path) << 28));
3953 }
3954
3955 rtw_write32_mask(rtwdev, REG_DPD_CTL0, MASKBYTE3, 0x8 | path);
3956
3957 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_CAL_PWR);
3958
3959 rtw_write32_mask(rtwdev, REG_DPD_CTL15, MASKBYTE3, 0x0);
3960 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3961 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3962 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x0);
3963 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3964
3965 if (path == RF_PATH_A)
3966 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, 0x5b);
3967 else
3968 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, 0x5b);
3969
3970 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3971
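	/* Scale the reported digital gain into the gain-scaling (PWSF) word:
	 * roughly (dgain * 910) >> 10, then divided by 10 with rounding
	 * (fixed-point scaling assumed from the constants used here).
	 */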
3972 tmp_gs = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, BIT_RPT_DGAIN);
3973 tmp_gs = (tmp_gs * 910) >> 10;
3974 tmp_gs = DIV_ROUND_CLOSEST(tmp_gs, 10);
3975
3976 if (path == RF_PATH_A)
3977 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, tmp_gs);
3978 else
3979 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, tmp_gs);
3980
3981 dpk_info->dpk_gs[path] = tmp_gs;
3982 }
3983
rtw8822c_dpk_cal_coef1(struct rtw_dev * rtwdev)3984 static void rtw8822c_dpk_cal_coef1(struct rtw_dev *rtwdev)
3985 {
3986 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3987 u32 offset[DPK_RF_PATH_NUM] = {0, 0x58};
3988 u32 i_scaling;
3989 u8 path;
3990
3991 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3992 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3993 rtw_write32(rtwdev, REG_NCTL0, 0x00001148);
3994 rtw_write32(rtwdev, REG_NCTL0, 0x00001149);
3995
3996 check_hw_ready(rtwdev, 0x2d9c, MASKBYTE0, 0x55);
3997
3998 rtw_write8(rtwdev, 0x1b10, 0x0);
3999 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
4000
4001 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4002 i_scaling = 0x16c00 / dpk_info->dpk_gs[path];
4003
4004 rtw_write32_mask(rtwdev, 0x1b18 + offset[path], MASKHWORD,
4005 i_scaling);
4006 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4007 GENMASK(31, 28), 0x9);
4008 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4009 GENMASK(31, 28), 0x1);
4010 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4011 GENMASK(31, 28), 0x0);
4012 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0 + offset[path],
4013 BIT(14), 0x0);
4014 }
4015 }
4016
rtw8822c_dpk_on(struct rtw_dev * rtwdev,u8 path)4017 static void rtw8822c_dpk_on(struct rtw_dev *rtwdev, u8 path)
4018 {
4019 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4020
4021 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4022
4023 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
4024 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
4025
4026 if (test_bit(path, dpk_info->dpk_path_ok))
4027 rtw8822c_dpk_cal_gs(rtwdev, path);
4028 }
4029
rtw8822c_dpk_check_pass(struct rtw_dev * rtwdev,bool is_fail,u32 dpk_txagc,u8 path)4030 static bool rtw8822c_dpk_check_pass(struct rtw_dev *rtwdev, bool is_fail,
4031 u32 dpk_txagc, u8 path)
4032 {
4033 bool result;
4034
4035 if (!is_fail) {
4036 if (rtw8822c_dpk_coef_read(rtwdev, path))
4037 result = true;
4038 else
4039 result = false;
4040 } else {
4041 result = false;
4042 }
4043
4044 rtw8822c_dpk_fill_result(rtwdev, dpk_txagc, path, result);
4045
4046 return result;
4047 }
4048
rtw8822c_dpk_result_reset(struct rtw_dev * rtwdev)4049 static void rtw8822c_dpk_result_reset(struct rtw_dev *rtwdev)
4050 {
4051 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4052 u8 path;
4053
4054 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4055 clear_bit(path, dpk_info->dpk_path_ok);
4056 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4057 0x8 | (path << 1));
4058 rtw_write32_mask(rtwdev, 0x1b58, 0x0000007f, 0x0);
4059
4060 dpk_info->dpk_txagc[path] = 0;
4061 dpk_info->result[path] = 0;
4062 dpk_info->dpk_gs[path] = 0x5b;
4063 dpk_info->pre_pwsf[path] = 0;
4064 dpk_info->thermal_dpk[path] = rtw8822c_dpk_thermal_read(rtwdev,
4065 path);
4066 }
4067 }
4068
rtw8822c_dpk_calibrate(struct rtw_dev * rtwdev,u8 path)4069 static void rtw8822c_dpk_calibrate(struct rtw_dev *rtwdev, u8 path)
4070 {
4071 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4072 u32 dpk_txagc;
4073 u8 dpk_fail;
4074
4075 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk start\n", path);
4076
4077 dpk_txagc = rtw8822c_dpk_gainloss(rtwdev, path);
4078
4079 dpk_fail = rtw8822c_dpk_by_path(rtwdev, dpk_txagc, path);
4080
4081 if (!rtw8822c_dpk_check_pass(rtwdev, dpk_fail, dpk_txagc, path))
4082 rtw_err(rtwdev, "failed to do dpk calibration\n");
4083
4084 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk finish\n", path);
4085
4086 if (dpk_info->result[path])
4087 set_bit(path, dpk_info->dpk_path_ok);
4088 }
4089
rtw8822c_dpk_path_select(struct rtw_dev * rtwdev)4090 static void rtw8822c_dpk_path_select(struct rtw_dev *rtwdev)
4091 {
4092 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_A);
4093 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_B);
4094 rtw8822c_dpk_on(rtwdev, RF_PATH_A);
4095 rtw8822c_dpk_on(rtwdev, RF_PATH_B);
4096 rtw8822c_dpk_cal_coef1(rtwdev);
4097 }
4098
rtw8822c_dpk_enable_disable(struct rtw_dev * rtwdev)4099 static void rtw8822c_dpk_enable_disable(struct rtw_dev *rtwdev)
4100 {
4101 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4102 u32 mask = BIT(15) | BIT(14);
4103
4104 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4105
4106 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN,
4107 dpk_info->is_dpk_pwr_on);
4108 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN,
4109 dpk_info->is_dpk_pwr_on);
4110
4111 if (test_bit(RF_PATH_A, dpk_info->dpk_path_ok)) {
4112 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, mask, 0x0);
4113 rtw_write8(rtwdev, REG_DPD_CTL0_S0, dpk_info->dpk_gs[RF_PATH_A]);
4114 }
4115 if (test_bit(RF_PATH_B, dpk_info->dpk_path_ok)) {
4116 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, mask, 0x0);
4117 rtw_write8(rtwdev, REG_DPD_CTL0_S1, dpk_info->dpk_gs[RF_PATH_B]);
4118 }
4119 }
4120
rtw8822c_dpk_reload_data(struct rtw_dev * rtwdev)4121 static void rtw8822c_dpk_reload_data(struct rtw_dev *rtwdev)
4122 {
4123 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4124 u8 path;
4125
4126 if (!test_bit(RF_PATH_A, dpk_info->dpk_path_ok) &&
4127 !test_bit(RF_PATH_B, dpk_info->dpk_path_ok) &&
4128 dpk_info->dpk_ch == 0)
4129 return;
4130
4131 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4132 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4133 0x8 | (path << 1));
4134 if (dpk_info->dpk_band == RTW_BAND_2G)
4135 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
4136 else
4137 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
4138
4139 rtw_write8(rtwdev, REG_DPD_AGC, dpk_info->dpk_txagc[path]);
4140
4141 rtw8822c_dpk_coef_write(rtwdev, path,
4142 test_bit(path, dpk_info->dpk_path_ok));
4143
4144 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4145
4146 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4147
4148 if (path == RF_PATH_A)
4149 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
4150 dpk_info->dpk_gs[path]);
4151 else
4152 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
4153 dpk_info->dpk_gs[path]);
4154 }
4155 rtw8822c_dpk_cal_coef1(rtwdev);
4156 }
4157
rtw8822c_dpk_reload(struct rtw_dev * rtwdev)4158 static bool rtw8822c_dpk_reload(struct rtw_dev *rtwdev)
4159 {
4160 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4161 u8 channel;
4162
4163 dpk_info->is_reload = false;
4164
4165 channel = (u8)(rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK) & 0xff);
4166
4167 if (channel == dpk_info->dpk_ch) {
4168 rtw_dbg(rtwdev, RTW_DBG_RFK,
4169 "[DPK] DPK reload for CH%d!!\n", dpk_info->dpk_ch);
4170 rtw8822c_dpk_reload_data(rtwdev);
4171 dpk_info->is_reload = true;
4172 }
4173
4174 return dpk_info->is_reload;
4175 }
4176
rtw8822c_do_dpk(struct rtw_dev * rtwdev)4177 static void rtw8822c_do_dpk(struct rtw_dev *rtwdev)
4178 {
4179 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4180 struct rtw_backup_info bckp[DPK_BB_REG_NUM];
4181 u32 rf_reg_backup[DPK_RF_REG_NUM][DPK_RF_PATH_NUM];
4182 u32 bb_reg[DPK_BB_REG_NUM] = {
4183 0x520, 0x820, 0x824, 0x1c3c, 0x1d58, 0x1864,
4184 0x4164, 0x180c, 0x410c, 0x186c, 0x416c,
4185 0x1a14, 0x1e70, 0x80c, 0x1d70, 0x1e7c, 0x18a4, 0x41a4};
4186 u32 rf_reg[DPK_RF_REG_NUM] = {
4187 0x0, 0x1a, 0x55, 0x63, 0x87, 0x8f, 0xde};
4188 u8 path;
4189
4190 if (!dpk_info->is_dpk_pwr_on) {
4191 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] Skip DPK due to DPD PWR off\n");
4192 return;
4193 } else if (rtw8822c_dpk_reload(rtwdev)) {
4194 return;
4195 }
4196
4197 for (path = RF_PATH_A; path < DPK_RF_PATH_NUM; path++)
4198 ewma_thermal_init(&dpk_info->avg_thermal[path]);
4199
4200 rtw8822c_dpk_information(rtwdev);
4201
4202 rtw8822c_dpk_backup_registers(rtwdev, bb_reg, DPK_BB_REG_NUM, bckp);
4203 rtw8822c_dpk_backup_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4204
4205 rtw8822c_dpk_mac_bb_setting(rtwdev);
4206 rtw8822c_dpk_afe_setting(rtwdev, true);
4207 rtw8822c_dpk_pre_setting(rtwdev);
4208 rtw8822c_dpk_result_reset(rtwdev);
4209 rtw8822c_dpk_path_select(rtwdev);
4210 rtw8822c_dpk_afe_setting(rtwdev, false);
4211 rtw8822c_dpk_enable_disable(rtwdev);
4212
4213 rtw8822c_dpk_reload_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4214 for (path = 0; path < rtwdev->hal.rf_path_num; path++)
4215 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
4216 rtw8822c_dpk_restore_registers(rtwdev, DPK_BB_REG_NUM, bckp);
4217 }
4218
rtw8822c_phy_calibration(struct rtw_dev * rtwdev)4219 static void rtw8822c_phy_calibration(struct rtw_dev *rtwdev)
4220 {
4221 rtw8822c_rfk_power_save(rtwdev, false);
4222 rtw8822c_do_gapk(rtwdev);
4223 rtw8822c_do_iqk(rtwdev);
4224 rtw8822c_do_dpk(rtwdev);
4225 rtw8822c_rfk_power_save(rtwdev, true);
4226 }
4227
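/* Thermal tracking for DPK: compare the current (EWMA-averaged) thermal
 * reading against the value captured at calibration time, subtract the
 * delta recorded during DPK, and program the resulting 7-bit power
 * scaling-factor offset (pwsf) whenever it changes.
 */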
rtw8822c_dpk_track(struct rtw_dev * rtwdev)4228 static void rtw8822c_dpk_track(struct rtw_dev *rtwdev)
4229 {
4230 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4231 u8 path;
4232 u8 thermal_value[DPK_RF_PATH_NUM] = {0};
4233 s8 offset[DPK_RF_PATH_NUM], delta_dpk[DPK_RF_PATH_NUM];
4234
4235 if (dpk_info->thermal_dpk[0] == 0 && dpk_info->thermal_dpk[1] == 0)
4236 return;
4237
4238 for (path = 0; path < DPK_RF_PATH_NUM; path++) {
4239 thermal_value[path] = rtw8822c_dpk_thermal_read(rtwdev, path);
4240 ewma_thermal_add(&dpk_info->avg_thermal[path],
4241 thermal_value[path]);
4242 thermal_value[path] =
4243 ewma_thermal_read(&dpk_info->avg_thermal[path]);
4244 delta_dpk[path] = dpk_info->thermal_dpk[path] -
4245 thermal_value[path];
4246 offset[path] = delta_dpk[path] -
4247 dpk_info->thermal_dpk_delta[path];
4248 offset[path] &= 0x7f;
4249
4250 if (offset[path] != dpk_info->pre_pwsf[path]) {
4251 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4252 0x8 | (path << 1));
4253 rtw_write32_mask(rtwdev, 0x1b58, GENMASK(6, 0),
4254 offset[path]);
4255 dpk_info->pre_pwsf[path] = offset[path];
4256 }
4257 }
4258 }
4259
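/* Duplicate the 7-bit crystal cap code into what appear to be the two
 * 7-bit XTAL capacitor fields of REG_ANAPAR_XTAL_0 (bits 6:0 and 13:7);
 * e.g. 0x20 becomes 0x1020.
 */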
4260 #define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
rtw8822c_set_crystal_cap_reg(struct rtw_dev * rtwdev,u8 crystal_cap)4261 static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
4262 {
4263 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4264 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4265 u32 val = 0;
4266
4267 val = XCAP_EXTEND(crystal_cap);
4268 cfo->crystal_cap = crystal_cap;
4269 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
4270 }
4271
rtw8822c_set_crystal_cap(struct rtw_dev * rtwdev,u8 crystal_cap)4272 static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
4273 {
4274 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4275 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4276
4277 if (cfo->crystal_cap == crystal_cap)
4278 return;
4279
4280 rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
4281 }
4282
rtw8822c_cfo_tracking_reset(struct rtw_dev * rtwdev)4283 static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
4284 {
4285 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4286 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4287
4288 cfo->is_adjust = true;
4289
4290 if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
4291 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
4292 else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
4293 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
4294 }
4295
rtw8822c_cfo_init(struct rtw_dev * rtwdev)4296 static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
4297 {
4298 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4299 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4300
4301 cfo->crystal_cap = rtwdev->efuse.crystal_cap;
4302 cfo->is_adjust = true;
4303 }
4304
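/* Convert the accumulated CFO report to kHz: (v << 1) + (v >> 1) multiplies
 * by 2.5, which suggests the hardware reports CFO in 0.4 kHz steps.
 */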
4305 #define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
rtw8822c_cfo_calc_avg(struct rtw_dev * rtwdev,u8 path_num)4306 static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
4307 {
4308 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4309 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4310 s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
4311 u8 i;
4312
4313 for (i = 0; i < path_num; i++) {
4314 cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
4315
4316 if (cfo->cfo_cnt[i])
4317 cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
4318 else
4319 cfo_avg = 0;
4320
4321 cfo_path_sum += cfo_avg;
4322 }
4323
4324 for (i = 0; i < path_num; i++) {
4325 cfo->cfo_tail[i] = 0;
4326 cfo->cfo_cnt[i] = 0;
4327 }
4328
4329 return cfo_path_sum / path_num;
4330 }
4331
rtw8822c_cfo_need_adjust(struct rtw_dev * rtwdev,s32 cfo_avg)4332 static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
4333 {
4334 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4335 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4336
4337 if (!cfo->is_adjust) {
4338 if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
4339 cfo->is_adjust = true;
4340 } else {
4341 if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
4342 cfo->is_adjust = false;
4343 }
4344
4345 if (!rtw_coex_disabled(rtwdev)) {
4346 cfo->is_adjust = false;
4347 rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
4348 }
4349 }
4350
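/* CFO tracking: only runs while exactly one station is associated.  The
 * per-path CFO averages are converted to kHz, and while tracking is enabled
 * the crystal cap code is nudged by +/-1 per iteration and clamped to
 * 0..XCAP_MASK.
 */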
rtw8822c_cfo_track(struct rtw_dev * rtwdev)4351 static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
4352 {
4353 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4354 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4355 u8 path_num = rtwdev->hal.rf_path_num;
4356 s8 crystal_cap = cfo->crystal_cap;
4357 s32 cfo_avg = 0;
4358
4359 if (rtwdev->sta_cnt != 1) {
4360 rtw8822c_cfo_tracking_reset(rtwdev);
4361 return;
4362 }
4363
4364 if (cfo->packet_count == cfo->packet_count_pre)
4365 return;
4366
4367 cfo->packet_count_pre = cfo->packet_count;
4368 cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
4369 rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
4370
4371 if (cfo->is_adjust) {
4372 if (cfo_avg > CFO_TRK_ADJ_TH)
4373 crystal_cap++;
4374 else if (cfo_avg < -CFO_TRK_ADJ_TH)
4375 crystal_cap--;
4376
4377 crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
4378 rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
4379 }
4380 }
4381
4382 static const struct rtw_phy_cck_pd_reg
4383 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
4384 {
4385 {0x1ac8, 0x00ff, 0x1ad0, 0x01f},
4386 {0x1ac8, 0xff00, 0x1ad0, 0x3e0}
4387 },
4388 {
4389 {0x1acc, 0x00ff, 0x1ad0, 0x01F00000},
4390 {0x1acc, 0xff00, 0x1ad0, 0x3E000000}
4391 },
4392 };
4393
4394 #define RTW_CCK_PD_MAX 255
4395 #define RTW_CCK_CS_MAX 31
4396 #define RTW_CCK_CS_ERR1 27
4397 #define RTW_CCK_CS_ERR2 29
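/* Adjust the CCK power-detection threshold and carrier-sense ratio by the
 * given signed deltas.  The CS codes 27 and 29 (RTW_CCK_CS_ERR1/2) are
 * skipped, presumably because they are invalid values for the hardware.
 */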
4398 static void
rtw8822c_phy_cck_pd_set_reg(struct rtw_dev * rtwdev,s8 pd_diff,s8 cs_diff,u8 bw,u8 nrx)4399 rtw8822c_phy_cck_pd_set_reg(struct rtw_dev *rtwdev,
4400 s8 pd_diff, s8 cs_diff, u8 bw, u8 nrx)
4401 {
4402 u32 pd, cs;
4403
4404 if (WARN_ON(bw > RTW_CHANNEL_WIDTH_40 || nrx >= RTW_RF_PATH_MAX))
4405 return;
4406
4407 pd = rtw_read32_mask(rtwdev,
4408 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4409 rtw8822c_cck_pd_reg[bw][nrx].mask_pd);
4410 cs = rtw_read32_mask(rtwdev,
4411 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4412 rtw8822c_cck_pd_reg[bw][nrx].mask_cs);
4413 pd += pd_diff;
4414 cs += cs_diff;
4415 if (pd > RTW_CCK_PD_MAX)
4416 pd = RTW_CCK_PD_MAX;
4417 if (cs == RTW_CCK_CS_ERR1 || cs == RTW_CCK_CS_ERR2)
4418 cs++;
4419 else if (cs > RTW_CCK_CS_MAX)
4420 cs = RTW_CCK_CS_MAX;
4421 rtw_write32_mask(rtwdev,
4422 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4423 rtw8822c_cck_pd_reg[bw][nrx].mask_pd,
4424 pd);
4425 rtw_write32_mask(rtwdev,
4426 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4427 rtw8822c_cck_pd_reg[bw][nrx].mask_cs,
4428 cs);
4429
4430 rtw_dbg(rtwdev, RTW_DBG_PHY,
4431 "is_linked=%d, bw=%d, nrx=%d, cs_ratio=0x%x, pd_th=0x%x\n",
4432 rtw_is_assoc(rtwdev), bw, nrx, cs, pd);
4433 }
4434
rtw8822c_phy_cck_pd_set(struct rtw_dev * rtwdev,u8 new_lvl)4435 static void rtw8822c_phy_cck_pd_set(struct rtw_dev *rtwdev, u8 new_lvl)
4436 {
4437 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4438 s8 pd_lvl[CCK_PD_LV_MAX] = {0, 2, 4, 6, 8};
4439 s8 cs_lvl[CCK_PD_LV_MAX] = {0, 2, 2, 2, 4};
4440 u8 cur_lvl;
4441 u8 nrx, bw;
4442
4443 nrx = (u8)rtw_read32_mask(rtwdev, 0x1a2c, 0x60000);
4444 bw = (u8)rtw_read32_mask(rtwdev, 0x9b0, 0xc);
4445
4446 rtw_dbg(rtwdev, RTW_DBG_PHY, "lv: (%d) -> (%d) bw=%d nr=%d cck_fa_avg=%d\n",
4447 dm_info->cck_pd_lv[bw][nrx], new_lvl, bw, nrx,
4448 dm_info->cck_fa_avg);
4449
4450 if (dm_info->cck_pd_lv[bw][nrx] == new_lvl)
4451 return;
4452
4453 cur_lvl = dm_info->cck_pd_lv[bw][nrx];
4454
4455 /* update cck pd info */
4456 dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
4457
4458 rtw8822c_phy_cck_pd_set_reg(rtwdev,
4459 pd_lvl[new_lvl] - pd_lvl[cur_lvl],
4460 cs_lvl[new_lvl] - cs_lvl[cur_lvl],
4461 bw, nrx);
4462 dm_info->cck_pd_lv[bw][nrx] = new_lvl;
4463 }
4464
4465 #define PWR_TRACK_MASK 0x7f
rtw8822c_pwrtrack_set(struct rtw_dev * rtwdev,u8 rf_path)4466 static void rtw8822c_pwrtrack_set(struct rtw_dev *rtwdev, u8 rf_path)
4467 {
4468 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4469
4470 switch (rf_path) {
4471 case RF_PATH_A:
4472 rtw_write32_mask(rtwdev, 0x18a0, PWR_TRACK_MASK,
4473 dm_info->delta_power_index[rf_path]);
4474 break;
4475 case RF_PATH_B:
4476 rtw_write32_mask(rtwdev, 0x41a0, PWR_TRACK_MASK,
4477 dm_info->delta_power_index[rf_path]);
4478 break;
4479 default:
4480 break;
4481 }
4482 }
4483
rtw8822c_pwr_track_stats(struct rtw_dev * rtwdev,u8 path)4484 static void rtw8822c_pwr_track_stats(struct rtw_dev *rtwdev, u8 path)
4485 {
4486 u8 thermal_value;
4487
4488 if (rtwdev->efuse.thermal_meter[path] == 0xff)
4489 return;
4490
4491 thermal_value = rtw_read_rf(rtwdev, path, RF_T_METER, 0x7e);
4492 rtw_phy_pwrtrack_avg(rtwdev, thermal_value, path);
4493 }
4494
rtw8822c_pwr_track_path(struct rtw_dev * rtwdev,struct rtw_swing_table * swing_table,u8 path)4495 static void rtw8822c_pwr_track_path(struct rtw_dev *rtwdev,
4496 struct rtw_swing_table *swing_table,
4497 u8 path)
4498 {
4499 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4500 u8 delta;
4501
4502 delta = rtw_phy_pwrtrack_get_delta(rtwdev, path);
4503 dm_info->delta_power_index[path] =
4504 rtw_phy_pwrtrack_get_pwridx(rtwdev, swing_table, path, path,
4505 delta);
4506 rtw8822c_pwrtrack_set(rtwdev, path);
4507 }
4508
__rtw8822c_pwr_track(struct rtw_dev * rtwdev)4509 static void __rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4510 {
4511 struct rtw_swing_table swing_table;
4512 u8 i;
4513
4514 rtw_phy_config_swing_table(rtwdev, &swing_table);
4515
4516 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4517 rtw8822c_pwr_track_stats(rtwdev, i);
4518 if (rtw_phy_pwrtrack_need_lck(rtwdev))
4519 rtw8822c_do_lck(rtwdev);
4520 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4521 rtw8822c_pwr_track_path(rtwdev, &swing_table, i);
4522 }
4523
rtw8822c_pwr_track(struct rtw_dev * rtwdev)4524 static void rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4525 {
4526 struct rtw_efuse *efuse = &rtwdev->efuse;
4527 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4528
4529 if (efuse->power_track_type != 0)
4530 return;
4531
4532 if (!dm_info->pwr_trk_triggered) {
4533 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4534 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x00);
4535 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4536
4537 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4538 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x00);
4539 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4540
4541 dm_info->pwr_trk_triggered = true;
4542 return;
4543 }
4544
4545 __rtw8822c_pwr_track(rtwdev);
4546 dm_info->pwr_trk_triggered = false;
4547 }
4548
rtw8822c_adaptivity_init(struct rtw_dev * rtwdev)4549 static void rtw8822c_adaptivity_init(struct rtw_dev *rtwdev)
4550 {
4551 rtw_phy_set_edcca_th(rtwdev, RTW8822C_EDCCA_MAX, RTW8822C_EDCCA_MAX);
4552
4553 /* mac edcca state setting */
4554 rtw_write32_clr(rtwdev, REG_TX_PTCL_CTRL, BIT_DIS_EDCCA);
4555 rtw_write32_set(rtwdev, REG_RD_CTRL, BIT_EDCCA_MSK_CNTDOWN_EN);
4556
4557 /* EDCCA decision option */
4558 rtw_write32_clr(rtwdev, REG_EDCCA_DECISION, BIT_EDCCA_OPTION);
4559 }
4560
rtw8822c_adaptivity(struct rtw_dev * rtwdev)4561 static void rtw8822c_adaptivity(struct rtw_dev *rtwdev)
4562 {
4563 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4564 s8 l2h, h2l;
4565 u8 igi;
4566
4567 igi = dm_info->igi_history[0];
4568 if (dm_info->edcca_mode == RTW_EDCCA_NORMAL) {
4569 l2h = max_t(s8, igi + EDCCA_IGI_L2H_DIFF, EDCCA_TH_L2H_LB);
4570 h2l = l2h - EDCCA_L2H_H2L_DIFF_NORMAL;
4571 } else {
4572 if (igi < dm_info->l2h_th_ini - EDCCA_ADC_BACKOFF)
4573 l2h = igi + EDCCA_ADC_BACKOFF;
4574 else
4575 l2h = dm_info->l2h_th_ini;
4576 h2l = l2h - EDCCA_L2H_H2L_DIFF;
4577 }
4578
4579 rtw_phy_set_edcca_th(rtwdev, l2h, h2l);
4580 }
4581
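/* The TX descriptor checksum is computed over 16-bit words, covering the
 * packet-offset area (pkt_offset appears to be in units of 8 bytes) plus
 * the TX packet descriptor itself.
 */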
rtw8822c_fill_txdesc_checksum(struct rtw_dev * rtwdev,struct rtw_tx_pkt_info * pkt_info,u8 * txdesc)4582 static void rtw8822c_fill_txdesc_checksum(struct rtw_dev *rtwdev,
4583 struct rtw_tx_pkt_info *pkt_info,
4584 u8 *txdesc)
4585 {
4586 const struct rtw_chip_info *chip = rtwdev->chip;
4587 size_t words;
4588
4589 words = (pkt_info->pkt_offset * 8 + chip->tx_pkt_desc_sz) / 2;
4590
4591 fill_txdesc_checksum_common(txdesc, words);
4592 }
4593
4594 static const struct rtw_pwr_seq_cmd trans_carddis_to_cardemu_8822c[] = {
4595 {0x0086,
4596 RTW_PWR_CUT_ALL_MSK,
4597 RTW_PWR_INTF_SDIO_MSK,
4598 RTW_PWR_ADDR_SDIO,
4599 RTW_PWR_CMD_WRITE, BIT(0), 0},
4600 {0x0086,
4601 RTW_PWR_CUT_ALL_MSK,
4602 RTW_PWR_INTF_SDIO_MSK,
4603 RTW_PWR_ADDR_SDIO,
4604 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4605 {0x002E,
4606 RTW_PWR_CUT_ALL_MSK,
4607 RTW_PWR_INTF_ALL_MSK,
4608 RTW_PWR_ADDR_MAC,
4609 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4610 {0x002D,
4611 RTW_PWR_CUT_ALL_MSK,
4612 RTW_PWR_INTF_ALL_MSK,
4613 RTW_PWR_ADDR_MAC,
4614 RTW_PWR_CMD_WRITE, BIT(0), 0},
4615 {0x007F,
4616 RTW_PWR_CUT_ALL_MSK,
4617 RTW_PWR_INTF_ALL_MSK,
4618 RTW_PWR_ADDR_MAC,
4619 RTW_PWR_CMD_WRITE, BIT(7), 0},
4620 {0x004A,
4621 RTW_PWR_CUT_ALL_MSK,
4622 RTW_PWR_INTF_USB_MSK,
4623 RTW_PWR_ADDR_MAC,
4624 RTW_PWR_CMD_WRITE, BIT(0), 0},
4625 {0x0005,
4626 RTW_PWR_CUT_ALL_MSK,
4627 RTW_PWR_INTF_ALL_MSK,
4628 RTW_PWR_ADDR_MAC,
4629 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4) | BIT(7), 0},
4630 {0xFFFF,
4631 RTW_PWR_CUT_ALL_MSK,
4632 RTW_PWR_INTF_ALL_MSK,
4633 0,
4634 RTW_PWR_CMD_END, 0, 0},
4635 };
4636
4637 static const struct rtw_pwr_seq_cmd trans_cardemu_to_act_8822c[] = {
4638 {0x0000,
4639 RTW_PWR_CUT_ALL_MSK,
4640 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4641 RTW_PWR_ADDR_MAC,
4642 RTW_PWR_CMD_WRITE, BIT(5), 0},
4643 {0x0005,
4644 RTW_PWR_CUT_ALL_MSK,
4645 RTW_PWR_INTF_ALL_MSK,
4646 RTW_PWR_ADDR_MAC,
4647 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3) | BIT(2)), 0},
4648 {0x0075,
4649 RTW_PWR_CUT_ALL_MSK,
4650 RTW_PWR_INTF_PCI_MSK,
4651 RTW_PWR_ADDR_MAC,
4652 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4653 {0x0006,
4654 RTW_PWR_CUT_ALL_MSK,
4655 RTW_PWR_INTF_ALL_MSK,
4656 RTW_PWR_ADDR_MAC,
4657 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4658 {0x0075,
4659 RTW_PWR_CUT_ALL_MSK,
4660 RTW_PWR_INTF_PCI_MSK,
4661 RTW_PWR_ADDR_MAC,
4662 RTW_PWR_CMD_WRITE, BIT(0), 0},
4663 {0xFF1A,
4664 RTW_PWR_CUT_ALL_MSK,
4665 RTW_PWR_INTF_USB_MSK,
4666 RTW_PWR_ADDR_MAC,
4667 RTW_PWR_CMD_WRITE, 0xFF, 0},
4668 {0x002E,
4669 RTW_PWR_CUT_ALL_MSK,
4670 RTW_PWR_INTF_ALL_MSK,
4671 RTW_PWR_ADDR_MAC,
4672 RTW_PWR_CMD_WRITE, BIT(3), 0},
4673 {0x0006,
4674 RTW_PWR_CUT_ALL_MSK,
4675 RTW_PWR_INTF_ALL_MSK,
4676 RTW_PWR_ADDR_MAC,
4677 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4678 {0x0005,
4679 RTW_PWR_CUT_ALL_MSK,
4680 RTW_PWR_INTF_ALL_MSK,
4681 RTW_PWR_ADDR_MAC,
4682 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3)), 0},
4683 {0x1018,
4684 RTW_PWR_CUT_ALL_MSK,
4685 RTW_PWR_INTF_ALL_MSK,
4686 RTW_PWR_ADDR_MAC,
4687 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4688 {0x0005,
4689 RTW_PWR_CUT_ALL_MSK,
4690 RTW_PWR_INTF_ALL_MSK,
4691 RTW_PWR_ADDR_MAC,
4692 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4693 {0x0005,
4694 RTW_PWR_CUT_ALL_MSK,
4695 RTW_PWR_INTF_ALL_MSK,
4696 RTW_PWR_ADDR_MAC,
4697 RTW_PWR_CMD_POLLING, BIT(0), 0},
4698 {0x0074,
4699 RTW_PWR_CUT_ALL_MSK,
4700 RTW_PWR_INTF_PCI_MSK,
4701 RTW_PWR_ADDR_MAC,
4702 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4703 {0x0071,
4704 RTW_PWR_CUT_ALL_MSK,
4705 RTW_PWR_INTF_PCI_MSK,
4706 RTW_PWR_ADDR_MAC,
4707 RTW_PWR_CMD_WRITE, BIT(4), 0},
4708 {0x0062,
4709 RTW_PWR_CUT_ALL_MSK,
4710 RTW_PWR_INTF_PCI_MSK,
4711 RTW_PWR_ADDR_MAC,
4712 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)),
4713 (BIT(7) | BIT(6) | BIT(5))},
4714 {0x0061,
4715 RTW_PWR_CUT_ALL_MSK,
4716 RTW_PWR_INTF_PCI_MSK,
4717 RTW_PWR_ADDR_MAC,
4718 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)), 0},
4719 {0x001F,
4720 RTW_PWR_CUT_ALL_MSK,
4721 RTW_PWR_INTF_ALL_MSK,
4722 RTW_PWR_ADDR_MAC,
4723 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4724 {0x00EF,
4725 RTW_PWR_CUT_ALL_MSK,
4726 RTW_PWR_INTF_ALL_MSK,
4727 RTW_PWR_ADDR_MAC,
4728 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4729 {0x1045,
4730 RTW_PWR_CUT_ALL_MSK,
4731 RTW_PWR_INTF_ALL_MSK,
4732 RTW_PWR_ADDR_MAC,
4733 RTW_PWR_CMD_WRITE, BIT(4), BIT(4)},
4734 {0x0010,
4735 RTW_PWR_CUT_ALL_MSK,
4736 RTW_PWR_INTF_ALL_MSK,
4737 RTW_PWR_ADDR_MAC,
4738 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4739 {0x1064,
4740 RTW_PWR_CUT_ALL_MSK,
4741 RTW_PWR_INTF_ALL_MSK,
4742 RTW_PWR_ADDR_MAC,
4743 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4744 {0xFFFF,
4745 RTW_PWR_CUT_ALL_MSK,
4746 RTW_PWR_INTF_ALL_MSK,
4747 0,
4748 RTW_PWR_CMD_END, 0, 0},
4749 };
4750
4751 static const struct rtw_pwr_seq_cmd trans_act_to_cardemu_8822c[] = {
4752 {0x0093,
4753 RTW_PWR_CUT_ALL_MSK,
4754 RTW_PWR_INTF_ALL_MSK,
4755 RTW_PWR_ADDR_MAC,
4756 RTW_PWR_CMD_WRITE, BIT(3), 0},
4757 {0x001F,
4758 RTW_PWR_CUT_ALL_MSK,
4759 RTW_PWR_INTF_ALL_MSK,
4760 RTW_PWR_ADDR_MAC,
4761 RTW_PWR_CMD_WRITE, 0xFF, 0},
4762 {0x00EF,
4763 RTW_PWR_CUT_ALL_MSK,
4764 RTW_PWR_INTF_ALL_MSK,
4765 RTW_PWR_ADDR_MAC,
4766 RTW_PWR_CMD_WRITE, 0xFF, 0},
4767 {0x1045,
4768 RTW_PWR_CUT_ALL_MSK,
4769 RTW_PWR_INTF_ALL_MSK,
4770 RTW_PWR_ADDR_MAC,
4771 RTW_PWR_CMD_WRITE, BIT(4), 0},
4772 {0xFF1A,
4773 RTW_PWR_CUT_ALL_MSK,
4774 RTW_PWR_INTF_USB_MSK,
4775 RTW_PWR_ADDR_MAC,
4776 RTW_PWR_CMD_WRITE, 0xFF, 0x30},
4777 {0x0049,
4778 RTW_PWR_CUT_ALL_MSK,
4779 RTW_PWR_INTF_ALL_MSK,
4780 RTW_PWR_ADDR_MAC,
4781 RTW_PWR_CMD_WRITE, BIT(1), 0},
4782 {0x0006,
4783 RTW_PWR_CUT_ALL_MSK,
4784 RTW_PWR_INTF_ALL_MSK,
4785 RTW_PWR_ADDR_MAC,
4786 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4787 {0x0002,
4788 RTW_PWR_CUT_ALL_MSK,
4789 RTW_PWR_INTF_ALL_MSK,
4790 RTW_PWR_ADDR_MAC,
4791 RTW_PWR_CMD_WRITE, BIT(1), 0},
4792 {0x0005,
4793 RTW_PWR_CUT_ALL_MSK,
4794 RTW_PWR_INTF_ALL_MSK,
4795 RTW_PWR_ADDR_MAC,
4796 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4797 {0x0005,
4798 RTW_PWR_CUT_ALL_MSK,
4799 RTW_PWR_INTF_ALL_MSK,
4800 RTW_PWR_ADDR_MAC,
4801 RTW_PWR_CMD_POLLING, BIT(1), 0},
4802 {0x0000,
4803 RTW_PWR_CUT_ALL_MSK,
4804 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4805 RTW_PWR_ADDR_MAC,
4806 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4807 {0xFFFF,
4808 RTW_PWR_CUT_ALL_MSK,
4809 RTW_PWR_INTF_ALL_MSK,
4810 0,
4811 RTW_PWR_CMD_END, 0, 0},
4812 };
4813
4814 static const struct rtw_pwr_seq_cmd trans_cardemu_to_carddis_8822c[] = {
4815 {0x0005,
4816 RTW_PWR_CUT_ALL_MSK,
4817 RTW_PWR_INTF_SDIO_MSK,
4818 RTW_PWR_ADDR_MAC,
4819 RTW_PWR_CMD_WRITE, BIT(7), BIT(7)},
4820 {0x0007,
4821 RTW_PWR_CUT_ALL_MSK,
4822 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4823 RTW_PWR_ADDR_MAC,
4824 RTW_PWR_CMD_WRITE, 0xFF, 0x00},
4825 {0x0067,
4826 RTW_PWR_CUT_ALL_MSK,
4827 RTW_PWR_INTF_ALL_MSK,
4828 RTW_PWR_ADDR_MAC,
4829 RTW_PWR_CMD_WRITE, BIT(5), 0},
4830 {0x004A,
4831 RTW_PWR_CUT_ALL_MSK,
4832 RTW_PWR_INTF_USB_MSK,
4833 RTW_PWR_ADDR_MAC,
4834 RTW_PWR_CMD_WRITE, BIT(0), 0},
4835 {0x0081,
4836 RTW_PWR_CUT_ALL_MSK,
4837 RTW_PWR_INTF_ALL_MSK,
4838 RTW_PWR_ADDR_MAC,
4839 RTW_PWR_CMD_WRITE, BIT(7) | BIT(6), 0},
4840 {0x0090,
4841 RTW_PWR_CUT_ALL_MSK,
4842 RTW_PWR_INTF_ALL_MSK,
4843 RTW_PWR_ADDR_MAC,
4844 RTW_PWR_CMD_WRITE, BIT(1), 0},
4845 {0x0092,
4846 RTW_PWR_CUT_ALL_MSK,
4847 RTW_PWR_INTF_PCI_MSK,
4848 RTW_PWR_ADDR_MAC,
4849 RTW_PWR_CMD_WRITE, 0xFF, 0x20},
4850 {0x0093,
4851 RTW_PWR_CUT_ALL_MSK,
4852 RTW_PWR_INTF_PCI_MSK,
4853 RTW_PWR_ADDR_MAC,
4854 RTW_PWR_CMD_WRITE, 0xFF, 0x04},
4855 {0x0005,
4856 RTW_PWR_CUT_ALL_MSK,
4857 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4858 RTW_PWR_ADDR_MAC,
4859 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4), BIT(3)},
4860 {0x0005,
4861 RTW_PWR_CUT_ALL_MSK,
4862 RTW_PWR_INTF_PCI_MSK,
4863 RTW_PWR_ADDR_MAC,
4864 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4865 {0x0086,
4866 RTW_PWR_CUT_ALL_MSK,
4867 RTW_PWR_INTF_SDIO_MSK,
4868 RTW_PWR_ADDR_SDIO,
4869 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4870 {0xFFFF,
4871 RTW_PWR_CUT_ALL_MSK,
4872 RTW_PWR_INTF_ALL_MSK,
4873 0,
4874 RTW_PWR_CMD_END, 0, 0},
4875 };
4876
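/* Power on/off flows are NULL-terminated lists of the sub-sequences above,
 * executed in order by the power sequence parser.
 */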
4877 static const struct rtw_pwr_seq_cmd *card_enable_flow_8822c[] = {
4878 trans_carddis_to_cardemu_8822c,
4879 trans_cardemu_to_act_8822c,
4880 NULL
4881 };
4882
4883 static const struct rtw_pwr_seq_cmd *card_disable_flow_8822c[] = {
4884 trans_act_to_cardemu_8822c,
4885 trans_cardemu_to_carddis_8822c,
4886 NULL
4887 };
4888
4889 static const struct rtw_intf_phy_para usb2_param_8822c[] = {
4890 {0xFFFF, 0x00,
4891 RTW_IP_SEL_PHY,
4892 RTW_INTF_PHY_CUT_ALL,
4893 RTW_INTF_PHY_PLATFORM_ALL},
4894 };
4895
4896 static const struct rtw_intf_phy_para usb3_param_8822c[] = {
4897 {0xFFFF, 0x0000,
4898 RTW_IP_SEL_PHY,
4899 RTW_INTF_PHY_CUT_ALL,
4900 RTW_INTF_PHY_PLATFORM_ALL},
4901 };
4902
4903 static const struct rtw_intf_phy_para pcie_gen1_param_8822c[] = {
4904 {0xFFFF, 0x0000,
4905 RTW_IP_SEL_PHY,
4906 RTW_INTF_PHY_CUT_ALL,
4907 RTW_INTF_PHY_PLATFORM_ALL},
4908 };
4909
4910 static const struct rtw_intf_phy_para pcie_gen2_param_8822c[] = {
4911 {0xFFFF, 0x0000,
4912 RTW_IP_SEL_PHY,
4913 RTW_INTF_PHY_CUT_ALL,
4914 RTW_INTF_PHY_PLATFORM_ALL},
4915 };
4916
4917 static const struct rtw_intf_phy_para_table phy_para_table_8822c = {
4918 .usb2_para = usb2_param_8822c,
4919 .usb3_para = usb3_param_8822c,
4920 .gen1_para = pcie_gen1_param_8822c,
4921 .gen2_para = pcie_gen2_param_8822c,
4922 .n_usb2_para = ARRAY_SIZE(usb2_param_8822c),
4923 .n_usb3_para = ARRAY_SIZE(usb3_param_8822c),

4924 .n_gen1_para = ARRAY_SIZE(pcie_gen1_param_8822c),
4925 .n_gen2_para = ARRAY_SIZE(pcie_gen2_param_8822c),
4926 };
4927
4928 static const struct rtw_rfe_def rtw8822c_rfe_defs[] = {
4929 [0] = RTW_DEF_RFE(8822c, 0, 0),
4930 [1] = RTW_DEF_RFE(8822c, 0, 0),
4931 [2] = RTW_DEF_RFE(8822c, 0, 0),
4932 [3] = RTW_DEF_RFE(8822c, 0, 0),
4933 [4] = RTW_DEF_RFE(8822c, 0, 0),
4934 [5] = RTW_DEF_RFE(8822c, 0, 5),
4935 [6] = RTW_DEF_RFE(8822c, 0, 0),
4936 };
4937
4938 static const struct rtw_hw_reg rtw8822c_dig[] = {
4939 [0] = { .addr = 0x1d70, .mask = 0x7f },
4940 [1] = { .addr = 0x1d70, .mask = 0x7f00 },
4941 };
4942
4943 static const struct rtw_ltecoex_addr rtw8822c_ltecoex_addr = {
4944 .ctrl = LTECOEX_ACCESS_CTRL,
4945 .wdata = LTECOEX_WRITE_DATA,
4946 .rdata = LTECOEX_READ_DATA,
4947 };
4948
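/* Reserved page counts per TX priority queue; column order follows
 * struct rtw_page_table: {hq_num, nq_num, lq_num, exq_num, gapq_num}.
 */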
4949 static const struct rtw_page_table page_table_8822c[] = {
4950 {64, 64, 64, 64, 1},
4951 {64, 64, 64, 64, 1},
4952 {64, 64, 0, 0, 1},
4953 {64, 64, 64, 0, 1},
4954 {64, 64, 64, 64, 1},
4955 };
4956
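/* DMA priority queue mapping per traffic class; column order follows
 * struct rtw_rqpn: {dma_map_vo, dma_map_vi, dma_map_be, dma_map_bk,
 * dma_map_mg, dma_map_hi}.
 */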
4957 static const struct rtw_rqpn rqpn_table_8822c[] = {
4958 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4959 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4960 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4961 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4962 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4963 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4964 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4965 RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_HIGH,
4966 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4967 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4968 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4969 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4970 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4971 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4972 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4973 };
4974
4975 static struct rtw_prioq_addrs prioq_addrs_8822c = {
4976 .prio[RTW_DMA_MAPPING_EXTRA] = {
4977 .rsvd = REG_FIFOPAGE_INFO_4, .avail = REG_FIFOPAGE_INFO_4 + 2,
4978 },
4979 .prio[RTW_DMA_MAPPING_LOW] = {
4980 .rsvd = REG_FIFOPAGE_INFO_2, .avail = REG_FIFOPAGE_INFO_2 + 2,
4981 },
4982 .prio[RTW_DMA_MAPPING_NORMAL] = {
4983 .rsvd = REG_FIFOPAGE_INFO_3, .avail = REG_FIFOPAGE_INFO_3 + 2,
4984 },
4985 .prio[RTW_DMA_MAPPING_HIGH] = {
4986 .rsvd = REG_FIFOPAGE_INFO_1, .avail = REG_FIFOPAGE_INFO_1 + 2,
4987 },
4988 .wsize = true,
4989 };
4990
4991 static struct rtw_chip_ops rtw8822c_ops = {
4992 .phy_set_param = rtw8822c_phy_set_param,
4993 .read_efuse = rtw8822c_read_efuse,
4994 .query_rx_desc = rtw8822c_query_rx_desc,
4995 .set_channel = rtw8822c_set_channel,
4996 .mac_init = rtw8822c_mac_init,
4997 .dump_fw_crash = rtw8822c_dump_fw_crash,
4998 .read_rf = rtw_phy_read_rf,
4999 .write_rf = rtw_phy_write_rf_reg_mix,
5000 .set_tx_power_index = rtw8822c_set_tx_power_index,
5001 .set_antenna = rtw8822c_set_antenna,
5002 .cfg_ldo25 = rtw8822c_cfg_ldo25,
5003 .false_alarm_statistics = rtw8822c_false_alarm_statistics,
5004 .dpk_track = rtw8822c_dpk_track,
5005 .phy_calibration = rtw8822c_phy_calibration,
5006 .cck_pd_set = rtw8822c_phy_cck_pd_set,
5007 .pwr_track = rtw8822c_pwr_track,
5008 .config_bfee = rtw8822c_bf_config_bfee,
5009 .set_gid_table = rtw_bf_set_gid_table,
5010 .cfg_csi_rate = rtw_bf_cfg_csi_rate,
5011 .adaptivity_init = rtw8822c_adaptivity_init,
5012 .adaptivity = rtw8822c_adaptivity,
5013 .cfo_init = rtw8822c_cfo_init,
5014 .cfo_track = rtw8822c_cfo_track,
5015 .config_tx_path = rtw8822c_config_tx_path,
5016 .config_txrx_mode = rtw8822c_config_trx_mode,
5017 .fill_txdesc_checksum = rtw8822c_fill_txdesc_checksum,
5018
5019 .coex_set_init = rtw8822c_coex_cfg_init,
5020 .coex_set_ant_switch = NULL,
5021 .coex_set_gnt_fix = rtw8822c_coex_cfg_gnt_fix,
5022 .coex_set_gnt_debug = rtw8822c_coex_cfg_gnt_debug,
5023 .coex_set_rfe_type = rtw8822c_coex_cfg_rfe_type,
5024 .coex_set_wl_tx_power = rtw8822c_coex_cfg_wl_tx_power,
5025 .coex_set_wl_rx_gain = rtw8822c_coex_cfg_wl_rx_gain,
5026 };
5027
5028 /* Shared-Antenna Coex Table */
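/* Each case is a {bt, wl} pair of coex slot table values
 * (struct coex_table_para) programmed into the coex table registers.
 */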
5029 static const struct coex_table_para table_sant_8822c[] = {
5030 {0xffffffff, 0xffffffff}, /* case-0 */
5031 {0x55555555, 0x55555555},
5032 {0x66555555, 0x66555555},
5033 {0xaaaaaaaa, 0xaaaaaaaa},
5034 {0x5a5a5a5a, 0x5a5a5a5a},
5035 {0xfafafafa, 0xfafafafa}, /* case-5 */
5036 {0x6a5a5555, 0xaaaaaaaa},
5037 {0x6a5a56aa, 0x6a5a56aa},
5038 {0x6a5a5a5a, 0x6a5a5a5a},
5039 {0x66555555, 0x5a5a5a5a},
5040 {0x66555555, 0x6a5a5a5a}, /* case-10 */
5041 {0x66555555, 0x6a5a5aaa},
5042 {0x66555555, 0x5a5a5aaa},
5043 {0x66555555, 0x6aaa5aaa},
5044 {0x66555555, 0xaaaa5aaa},
5045 {0x66555555, 0xaaaaaaaa}, /* case-15 */
5046 {0xffff55ff, 0xfafafafa},
5047 {0xffff55ff, 0x6afa5afa},
5048 {0xaaffffaa, 0xfafafafa},
5049 {0xaa5555aa, 0x5a5a5a5a},
5050 {0xaa5555aa, 0x6a5a5a5a}, /* case-20 */
5051 {0xaa5555aa, 0xaaaaaaaa},
5052 {0xffffffff, 0x5a5a5a5a},
5053 {0xffffffff, 0x5a5a5a5a},
5054 {0xffffffff, 0x55555555},
5055 {0xffffffff, 0x5a5a5aaa}, /* case-25 */
5056 {0x55555555, 0x5a5a5a5a},
5057 {0x55555555, 0xaaaaaaaa},
5058 {0x55555555, 0x6a5a6a5a},
5059 {0x66556655, 0x66556655},
5060 {0x66556aaa, 0x6a5a6aaa}, /* case-30 */
5061 {0xffffffff, 0x5aaa5aaa},
5062 {0x56555555, 0x5a5a5aaa},
5063 {0xdaffdaff, 0xdaffdaff},
5064 {0xddffddff, 0xddffddff},
5065 };
5066
5067 /* Non-Shared-Antenna Coex Table */
5068 static const struct coex_table_para table_nsant_8822c[] = {
5069 {0xffffffff, 0xffffffff}, /* case-100 */
5070 {0x55555555, 0x55555555},
5071 {0x66555555, 0x66555555},
5072 {0xaaaaaaaa, 0xaaaaaaaa},
5073 {0x5a5a5a5a, 0x5a5a5a5a},
5074 {0xfafafafa, 0xfafafafa}, /* case-105 */
5075 {0x5afa5afa, 0x5afa5afa},
5076 {0x55555555, 0xfafafafa},
5077 {0x66555555, 0xfafafafa},
5078 {0x66555555, 0x5a5a5a5a},
5079 {0x66555555, 0x6a5a5a5a}, /* case-110 */
5080 {0x66555555, 0xaaaaaaaa},
5081 {0xffff55ff, 0xfafafafa},
5082 {0xffff55ff, 0x5afa5afa},
5083 {0xffff55ff, 0xaaaaaaaa},
5084 {0xffff55ff, 0xffff55ff}, /* case-115 */
5085 {0xaaffffaa, 0x5afa5afa},
5086 {0xaaffffaa, 0xaaaaaaaa},
5087 {0xffffffff, 0xfafafafa},
5088 {0xffffffff, 0x5afa5afa},
5089 {0xffffffff, 0xaaaaaaaa}, /* case-120 */
5090 {0x55ff55ff, 0x5afa5afa},
5091 {0x55ff55ff, 0xaaaaaaaa},
5092 {0x55ff55ff, 0x55ff55ff}
5093 };
5094
5095 /* Shared-Antenna TDMA */
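/* Each case carries the five raw TDMA parameter bytes (struct coex_tdma_para)
 * handed to the firmware through the coex TDMA H2C command.
 */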
5096 static const struct coex_tdma_para tdma_sant_8822c[] = {
5097 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-0 */
5098 { {0x61, 0x45, 0x03, 0x11, 0x11} }, /* case-1 */
5099 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5100 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5101 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5102 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-5 */
5103 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5104 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5105 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5106 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5107 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-10 */
5108 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5109 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5110 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5111 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5112 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-15 */
5113 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5114 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5115 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5116 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5117 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-20 */
5118 { {0x51, 0x4a, 0x03, 0x10, 0x50} },
5119 { {0x51, 0x0c, 0x03, 0x10, 0x54} },
5120 { {0x55, 0x08, 0x03, 0x10, 0x54} },
5121 { {0x65, 0x10, 0x03, 0x11, 0x10} },
5122 { {0x51, 0x10, 0x03, 0x10, 0x51} }, /* case-25 */
5123 { {0x51, 0x08, 0x03, 0x10, 0x50} },
5124 { {0x61, 0x08, 0x03, 0x11, 0x11} }
5125 };
5126
5127 /* Non-Shared-Antenna TDMA */
5128 static const struct coex_tdma_para tdma_nsant_8822c[] = {
5129 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-100 */
5130 { {0x61, 0x45, 0x03, 0x11, 0x11} },
5131 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5132 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5133 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5134 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-105 */
5135 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5136 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5137 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5138 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5139 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-110 */
5140 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5141 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5142 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5143 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5144 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-115 */
5145 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5146 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5147 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5148 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5149 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-120 */
5150 { {0x51, 0x08, 0x03, 0x10, 0x50} }
5151 };
5152
5153 /* RSSI thresholds in percent (dBm = % - 100) */
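/* e.g. a step of 60 corresponds to 60 - 100 = -40 dBm */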
5154 static const u8 wl_rssi_step_8822c[] = {60, 50, 44, 30};
5155 static const u8 bt_rssi_step_8822c[] = {8, 15, 20, 25};
5156 static const struct coex_5g_afh_map afh_5g_8822c[] = { {0, 0, 0} };
5157
5158 /* wl_tx_dec_power, bt_tx_dec_power, wl_rx_gain, bt_rx_lna_constrain */
5159 static const struct coex_rf_para rf_para_tx_8822c[] = {
5160 {0, 0, false, 7}, /* for normal */
5161 {0, 16, false, 7}, /* for WL-CPT */
5162 {8, 17, true, 4},
5163 {7, 18, true, 4},
5164 {6, 19, true, 4},
5165 {5, 20, true, 4},
5166 {0, 21, true, 4} /* for gaming HID */
5167 };
5168
5169 static const struct coex_rf_para rf_para_rx_8822c[] = {
5170 {0, 0, false, 7}, /* for normal */
5171 {0, 16, false, 7}, /* for WL-CPT */
5172 {3, 24, true, 5},
5173 {2, 26, true, 5},
5174 {1, 27, true, 5},
5175 {0, 28, true, 5},
5176 {0, 28, true, 5} /* for gaming HID */
5177 };
5178
5179 static_assert(ARRAY_SIZE(rf_para_tx_8822c) == ARRAY_SIZE(rf_para_rx_8822c));
5180
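/* TX power tracking compensation tables, indexed by the thermal delta
 * between the current thermal meter reading and the efuse calibration
 * value. The _p tables apply when the reading is above the calibration
 * value, the _n tables when it is below; the 5G tables hold one row per
 * 5G sub-band.
 */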
5181 static const u8
5182 rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5183 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5184 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5185 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5186 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5187 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5188 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5189 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5190 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5191 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5192 };
5193
5194 static const u8
5195 rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5196 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5197 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5198 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5199 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5200 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5201 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5202 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5203 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5204 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5205 };
5206
5207 static const u8
5208 rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5209 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5210 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5211 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5212 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5213 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5214 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5215 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5216 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5217 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5218 };
5219
5220 static const u8
5221 rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5222 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5223 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5224 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5225 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5226 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5227 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5228 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5229 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5230 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5231 };
5232
5233 static const u8 rtw8822c_pwrtrk_2gb_n[RTW_PWR_TRK_TBL_SZ] = {
5234 0, 1, 2, 3, 4, 4, 5, 6, 7, 8,
5235 9, 9, 10, 11, 12, 13, 14, 15, 15, 16,
5236 17, 18, 19, 20, 20, 21, 22, 23, 24, 25
5237 };
5238
5239 static const u8 rtw8822c_pwrtrk_2gb_p[RTW_PWR_TRK_TBL_SZ] = {
5240 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5241 10, 11, 12, 13, 14, 14, 15, 16, 17, 18,
5242 19, 20, 21, 22, 23, 24, 25, 26, 27, 28
5243 };
5244
5245 static const u8 rtw8822c_pwrtrk_2ga_n[RTW_PWR_TRK_TBL_SZ] = {
5246 0, 1, 2, 2, 3, 4, 4, 5, 6, 6,
5247 7, 8, 8, 9, 9, 10, 11, 11, 12, 13,
5248 13, 14, 15, 15, 16, 17, 17, 18, 19, 19
5249 };
5250
5251 static const u8 rtw8822c_pwrtrk_2ga_p[RTW_PWR_TRK_TBL_SZ] = {
5252 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5253 10, 11, 11, 12, 13, 14, 15, 16, 17, 18,
5254 19, 20, 21, 22, 23, 24, 25, 25, 26, 27
5255 };
5256
5257 static const u8 rtw8822c_pwrtrk_2g_cck_b_n[RTW_PWR_TRK_TBL_SZ] = {
5258 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5259 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5260 17, 18, 19, 20, 21, 22, 23, 23, 24, 25
5261 };
5262
5263 static const u8 rtw8822c_pwrtrk_2g_cck_b_p[RTW_PWR_TRK_TBL_SZ] = {
5264 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5265 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
5266 20, 21, 22, 23, 24, 25, 26, 27, 28, 29
5267 };
5268
5269 static const u8 rtw8822c_pwrtrk_2g_cck_a_n[RTW_PWR_TRK_TBL_SZ] = {
5270 0, 1, 2, 3, 3, 4, 5, 6, 6, 7,
5271 8, 9, 9, 10, 11, 12, 12, 13, 14, 15,
5272 15, 16, 17, 18, 18, 19, 20, 21, 21, 22
5273 };
5274
5275 static const u8 rtw8822c_pwrtrk_2g_cck_a_p[RTW_PWR_TRK_TBL_SZ] = {
5276 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5277 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5278 18, 18, 19, 20, 21, 22, 23, 24, 24, 25
5279 };
5280
5281 static const struct rtw_pwr_track_tbl rtw8822c_rtw_pwr_track_tbl = {
5282 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_1],
5283 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_2],
5284 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_3],
5285 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_1],
5286 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_2],
5287 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_3],
5288 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_1],
5289 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_2],
5290 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_3],
5291 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_1],
5292 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_2],
5293 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_3],
5294 .pwrtrk_2gb_n = rtw8822c_pwrtrk_2gb_n,
5295 .pwrtrk_2gb_p = rtw8822c_pwrtrk_2gb_p,
5296 .pwrtrk_2ga_n = rtw8822c_pwrtrk_2ga_n,
5297 .pwrtrk_2ga_p = rtw8822c_pwrtrk_2ga_p,
5298 .pwrtrk_2g_cckb_n = rtw8822c_pwrtrk_2g_cck_b_n,
5299 .pwrtrk_2g_cckb_p = rtw8822c_pwrtrk_2g_cck_b_p,
5300 .pwrtrk_2g_ccka_n = rtw8822c_pwrtrk_2g_cck_a_n,
5301 .pwrtrk_2g_ccka_p = rtw8822c_pwrtrk_2g_cck_a_p,
5302 };
5303
5304 static struct rtw_hw_reg_offset rtw8822c_edcca_th[] = {
5305 [EDCCA_TH_L2H_IDX] = {
5306 {.addr = 0x84c, .mask = MASKBYTE2}, .offset = 0x80
5307 },
5308 [EDCCA_TH_H2L_IDX] = {
5309 {.addr = 0x84c, .mask = MASKBYTE3}, .offset = 0x80
5310 },
5311 };
5312
5313 #ifdef CONFIG_PM
5314 static const struct wiphy_wowlan_support rtw_wowlan_stub_8822c = {
5315 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_GTK_REKEY_FAILURE |
5316 WIPHY_WOWLAN_DISCONNECT | WIPHY_WOWLAN_SUPPORTS_GTK_REKEY |
5317 WIPHY_WOWLAN_NET_DETECT,
5318 .n_patterns = RTW_MAX_PATTERN_NUM,
5319 .pattern_max_len = RTW_MAX_PATTERN_SIZE,
5320 .pattern_min_len = 1,
5321 .max_nd_match_sets = 4,
5322 };
5323 #endif
5324
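/* Registers dumped into the coex debug info; the RTW_REG_DOMAIN_NL entries
 * act as line-break separators in the dump.
 */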
5325 static const struct rtw_reg_domain coex_info_hw_regs_8822c[] = {
5326 {0x1860, BIT(3), RTW_REG_DOMAIN_MAC8},
5327 {0x4160, BIT(3), RTW_REG_DOMAIN_MAC8},
5328 {0x1c32, BIT(6), RTW_REG_DOMAIN_MAC8},
5329 {0x1c38, BIT(28), RTW_REG_DOMAIN_MAC32},
5330 {0, 0, RTW_REG_DOMAIN_NL},
5331 {0x430, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5332 {0x434, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5333 {0x42a, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5334 {0x426, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5335 {0x45e, BIT(3), RTW_REG_DOMAIN_MAC8},
5336 {0x454, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5337 {0, 0, RTW_REG_DOMAIN_NL},
5338 {0x4c, BIT(24) | BIT(23), RTW_REG_DOMAIN_MAC32},
5339 {0x64, BIT(0), RTW_REG_DOMAIN_MAC8},
5340 {0x4c6, BIT(4), RTW_REG_DOMAIN_MAC8},
5341 {0x40, BIT(5), RTW_REG_DOMAIN_MAC8},
5342 {0x1, RFREG_MASK, RTW_REG_DOMAIN_RF_B},
5343 {0, 0, RTW_REG_DOMAIN_NL},
5344 {0x550, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5345 {0x522, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5346 {0x953, BIT(1), RTW_REG_DOMAIN_MAC8},
5347 {0xc50, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5348 };
5349
5350 const struct rtw_chip_info rtw8822c_hw_spec = {
5351 .ops = &rtw8822c_ops,
5352 .id = RTW_CHIP_TYPE_8822C,
5353 .fw_name = "rtw88/rtw8822c_fw.bin",
5354 .wlan_cpu = RTW_WCPU_11AC,
5355 .tx_pkt_desc_sz = 48,
5356 .tx_buf_desc_sz = 16,
5357 .rx_pkt_desc_sz = 24,
5358 .rx_buf_desc_sz = 8,
5359 .phy_efuse_size = 512,
5360 .log_efuse_size = 768,
5361 .ptct_efuse_size = 124,
5362 .txff_size = 262144,
5363 .rxff_size = 24576,
5364 .fw_rxff_size = 12288,
5365 .rsvd_drv_pg_num = 16,
5366 .txgi_factor = 2,
5367 .is_pwr_by_rate_dec = false,
5368 .max_power_index = 0x7f,
5369 .csi_buf_pg_num = 50,
5370 .band = RTW_BAND_2G | RTW_BAND_5G,
5371 .page_size = TX_PAGE_SIZE,
5372 .dig_min = 0x20,
5373 .usb_tx_agg_desc_num = 3,
5374 .default_1ss_tx_path = BB_PATH_A,
5375 .path_div_supported = true,
5376 .ht_supported = true,
5377 .vht_supported = true,
5378 .lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),
5379 .sys_func_en = 0xD8,
5380 .pwr_on_seq = card_enable_flow_8822c,
5381 .pwr_off_seq = card_disable_flow_8822c,
5382 .page_table = page_table_8822c,
5383 .rqpn_table = rqpn_table_8822c,
5384 .prioq_addrs = &prioq_addrs_8822c,
5385 .intf_table = &phy_para_table_8822c,
5386 .dig = rtw8822c_dig,
5387 .dig_cck = NULL,
5388 .rf_base_addr = {0x3c00, 0x4c00},
5389 .rf_sipi_addr = {0x1808, 0x4108},
5390 .ltecoex_addr = &rtw8822c_ltecoex_addr,
5391 .mac_tbl = &rtw8822c_mac_tbl,
5392 .agc_tbl = &rtw8822c_agc_tbl,
5393 .bb_tbl = &rtw8822c_bb_tbl,
5394 .rfk_init_tbl = &rtw8822c_array_mp_cal_init_tbl,
5395 .rf_tbl = {&rtw8822c_rf_b_tbl, &rtw8822c_rf_a_tbl},
5396 .rfe_defs = rtw8822c_rfe_defs,
5397 .rfe_defs_size = ARRAY_SIZE(rtw8822c_rfe_defs),
5398 .en_dis_dpd = true,
5399 .dpd_ratemask = DIS_DPD_RATEALL,
5400 .pwr_track_tbl = &rtw8822c_rtw_pwr_track_tbl,
5401 .iqk_threshold = 8,
5402 .lck_threshold = 8,
5403 .bfer_su_max_num = 2,
5404 .bfer_mu_max_num = 1,
5405 .rx_ldpc = true,
5406 .tx_stbc = true,
5407 .edcca_th = rtw8822c_edcca_th,
5408 .l2h_th_ini_cs = 60,
5409 .l2h_th_ini_ad = 45,
5410 .ampdu_density = IEEE80211_HT_MPDU_DENSITY_2,
5411
5412 #ifdef CONFIG_PM
5413 .wow_fw_name = "rtw88/rtw8822c_wow_fw.bin",
5414 .wowlan_stub = &rtw_wowlan_stub_8822c,
5415 .max_sched_scan_ssids = 4,
5416 #endif
5417 .max_scan_ie_len = (RTW_PROBE_PG_CNT - 1) * TX_PAGE_SIZE,
5418 .coex_para_ver = 0x22020720,
5419 .bt_desired_ver = 0x20,
5420 .scbd_support = true,
5421 .new_scbd10_def = true,
5422 .ble_hid_profile_support = true,
5423 .wl_mimo_ps_support = true,
5424 .pstdma_type = COEX_PSTDMA_FORCE_LPSOFF,
5425 .bt_rssi_type = COEX_BTRSSI_DBM,
5426 .ant_isolation = 15,
5427 .rssi_tolerance = 2,
5428 .wl_rssi_step = wl_rssi_step_8822c,
5429 .bt_rssi_step = bt_rssi_step_8822c,
5430 .table_sant_num = ARRAY_SIZE(table_sant_8822c),
5431 .table_sant = table_sant_8822c,
5432 .table_nsant_num = ARRAY_SIZE(table_nsant_8822c),
5433 .table_nsant = table_nsant_8822c,
5434 .tdma_sant_num = ARRAY_SIZE(tdma_sant_8822c),
5435 .tdma_sant = tdma_sant_8822c,
5436 .tdma_nsant_num = ARRAY_SIZE(tdma_nsant_8822c),
5437 .tdma_nsant = tdma_nsant_8822c,
5438 .wl_rf_para_num = ARRAY_SIZE(rf_para_tx_8822c),
5439 .wl_rf_para_tx = rf_para_tx_8822c,
5440 .wl_rf_para_rx = rf_para_rx_8822c,
5441 .bt_afh_span_bw20 = 0x24,
5442 .bt_afh_span_bw40 = 0x36,
5443 .afh_5g_num = ARRAY_SIZE(afh_5g_8822c),
5444 .afh_5g = afh_5g_8822c,
5445
5446 .coex_info_hw_regs_num = ARRAY_SIZE(coex_info_hw_regs_8822c),
5447 .coex_info_hw_regs = coex_info_hw_regs_8822c,
5448
5449 .fw_fifo_addr = {0x780, 0x700, 0x780, 0x660, 0x650, 0x680},
5450 .fwcd_segs = &rtw8822c_fwcd_segs,
5451 };
5452 EXPORT_SYMBOL(rtw8822c_hw_spec);
5453
5454 MODULE_FIRMWARE("rtw88/rtw8822c_fw.bin");
5455 MODULE_FIRMWARE("rtw88/rtw8822c_wow_fw.bin");
5456
5457 MODULE_AUTHOR("Realtek Corporation");
5458 MODULE_DESCRIPTION("Realtek 802.11ac wireless 8822c driver");
5459 MODULE_LICENSE("Dual BSD/GPL");
5460