1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2018-2019 Realtek Corporation
3 */
4
5 #include <linux/module.h>
6 #include "main.h"
7 #include "coex.h"
8 #include "fw.h"
9 #include "tx.h"
10 #include "rx.h"
11 #include "phy.h"
12 #include "rtw8822c.h"
13 #include "rtw8822c_table.h"
14 #include "mac.h"
15 #include "reg.h"
16 #include "debug.h"
17 #include "util.h"
18 #include "bf.h"
19 #include "efuse.h"
20
21 #define IQK_DONE_8822C 0xaa
22
23 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
24 u8 rx_path, bool is_tx2_path);
25
26 static void rtw8822ce_efuse_parsing(struct rtw_efuse *efuse,
27 struct rtw8822c_efuse *map)
28 {
29 ether_addr_copy(efuse->addr, map->e.mac_addr);
30 }
31
32 static void rtw8822cu_efuse_parsing(struct rtw_efuse *efuse,
33 struct rtw8822c_efuse *map)
34 {
35 ether_addr_copy(efuse->addr, map->u.mac_addr);
36 }
37
38 static void rtw8822cs_efuse_parsing(struct rtw_efuse *efuse,
39 struct rtw8822c_efuse *map)
40 {
41 ether_addr_copy(efuse->addr, map->s.mac_addr);
42 }
43
44 static int rtw8822c_read_efuse(struct rtw_dev *rtwdev, u8 *log_map)
45 {
46 struct rtw_efuse *efuse = &rtwdev->efuse;
47 struct rtw8822c_efuse *map;
48 int i;
49
50 map = (struct rtw8822c_efuse *)log_map;
51
52 efuse->usb_mode_switch = u8_get_bits(map->usb_mode, BIT(7));
53 efuse->rfe_option = map->rfe_option;
54 efuse->rf_board_option = map->rf_board_option;
55 efuse->crystal_cap = map->xtal_k & XCAP_MASK;
56 efuse->channel_plan = map->channel_plan;
57 efuse->country_code[0] = map->country_code[0];
58 efuse->country_code[1] = map->country_code[1];
59 efuse->bt_setting = map->rf_bt_setting;
60 efuse->regd = map->rf_board_option & 0x7;
61 efuse->thermal_meter[RF_PATH_A] = map->path_a_thermal;
62 efuse->thermal_meter[RF_PATH_B] = map->path_b_thermal;
63 efuse->thermal_meter_k =
64 (map->path_a_thermal + map->path_b_thermal) >> 1;
65 efuse->power_track_type = (map->tx_pwr_calibrate_rate >> 4) & 0xf;
66
67 for (i = 0; i < 4; i++)
68 efuse->txpwr_idx_table[i] = map->txpwr_idx_table[i];
69
70 switch (rtw_hci_type(rtwdev)) {
71 case RTW_HCI_TYPE_PCIE:
72 rtw8822ce_efuse_parsing(efuse, map);
73 break;
74 case RTW_HCI_TYPE_USB:
75 rtw8822cu_efuse_parsing(efuse, map);
76 break;
77 case RTW_HCI_TYPE_SDIO:
78 rtw8822cs_efuse_parsing(efuse, map);
79 break;
80 default:
81 /* unsupported now */
82 return -ENOTSUPP;
83 }
84
85 return 0;
86 }
87
88 static void rtw8822c_header_file_init(struct rtw_dev *rtwdev, bool pre)
89 {
90 rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
91 rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_PI_ON);
92 rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
93 rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_PI_ON);
94
95 if (pre)
96 rtw_write32_clr(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
97 else
98 rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
99 }
100
101 static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
102 {
103 rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
104 rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
105 rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
106 }
107
108 static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
109 struct rtw_backup_info *backup,
110 struct rtw_backup_info *backup_rf)
111 {
112 u32 path, i;
113 u32 val;
114 u32 reg;
115 u32 rf_addr[DACK_RF_8822C] = {0x8f};
116 u32 addrs[DACK_REG_8822C] = {0x180c, 0x1810, 0x410c, 0x4110,
117 0x1c3c, 0x1c24, 0x1d70, 0x9b4,
118 0x1a00, 0x1a14, 0x1d58, 0x1c38,
119 0x1e24, 0x1e28, 0x1860, 0x4160};
120
121 for (i = 0; i < DACK_REG_8822C; i++) {
122 backup[i].len = 4;
123 backup[i].reg = addrs[i];
124 backup[i].val = rtw_read32(rtwdev, addrs[i]);
125 }
126
127 for (path = 0; path < DACK_PATH_8822C; path++) {
128 for (i = 0; i < DACK_RF_8822C; i++) {
129 reg = rf_addr[i];
130 val = rtw_read_rf(rtwdev, path, reg, RFREG_MASK);
131 backup_rf[path * i + i].reg = reg;
132 backup_rf[path * i + i].val = val;
133 }
134 }
135 }
136
137 static void rtw8822c_dac_restore_reg(struct rtw_dev *rtwdev,
138 struct rtw_backup_info *backup,
139 struct rtw_backup_info *backup_rf)
140 {
141 u32 path, i;
142 u32 val;
143 u32 reg;
144
145 rtw_restore_reg(rtwdev, backup, DACK_REG_8822C);
146
147 for (path = 0; path < DACK_PATH_8822C; path++) {
148 for (i = 0; i < DACK_RF_8822C; i++) {
149 val = backup_rf[path * i + i].val;
150 reg = backup_rf[path * i + i].reg;
151 rtw_write_rf(rtwdev, path, reg, RFREG_MASK, val);
152 }
153 }
154 }
155
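/* The DACK samples handled below are 10-bit two's-complement values:
 * raw readings at or above 0x200 encode negative numbers, so they are
 * treated as smaller than any reading below 0x200 when tracking the
 * signed minimum and maximum.
 */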
156 static void rtw8822c_rf_minmax_cmp(struct rtw_dev *rtwdev, u32 value,
157 u32 *min, u32 *max)
158 {
159 if (value >= 0x200) {
160 if (*min >= 0x200) {
161 if (*min > value)
162 *min = value;
163 } else {
164 *min = value;
165 }
166 if (*max >= 0x200) {
167 if (*max < value)
168 *max = value;
169 }
170 } else {
171 if (*min < 0x200) {
172 if (*min > value)
173 *min = value;
174 }
175
176 if (*max >= 0x200) {
177 *max = value;
178 } else {
179 if (*max < value)
180 *max = value;
181 }
182 }
183 }
184
185 static void __rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *v1, u32 *v2)
186 {
187 if (*v1 >= 0x200 && *v2 >= 0x200) {
188 if (*v1 > *v2)
189 swap(*v1, *v2);
190 } else if (*v1 < 0x200 && *v2 < 0x200) {
191 if (*v1 > *v2)
192 swap(*v1, *v2);
193 } else if (*v1 < 0x200 && *v2 >= 0x200) {
194 swap(*v1, *v2);
195 }
196 }
197
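/* Bubble-sort the I and Q sample arrays into ascending signed order,
 * with the two's-complement encoding preserved: raw values >= 0x200
 * (negative) end up in front of the values below 0x200 (positive).
 */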
198 static void rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
199 {
200 u32 i, j;
201
202 for (i = 0; i < DACK_SN_8822C - 1; i++) {
203 for (j = 0; j < (DACK_SN_8822C - 1 - i) ; j++) {
204 __rtw8822c_dac_iq_sort(rtwdev, &iv[j], &iv[j + 1]);
205 __rtw8822c_dac_iq_sort(rtwdev, &qv[j], &qv[j + 1]);
206 }
207 }
208 }
209
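/* Average the sorted samples with the 10 smallest and 10 largest
 * entries discarded, treating raw values above 0x200 as negative, and
 * return the DC offset in the same 10-bit two's-complement format.
 */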
210 static void rtw8822c_dac_iq_offset(struct rtw_dev *rtwdev, u32 *vec, u32 *val)
211 {
212 u32 p, m, t, i;
213
214 m = 0;
215 p = 0;
216 for (i = 10; i < DACK_SN_8822C - 10; i++) {
217 if (vec[i] > 0x200)
218 m = (0x400 - vec[i]) + m;
219 else
220 p = vec[i] + p;
221 }
222
223 if (p > m) {
224 t = p - m;
225 t = t / (DACK_SN_8822C - 20);
226 } else {
227 t = m - p;
228 t = t / (DACK_SN_8822C - 20);
229 if (t != 0x0)
230 t = 0x400 - t;
231 }
232
233 *val = t;
234 }
235
236 static u32 rtw8822c_get_path_write_addr(u8 path)
237 {
238 u32 base_addr;
239
240 switch (path) {
241 case RF_PATH_A:
242 base_addr = 0x1800;
243 break;
244 case RF_PATH_B:
245 base_addr = 0x4100;
246 break;
247 default:
248 WARN_ON(1);
249 return -1;
250 }
251
252 return base_addr;
253 }
254
255 static u32 rtw8822c_get_path_read_addr(u8 path)
256 {
257 u32 base_addr;
258
259 switch (path) {
260 case RF_PATH_A:
261 base_addr = 0x2800;
262 break;
263 case RF_PATH_B:
264 base_addr = 0x4500;
265 break;
266 default:
267 WARN_ON(1);
268 return -1;
269 }
270
271 return base_addr;
272 }
273
274 static bool rtw8822c_dac_iq_check(struct rtw_dev *rtwdev, u32 value)
275 {
276 bool ret = true;
277
278 if ((value >= 0x200 && (0x400 - value) > 0x64) ||
279 (value < 0x200 && value > 0x64)) {
280 ret = false;
281 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] Error overflow\n");
282 }
283
284 return ret;
285 }
286
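/* Collect DACK_SN_8822C valid I/Q sample pairs from the report
 * register 0x2dbc; pairs whose DC magnitude exceeds 0x64 are rejected
 * and re-read, bounded by a retry counter.
 */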
287 static void rtw8822c_dac_cal_iq_sample(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
288 {
289 u32 temp;
290 int i = 0, cnt = 0;
291
292 while (i < DACK_SN_8822C && cnt < 10000) {
293 cnt++;
294 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
295 iv[i] = (temp & 0x3ff000) >> 12;
296 qv[i] = temp & 0x3ff;
297
298 if (rtw8822c_dac_iq_check(rtwdev, iv[i]) &&
299 rtw8822c_dac_iq_check(rtwdev, qv[i]))
300 i++;
301 }
302 }
303
304 static void rtw8822c_dac_cal_iq_search(struct rtw_dev *rtwdev,
305 u32 *iv, u32 *qv,
306 u32 *i_value, u32 *q_value)
307 {
308 u32 i_max = 0, q_max = 0, i_min = 0, q_min = 0;
309 u32 i_delta, q_delta;
310 u32 temp;
311 int i, cnt = 0;
312
313 do {
314 i_min = iv[0];
315 i_max = iv[0];
316 q_min = qv[0];
317 q_max = qv[0];
318 for (i = 0; i < DACK_SN_8822C; i++) {
319 rtw8822c_rf_minmax_cmp(rtwdev, iv[i], &i_min, &i_max);
320 rtw8822c_rf_minmax_cmp(rtwdev, qv[i], &q_min, &q_max);
321 }
322
323 if (i_max < 0x200 && i_min < 0x200)
324 i_delta = i_max - i_min;
325 else if (i_max >= 0x200 && i_min >= 0x200)
326 i_delta = i_max - i_min;
327 else
328 i_delta = i_max + (0x400 - i_min);
329
330 if (q_max < 0x200 && q_min < 0x200)
331 q_delta = q_max - q_min;
332 else if (q_max >= 0x200 && q_min >= 0x200)
333 q_delta = q_max - q_min;
334 else
335 q_delta = q_max + (0x400 - q_min);
336
337 rtw_dbg(rtwdev, RTW_DBG_RFK,
338 "[DACK] i: min=0x%08x, max=0x%08x, delta=0x%08x\n",
339 i_min, i_max, i_delta);
340 rtw_dbg(rtwdev, RTW_DBG_RFK,
341 "[DACK] q: min=0x%08x, max=0x%08x, delta=0x%08x\n",
342 q_min, q_max, q_delta);
343
344 rtw8822c_dac_iq_sort(rtwdev, iv, qv);
345
346 if (i_delta > 5 || q_delta > 5) {
347 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
348 iv[0] = (temp & 0x3ff000) >> 12;
349 qv[0] = temp & 0x3ff;
350 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
351 iv[DACK_SN_8822C - 1] = (temp & 0x3ff000) >> 12;
352 qv[DACK_SN_8822C - 1] = temp & 0x3ff;
353 } else {
354 break;
355 }
356 } while (cnt++ < 100);
357
358 rtw8822c_dac_iq_offset(rtwdev, iv, i_value);
359 rtw8822c_dac_iq_offset(rtwdev, qv, q_value);
360 }
361
362 static void rtw8822c_dac_cal_rf_mode(struct rtw_dev *rtwdev,
363 u32 *i_value, u32 *q_value)
364 {
365 u32 iv[DACK_SN_8822C], qv[DACK_SN_8822C];
366 u32 rf_a, rf_b;
367
368 rf_a = rtw_read_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK);
369 rf_b = rtw_read_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK);
370
371 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-A=0x%05x\n", rf_a);
372 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-B=0x%05x\n", rf_b);
373
374 rtw8822c_dac_cal_iq_sample(rtwdev, iv, qv);
375 rtw8822c_dac_cal_iq_search(rtwdev, iv, qv, i_value, q_value);
376 }
377
378 static void rtw8822c_dac_bb_setting(struct rtw_dev *rtwdev)
379 {
380 rtw_write32_mask(rtwdev, 0x1d58, 0xff8, 0x1ff);
381 rtw_write32_mask(rtwdev, 0x1a00, 0x3, 0x2);
382 rtw_write32_mask(rtwdev, 0x1a14, 0x300, 0x3);
383 rtw_write32(rtwdev, 0x1d70, 0x7e7e7e7e);
384 rtw_write32_mask(rtwdev, 0x180c, 0x3, 0x0);
385 rtw_write32_mask(rtwdev, 0x410c, 0x3, 0x0);
386 rtw_write32(rtwdev, 0x1b00, 0x00000008);
387 rtw_write8(rtwdev, 0x1bcc, 0x3f);
388 rtw_write32(rtwdev, 0x1b00, 0x0000000a);
389 rtw_write8(rtwdev, 0x1bcc, 0x3f);
390 rtw_write32_mask(rtwdev, 0x1e24, BIT(31), 0x0);
391 rtw_write32_mask(rtwdev, 0x1e28, 0xf, 0x3);
392 }
393
394 static void rtw8822c_dac_cal_adc(struct rtw_dev *rtwdev,
395 u8 path, u32 *adc_ic, u32 *adc_qc)
396 {
397 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
398 u32 ic = 0, qc = 0, temp = 0;
399 u32 base_addr;
400 u32 path_sel;
401 int i;
402
403 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK path(%d)\n", path);
404
405 base_addr = rtw8822c_get_path_write_addr(path);
406 switch (path) {
407 case RF_PATH_A:
408 path_sel = 0xa0000;
409 break;
410 case RF_PATH_B:
411 path_sel = 0x80000;
412 break;
413 default:
414 WARN_ON(1);
415 return;
416 }
417
418 /* ADCK step1 */
419 rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x0);
420 if (path == RF_PATH_B)
421 rtw_write32(rtwdev, base_addr + 0x30, 0x30db8041);
422 rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
423 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
424 rtw_write32(rtwdev, base_addr + 0x10, 0x02dd08c4);
425 rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
426 rtw_write_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK, 0x10000);
427 rtw_write_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK, 0x10000);
428 for (i = 0; i < 10; i++) {
429 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK count=%d\n", i);
430 rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8003);
431 rtw_write32(rtwdev, 0x1c24, 0x00010002);
432 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
433 rtw_dbg(rtwdev, RTW_DBG_RFK,
434 "[DACK] before: i=0x%x, q=0x%x\n", ic, qc);
435
436 /* compensation value */
437 if (ic != 0x0) {
438 ic = 0x400 - ic;
439 *adc_ic = ic;
440 }
441 if (qc != 0x0) {
442 qc = 0x400 - qc;
443 *adc_qc = qc;
444 }
445 temp = (ic & 0x3ff) | ((qc & 0x3ff) << 10);
446 rtw_write32(rtwdev, base_addr + 0x68, temp);
447 dm_info->dack_adck[path] = temp;
448 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK 0x%08x=0x%08x\n",
449 base_addr + 0x68, temp);
450 /* check ADC DC offset */
451 rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8103);
452 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
453 rtw_dbg(rtwdev, RTW_DBG_RFK,
454 "[DACK] after: i=0x%08x, q=0x%08x\n", ic, qc);
455 if (ic >= 0x200)
456 ic = 0x400 - ic;
457 if (qc >= 0x200)
458 qc = 0x400 - qc;
459 if (ic < 5 && qc < 5)
460 break;
461 }
462
463 /* ADCK step2 */
464 rtw_write32(rtwdev, 0x1c3c, 0x00000003);
465 rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
466 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
467
468 /* release pull low switch on IQ path */
469 rtw_write_rf(rtwdev, path, 0x8f, BIT(13), 0x1);
470 }
471
472 static void rtw8822c_dac_cal_step1(struct rtw_dev *rtwdev, u8 path)
473 {
474 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
475 u32 base_addr;
476 u32 read_addr;
477
478 base_addr = rtw8822c_get_path_write_addr(path);
479 read_addr = rtw8822c_get_path_read_addr(path);
480
481 rtw_write32(rtwdev, base_addr + 0x68, dm_info->dack_adck[path]);
482 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
483 if (path == RF_PATH_A) {
484 rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
485 rtw_write32(rtwdev, 0x1c38, 0xffffffff);
486 }
487 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
488 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
489 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
490 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff81);
491 rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
492 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
493 rtw_write32(rtwdev, base_addr + 0xd8, 0x0008ff81);
494 rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
495 rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
496 mdelay(2);
497 rtw_write32(rtwdev, base_addr + 0xbc, 0x000aff8d);
498 mdelay(2);
499 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
500 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
501 mdelay(1);
502 rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
503 rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
504 mdelay(20);
505 if (!check_hw_ready(rtwdev, read_addr + 0x08, 0x7fff80, 0xffff) ||
506 !check_hw_ready(rtwdev, read_addr + 0x34, 0x7fff80, 0xffff))
507 rtw_err(rtwdev, "failed to wait for dack ready\n");
508 rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
509 mdelay(1);
510 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
511 rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
512 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
513 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
514 rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
515 }
516
517 static void rtw8822c_dac_cal_step2(struct rtw_dev *rtwdev,
518 u8 path, u32 *ic_out, u32 *qc_out)
519 {
520 u32 base_addr;
521 u32 ic, qc, ic_in, qc_in;
522
523 base_addr = rtw8822c_get_path_write_addr(path);
524 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, 0x0);
525 rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, 0x8);
526 rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, 0x0);
527 rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, 0x8);
528
529 rtw_write32(rtwdev, 0x1b00, 0x00000008);
530 rtw_write8(rtwdev, 0x1bcc, 0x03f);
531 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
532 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
533 rtw_write32(rtwdev, 0x1c3c, 0x00088103);
534
535 rtw8822c_dac_cal_rf_mode(rtwdev, &ic_in, &qc_in);
536 ic = ic_in;
537 qc = qc_in;
538
539 /* compensation value */
540 if (ic != 0x0)
541 ic = 0x400 - ic;
542 if (qc != 0x0)
543 qc = 0x400 - qc;
544 if (ic < 0x300) {
545 ic = ic * 2 * 6 / 5;
546 ic = ic + 0x80;
547 } else {
548 ic = (0x400 - ic) * 2 * 6 / 5;
549 ic = 0x7f - ic;
550 }
551 if (qc < 0x300) {
552 qc = qc * 2 * 6 / 5;
553 qc = qc + 0x80;
554 } else {
555 qc = (0x400 - qc) * 2 * 6 / 5;
556 qc = 0x7f - qc;
557 }
558
559 *ic_out = ic;
560 *qc_out = qc;
561
562 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] before i=0x%x, q=0x%x\n", ic_in, qc_in);
563 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] after i=0x%x, q=0x%x\n", ic, qc);
564 }
565
566 static void rtw8822c_dac_cal_step3(struct rtw_dev *rtwdev, u8 path,
567 u32 adc_ic, u32 adc_qc,
568 u32 *ic_in, u32 *qc_in,
569 u32 *i_out, u32 *q_out)
570 {
571 u32 base_addr;
572 u32 read_addr;
573 u32 ic, qc;
574 u32 temp;
575
576 base_addr = rtw8822c_get_path_write_addr(path);
577 read_addr = rtw8822c_get_path_read_addr(path);
578 ic = *ic_in;
579 qc = *qc_in;
580
581 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
582 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
583 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
584 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
585 rtw_write32(rtwdev, base_addr + 0xbc, 0xc008ff81);
586 rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
587 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, ic & 0xf);
588 rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, (ic & 0xf0) >> 4);
589 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
590 rtw_write32(rtwdev, base_addr + 0xd8, 0xe008ff81);
591 rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
592 rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, qc & 0xf);
593 rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, (qc & 0xf0) >> 4);
594 rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
595 mdelay(2);
596 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x6);
597 mdelay(2);
598 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
599 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
600 mdelay(1);
601 rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
602 rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
603 mdelay(20);
604 if (!check_hw_ready(rtwdev, read_addr + 0x24, 0x07f80000, ic) ||
605 !check_hw_ready(rtwdev, read_addr + 0x50, 0x07f80000, qc))
606 rtw_err(rtwdev, "failed to write IQ vector to hardware\n");
607 rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
608 mdelay(1);
609 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x3);
610 rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
611
612 /* check DAC DC offset */
613 temp = ((adc_ic + 0x10) & 0x3ff) | (((adc_qc + 0x10) & 0x3ff) << 10);
614 rtw_write32(rtwdev, base_addr + 0x68, temp);
615 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
616 rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
617 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
618 if (ic >= 0x10)
619 ic = ic - 0x10;
620 else
621 ic = 0x400 - (0x10 - ic);
622
623 if (qc >= 0x10)
624 qc = qc - 0x10;
625 else
626 qc = 0x400 - (0x10 - qc);
627
628 *i_out = ic;
629 *q_out = qc;
630
631 if (ic >= 0x200)
632 ic = 0x400 - ic;
633 if (qc >= 0x200)
634 qc = 0x400 - qc;
635
636 *ic_in = ic;
637 *qc_in = qc;
638
639 rtw_dbg(rtwdev, RTW_DBG_RFK,
640 "[DACK] after DACK i=0x%x, q=0x%x\n", *i_out, *q_out);
641 }
642
643 static void rtw8822c_dac_cal_step4(struct rtw_dev *rtwdev, u8 path)
644 {
645 u32 base_addr = rtw8822c_get_path_write_addr(path);
646
647 rtw_write32(rtwdev, base_addr + 0x68, 0x0);
648 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
649 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0x1, 0x0);
650 rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x1);
651 }
652
653 static void rtw8822c_dac_cal_backup_vec(struct rtw_dev *rtwdev,
654 u8 path, u8 vec, u32 w_addr, u32 r_addr)
655 {
656 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
657 u16 val;
658 u32 i;
659
660 if (WARN_ON(vec >= 2))
661 return;
662
663 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
664 rtw_write32_mask(rtwdev, w_addr, 0xf0000000, i);
665 val = (u16)rtw_read32_mask(rtwdev, r_addr, 0x7fc0000);
666 dm_info->dack_msbk[path][vec][i] = val;
667 }
668 }
669
670 static void rtw8822c_dac_cal_backup_path(struct rtw_dev *rtwdev, u8 path)
671 {
672 u32 w_off = 0x1c;
673 u32 r_off = 0x2c;
674 u32 w_addr, r_addr;
675
676 if (WARN_ON(path >= 2))
677 return;
678
679 /* backup I vector */
680 w_addr = rtw8822c_get_path_write_addr(path) + 0xb0;
681 r_addr = rtw8822c_get_path_read_addr(path) + 0x10;
682 rtw8822c_dac_cal_backup_vec(rtwdev, path, 0, w_addr, r_addr);
683
684 /* backup Q vector */
685 w_addr = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
686 r_addr = rtw8822c_get_path_read_addr(path) + 0x10 + r_off;
687 rtw8822c_dac_cal_backup_vec(rtwdev, path, 1, w_addr, r_addr);
688 }
689
690 static void rtw8822c_dac_cal_backup_dck(struct rtw_dev *rtwdev)
691 {
692 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
693 u8 val;
694
695 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000);
696 dm_info->dack_dck[RF_PATH_A][0][0] = val;
697 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_1, 0xf);
698 dm_info->dack_dck[RF_PATH_A][0][1] = val;
699 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000);
700 dm_info->dack_dck[RF_PATH_A][1][0] = val;
701 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_1, 0xf);
702 dm_info->dack_dck[RF_PATH_A][1][1] = val;
703
704 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000);
705 dm_info->dack_dck[RF_PATH_B][0][0] = val;
706 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_1, 0xf);
707 dm_info->dack_dck[RF_PATH_B][1][0] = val;
708 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000);
709 dm_info->dack_dck[RF_PATH_B][0][1] = val;
710 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_1, 0xf);
711 dm_info->dack_dck[RF_PATH_B][1][1] = val;
712 }
713
714 static void rtw8822c_dac_cal_backup(struct rtw_dev *rtwdev)
715 {
716 u32 temp[3];
717
718 temp[0] = rtw_read32(rtwdev, 0x1860);
719 temp[1] = rtw_read32(rtwdev, 0x4160);
720 temp[2] = rtw_read32(rtwdev, 0x9b4);
721
722 /* set clock */
723 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
724
725 /* backup path-A I/Q */
726 rtw_write32_clr(rtwdev, 0x1830, BIT(30));
727 rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
728 rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_A);
729
730 /* backup path-B I/Q */
731 rtw_write32_clr(rtwdev, 0x4130, BIT(30));
732 rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
733 rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_B);
734
735 rtw8822c_dac_cal_backup_dck(rtwdev);
736 rtw_write32_set(rtwdev, 0x1830, BIT(30));
737 rtw_write32_set(rtwdev, 0x4130, BIT(30));
738
739 rtw_write32(rtwdev, 0x1860, temp[0]);
740 rtw_write32(rtwdev, 0x4160, temp[1]);
741 rtw_write32(rtwdev, 0x9b4, temp[2]);
742 }
743
744 static void rtw8822c_dac_cal_restore_dck(struct rtw_dev *rtwdev)
745 {
746 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
747 u8 val;
748
749 rtw_write32_set(rtwdev, REG_DCKA_I_0, BIT(19));
750 val = dm_info->dack_dck[RF_PATH_A][0][0];
751 rtw_write32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000, val);
752 val = dm_info->dack_dck[RF_PATH_A][0][1];
753 rtw_write32_mask(rtwdev, REG_DCKA_I_1, 0xf, val);
754
755 rtw_write32_set(rtwdev, REG_DCKA_Q_0, BIT(19));
756 val = dm_info->dack_dck[RF_PATH_A][1][0];
757 rtw_write32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000, val);
758 val = dm_info->dack_dck[RF_PATH_A][1][1];
759 rtw_write32_mask(rtwdev, REG_DCKA_Q_1, 0xf, val);
760
761 rtw_write32_set(rtwdev, REG_DCKB_I_0, BIT(19));
762 val = dm_info->dack_dck[RF_PATH_B][0][0];
763 rtw_write32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000, val);
764 val = dm_info->dack_dck[RF_PATH_B][0][1];
765 rtw_write32_mask(rtwdev, REG_DCKB_I_1, 0xf, val);
766
767 rtw_write32_set(rtwdev, REG_DCKB_Q_0, BIT(19));
768 val = dm_info->dack_dck[RF_PATH_B][1][0];
769 rtw_write32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000, val);
770 val = dm_info->dack_dck[RF_PATH_B][1][1];
771 rtw_write32_mask(rtwdev, REG_DCKB_Q_1, 0xf, val);
772 }
773
774 static void rtw8822c_dac_cal_restore_prepare(struct rtw_dev *rtwdev)
775 {
776 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
777
778 rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x0);
779 rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x0);
780 rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x0);
781 rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x0);
782
783 rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x0);
784 rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
785 rtw_write32_mask(rtwdev, 0x18b4, BIT(0), 0x1);
786 rtw_write32_mask(rtwdev, 0x18d0, BIT(0), 0x1);
787
788 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x0);
789 rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
790 rtw_write32_mask(rtwdev, 0x41b4, BIT(0), 0x1);
791 rtw_write32_mask(rtwdev, 0x41d0, BIT(0), 0x1);
792
793 rtw_write32_mask(rtwdev, 0x18b0, 0xf00, 0x0);
794 rtw_write32_mask(rtwdev, 0x18c0, BIT(14), 0x0);
795 rtw_write32_mask(rtwdev, 0x18cc, 0xf00, 0x0);
796 rtw_write32_mask(rtwdev, 0x18dc, BIT(14), 0x0);
797
798 rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x0);
799 rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x0);
800 rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x1);
801 rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x1);
802
803 rtw8822c_dac_cal_restore_dck(rtwdev);
804
805 rtw_write32_mask(rtwdev, 0x18c0, 0x38000, 0x7);
806 rtw_write32_mask(rtwdev, 0x18dc, 0x38000, 0x7);
807 rtw_write32_mask(rtwdev, 0x41c0, 0x38000, 0x7);
808 rtw_write32_mask(rtwdev, 0x41dc, 0x38000, 0x7);
809
810 rtw_write32_mask(rtwdev, 0x18b8, BIT(26) | BIT(25), 0x1);
811 rtw_write32_mask(rtwdev, 0x18d4, BIT(26) | BIT(25), 0x1);
812
813 rtw_write32_mask(rtwdev, 0x41b0, 0xf00, 0x0);
814 rtw_write32_mask(rtwdev, 0x41c0, BIT(14), 0x0);
815 rtw_write32_mask(rtwdev, 0x41cc, 0xf00, 0x0);
816 rtw_write32_mask(rtwdev, 0x41dc, BIT(14), 0x0);
817
818 rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x0);
819 rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x0);
820 rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x1);
821 rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x1);
822
823 rtw_write32_mask(rtwdev, 0x41b8, BIT(26) | BIT(25), 0x1);
824 rtw_write32_mask(rtwdev, 0x41d4, BIT(26) | BIT(25), 0x1);
825 }
826
827 static bool rtw8822c_dac_cal_restore_wait(struct rtw_dev *rtwdev,
828 u32 target_addr, u32 toggle_addr)
829 {
830 u32 cnt = 0;
831
832 do {
833 rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x0);
834 rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x2);
835
836 if (rtw_read32_mask(rtwdev, target_addr, 0xf) == 0x6)
837 return true;
838
839 } while (cnt++ < 100);
840
841 return false;
842 }
843
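/* Replay the backed-up MSBK words for one path: wait until the I and
 * Q blocks report the expected state (0x6), then write each stored
 * entry back by selecting its index and toggling BIT(2) around the
 * value write.
 */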
844 static bool rtw8822c_dac_cal_restore_path(struct rtw_dev *rtwdev, u8 path)
845 {
846 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
847 u32 w_off = 0x1c;
848 u32 r_off = 0x2c;
849 u32 w_i, r_i, w_q, r_q;
850 u32 value;
851 u32 i;
852
853 w_i = rtw8822c_get_path_write_addr(path) + 0xb0;
854 r_i = rtw8822c_get_path_read_addr(path) + 0x08;
855 w_q = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
856 r_q = rtw8822c_get_path_read_addr(path) + 0x08 + r_off;
857
858 if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_i, w_i + 0x8))
859 return false;
860
861 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
862 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
863 value = dm_info->dack_msbk[path][0][i];
864 rtw_write32_mask(rtwdev, w_i + 0x4, 0xff8, value);
865 rtw_write32_mask(rtwdev, w_i, 0xf0000000, i);
866 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x1);
867 }
868
869 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
870
871 if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_q, w_q + 0x8))
872 return false;
873
874 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
875 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
876 value = dm_info->dack_msbk[path][1][i];
877 rtw_write32_mask(rtwdev, w_q + 0x4, 0xff8, value);
878 rtw_write32_mask(rtwdev, w_q, 0xf0000000, i);
879 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x1);
880 }
881 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
882
883 rtw_write32_mask(rtwdev, w_i + 0x8, BIT(26) | BIT(25), 0x0);
884 rtw_write32_mask(rtwdev, w_q + 0x8, BIT(26) | BIT(25), 0x0);
885 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(0), 0x0);
886 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(0), 0x0);
887
888 return true;
889 }
890
891 static bool __rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
892 {
893 if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_A))
894 return false;
895
896 if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_B))
897 return false;
898
899 return true;
900 }
901
902 static bool rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
903 {
904 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
905 u32 temp[3];
906
907 /* sample the first element of both paths' IQ vectors */
908 if (dm_info->dack_msbk[RF_PATH_A][0][0] == 0 &&
909 dm_info->dack_msbk[RF_PATH_A][1][0] == 0 &&
910 dm_info->dack_msbk[RF_PATH_B][0][0] == 0 &&
911 dm_info->dack_msbk[RF_PATH_B][1][0] == 0)
912 return false;
913
914 temp[0] = rtw_read32(rtwdev, 0x1860);
915 temp[1] = rtw_read32(rtwdev, 0x4160);
916 temp[2] = rtw_read32(rtwdev, 0x9b4);
917
918 rtw8822c_dac_cal_restore_prepare(rtwdev);
919 if (!check_hw_ready(rtwdev, 0x2808, 0x7fff80, 0xffff) ||
920 !check_hw_ready(rtwdev, 0x2834, 0x7fff80, 0xffff) ||
921 !check_hw_ready(rtwdev, 0x4508, 0x7fff80, 0xffff) ||
922 !check_hw_ready(rtwdev, 0x4534, 0x7fff80, 0xffff))
923 return false;
924
925 if (!__rtw8822c_dac_cal_restore(rtwdev)) {
926 rtw_err(rtwdev, "failed to restore dack vectors\n");
927 return false;
928 }
929
930 rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x1);
931 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
932 rtw_write32(rtwdev, 0x1860, temp[0]);
933 rtw_write32(rtwdev, 0x4160, temp[1]);
934 rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x1);
935 rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x1);
936 rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x1);
937 rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x1);
938 rtw_write32(rtwdev, 0x9b4, temp[2]);
939
940 return true;
941 }
942
943 static void rtw8822c_rf_dac_cal(struct rtw_dev *rtwdev)
944 {
945 struct rtw_backup_info backup_rf[DACK_RF_8822C * DACK_PATH_8822C];
946 struct rtw_backup_info backup[DACK_REG_8822C];
947 u32 ic = 0, qc = 0, i;
948 u32 i_a = 0x0, q_a = 0x0, i_b = 0x0, q_b = 0x0;
949 u32 ic_a = 0x0, qc_a = 0x0, ic_b = 0x0, qc_b = 0x0;
950 u32 adc_ic_a = 0x0, adc_qc_a = 0x0, adc_ic_b = 0x0, adc_qc_b = 0x0;
951
952 if (rtw8822c_dac_cal_restore(rtwdev))
953 return;
954
955 /* not able to restore the previous results, do the full calibration */
956
957 rtw8822c_dac_backup_reg(rtwdev, backup, backup_rf);
958
959 rtw8822c_dac_bb_setting(rtwdev);
960
961 /* path-A */
962 rtw8822c_dac_cal_adc(rtwdev, RF_PATH_A, &adc_ic_a, &adc_qc_a);
963 for (i = 0; i < 10; i++) {
964 rtw8822c_dac_cal_step1(rtwdev, RF_PATH_A);
965 rtw8822c_dac_cal_step2(rtwdev, RF_PATH_A, &ic, &qc);
966 ic_a = ic;
967 qc_a = qc;
968
969 rtw8822c_dac_cal_step3(rtwdev, RF_PATH_A, adc_ic_a, adc_qc_a,
970 &ic, &qc, &i_a, &q_a);
971
972 if (ic < 5 && qc < 5)
973 break;
974 }
975 rtw8822c_dac_cal_step4(rtwdev, RF_PATH_A);
976
977 /* path-B */
978 rtw8822c_dac_cal_adc(rtwdev, RF_PATH_B, &adc_ic_b, &adc_qc_b);
979 for (i = 0; i < 10; i++) {
980 rtw8822c_dac_cal_step1(rtwdev, RF_PATH_B);
981 rtw8822c_dac_cal_step2(rtwdev, RF_PATH_B, &ic, &qc);
982 ic_b = ic;
983 qc_b = qc;
984
985 rtw8822c_dac_cal_step3(rtwdev, RF_PATH_B, adc_ic_b, adc_qc_b,
986 &ic, &qc, &i_b, &q_b);
987
988 if (ic < 5 && qc < 5)
989 break;
990 }
991 rtw8822c_dac_cal_step4(rtwdev, RF_PATH_B);
992
993 rtw_write32(rtwdev, 0x1b00, 0x00000008);
994 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
995 rtw_write8(rtwdev, 0x1bcc, 0x0);
996 rtw_write32(rtwdev, 0x1b00, 0x0000000a);
997 rtw_write8(rtwdev, 0x1bcc, 0x0);
998
999 rtw8822c_dac_restore_reg(rtwdev, backup, backup_rf);
1000
1001 /* backup results to restore, saving a lot of time */
1002 rtw8822c_dac_cal_backup(rtwdev);
1003
1004 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: ic=0x%x, qc=0x%x\n", ic_a, qc_a);
1005 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: ic=0x%x, qc=0x%x\n", ic_b, qc_b);
1006 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: i=0x%x, q=0x%x\n", i_a, q_a);
1007 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: i=0x%x, q=0x%x\n", i_b, q_b);
1008 }
1009
1010 static void rtw8822c_rf_x2_check(struct rtw_dev *rtwdev)
1011 {
1012 u8 x2k_busy;
1013
1014 mdelay(1);
1015 x2k_busy = rtw_read_rf(rtwdev, RF_PATH_A, 0xb8, BIT(15));
1016 if (x2k_busy == 1) {
1017 rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0xC4440);
1018 rtw_write_rf(rtwdev, RF_PATH_A, 0xba, RFREG_MASK, 0x6840D);
1019 rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0x80440);
1020 mdelay(1);
1021 }
1022 }
1023
1024 static void rtw8822c_set_power_trim(struct rtw_dev *rtwdev, s8 bb_gain[2][8])
1025 {
1026 #define RF_SET_POWER_TRIM(_path, _seq, _idx) \
1027 do { \
1028 rtw_write_rf(rtwdev, _path, 0x33, RFREG_MASK, _seq); \
1029 rtw_write_rf(rtwdev, _path, 0x3f, RFREG_MASK, \
1030 bb_gain[_path][_idx]); \
1031 } while (0)
1032 u8 path;
1033
1034 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1035 rtw_write_rf(rtwdev, path, 0xee, BIT(19), 1);
1036 RF_SET_POWER_TRIM(path, 0x0, 0);
1037 RF_SET_POWER_TRIM(path, 0x1, 1);
1038 RF_SET_POWER_TRIM(path, 0x2, 2);
1039 RF_SET_POWER_TRIM(path, 0x3, 2);
1040 RF_SET_POWER_TRIM(path, 0x4, 3);
1041 RF_SET_POWER_TRIM(path, 0x5, 4);
1042 RF_SET_POWER_TRIM(path, 0x6, 5);
1043 RF_SET_POWER_TRIM(path, 0x7, 6);
1044 RF_SET_POWER_TRIM(path, 0x8, 7);
1045 RF_SET_POWER_TRIM(path, 0x9, 3);
1046 RF_SET_POWER_TRIM(path, 0xa, 4);
1047 RF_SET_POWER_TRIM(path, 0xb, 5);
1048 RF_SET_POWER_TRIM(path, 0xc, 6);
1049 RF_SET_POWER_TRIM(path, 0xd, 7);
1050 RF_SET_POWER_TRIM(path, 0xe, 7);
1051 rtw_write_rf(rtwdev, path, 0xee, BIT(19), 0);
1052 }
1053 #undef RF_SET_POWER_TRIM
1054 }
1055
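/* Read the per-band TX power-trim values programmed in the physical
 * efuse and, if any are present, load them into the RF gain table via
 * rtw8822c_set_power_trim(); DPD is then disabled for all rates.
 */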
1056 static void rtw8822c_power_trim(struct rtw_dev *rtwdev)
1057 {
1058 u8 pg_pwr = 0xff, i, path, idx;
1059 s8 bb_gain[2][8] = {};
1060 u16 rf_efuse_2g[3] = {PPG_2GL_TXAB, PPG_2GM_TXAB, PPG_2GH_TXAB};
1061 u16 rf_efuse_5g[2][5] = {{PPG_5GL1_TXA, PPG_5GL2_TXA, PPG_5GM1_TXA,
1062 PPG_5GM2_TXA, PPG_5GH1_TXA},
1063 {PPG_5GL1_TXB, PPG_5GL2_TXB, PPG_5GM1_TXB,
1064 PPG_5GM2_TXB, PPG_5GH1_TXB} };
1065 bool set = false;
1066
1067 for (i = 0; i < ARRAY_SIZE(rf_efuse_2g); i++) {
1068 rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[i], &pg_pwr);
1069 if (pg_pwr == EFUSE_READ_FAIL)
1070 continue;
1071 set = true;
1072 bb_gain[RF_PATH_A][i] = FIELD_GET(PPG_2G_A_MASK, pg_pwr);
1073 bb_gain[RF_PATH_B][i] = FIELD_GET(PPG_2G_B_MASK, pg_pwr);
1074 }
1075
1076 for (i = 0; i < ARRAY_SIZE(rf_efuse_5g[0]); i++) {
1077 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1078 rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path][i],
1079 &pg_pwr);
1080 if (pg_pwr == EFUSE_READ_FAIL)
1081 continue;
1082 set = true;
1083 idx = i + ARRAY_SIZE(rf_efuse_2g);
1084 bb_gain[path][idx] = FIELD_GET(PPG_5G_MASK, pg_pwr);
1085 }
1086 }
1087 if (set)
1088 rtw8822c_set_power_trim(rtwdev, bb_gain);
1089
1090 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1091 }
1092
1093 static void rtw8822c_thermal_trim(struct rtw_dev *rtwdev)
1094 {
1095 u16 rf_efuse[2] = {PPG_THERMAL_A, PPG_THERMAL_B};
1096 u8 pg_therm = 0xff, thermal[2] = {0}, path;
1097
1098 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1099 rtw_read8_physical_efuse(rtwdev, rf_efuse[path], &pg_therm);
1100 if (pg_therm == EFUSE_READ_FAIL)
1101 return;
1102 /* Efuse value of BIT(0) shall be moved to BIT(3), and the value
1103 * of BIT(1) to BIT(3) should be right shifted 1 bit.
1104 */
1105 thermal[path] = FIELD_GET(GENMASK(3, 1), pg_therm);
1106 thermal[path] |= FIELD_PREP(BIT(3), pg_therm & BIT(0));
1107 rtw_write_rf(rtwdev, path, 0x43, RF_THEMAL_MASK, thermal[path]);
1108 }
1109 }
1110
1111 static void rtw8822c_pa_bias(struct rtw_dev *rtwdev)
1112 {
1113 u16 rf_efuse_2g[2] = {PPG_PABIAS_2GA, PPG_PABIAS_2GB};
1114 u16 rf_efuse_5g[2] = {PPG_PABIAS_5GA, PPG_PABIAS_5GB};
1115 u8 pg_pa_bias = 0xff, path;
1116
1117 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1118 rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[path],
1119 &pg_pa_bias);
1120 if (pg_pa_bias == EFUSE_READ_FAIL)
1121 return;
1122 pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1123 rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_2G_MASK, pg_pa_bias);
1124 }
1125 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1126 rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path],
1127 &pg_pa_bias);
1128 pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1129 rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_5G_MASK, pg_pa_bias);
1130 }
1131 }
1132
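/* Coordinate RF calibration with the BT firmware: before calibration,
 * wait for any ongoing BT IQK to finish and signal that the WiFi RFK
 * is starting; afterwards, signal that the WiFi RFK has finished.
 */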
1133 static void rtw8822c_rfk_handshake(struct rtw_dev *rtwdev, bool is_before_k)
1134 {
1135 struct rtw_dm_info *dm = &rtwdev->dm_info;
1136 u8 u1b_tmp;
1137 u8 u4b_tmp;
1138 int ret;
1139
1140 if (is_before_k) {
1141 rtw_dbg(rtwdev, RTW_DBG_RFK,
1142 "[RFK] WiFi / BT RFK handshake start!!\n");
1143
1144 if (!dm->is_bt_iqk_timeout) {
1145 ret = read_poll_timeout(rtw_read32_mask, u4b_tmp,
1146 u4b_tmp == 0, 20, 600000, false,
1147 rtwdev, REG_PMC_DBG_CTRL1,
1148 BITS_PMC_BT_IQK_STS);
1149 if (ret) {
1150 rtw_dbg(rtwdev, RTW_DBG_RFK,
1151 "[RFK] Wait BT IQK finish timeout!!\n");
1152 dm->is_bt_iqk_timeout = true;
1153 }
1154 }
1155
1156 rtw_fw_inform_rfk_status(rtwdev, true);
1157
1158 ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1159 u1b_tmp == 1, 20, 100000, false,
1160 rtwdev, REG_ARFR4, BIT_WL_RFK);
1161 if (ret)
1162 rtw_dbg(rtwdev, RTW_DBG_RFK,
1163 "[RFK] Send WiFi RFK start H2C cmd FAIL!!\n");
1164 } else {
1165 rtw_fw_inform_rfk_status(rtwdev, false);
1166 ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1167 u1b_tmp == 1, 20, 100000, false,
1168 rtwdev, REG_ARFR4,
1169 BIT_WL_RFK);
1170 if (ret)
1171 rtw_dbg(rtwdev, RTW_DBG_RFK,
1172 "[RFK] Send WiFi RFK finish H2C cmd FAIL!!\n");
1173
1174 rtw_dbg(rtwdev, RTW_DBG_RFK,
1175 "[RFK] WiFi / BT RFK handshake finish!!\n");
1176 }
1177 }
1178
1179 static void rtw8822c_rfk_power_save(struct rtw_dev *rtwdev,
1180 bool is_power_save)
1181 {
1182 u8 path;
1183
1184 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1185 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1186 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_PS_EN,
1187 is_power_save ? 0 : 1);
1188 }
1189 }
1190
1191 static void rtw8822c_txgapk_backup_bb_reg(struct rtw_dev *rtwdev, const u32 reg[],
1192 u32 reg_backup[], u32 reg_num)
1193 {
1194 u32 i;
1195
1196 for (i = 0; i < reg_num; i++) {
1197 reg_backup[i] = rtw_read32(rtwdev, reg[i]);
1198
1199 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Backup BB 0x%x = 0x%x\n",
1200 reg[i], reg_backup[i]);
1201 }
1202 }
1203
1204 static void rtw8822c_txgapk_reload_bb_reg(struct rtw_dev *rtwdev,
1205 const u32 reg[], u32 reg_backup[],
1206 u32 reg_num)
1207 {
1208 u32 i;
1209
1210 for (i = 0; i < reg_num; i++) {
1211 rtw_write32(rtwdev, reg[i], reg_backup[i]);
1212 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Reload BB 0x%x = 0x%x\n",
1213 reg[i], reg_backup[i]);
1214 }
1215 }
1216
1217 static bool check_rf_status(struct rtw_dev *rtwdev, u8 status)
1218 {
1219 u8 reg_rf0_a, reg_rf0_b;
1220
1221 reg_rf0_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A,
1222 RF_MODE_TRXAGC, BIT_RF_MODE);
1223 reg_rf0_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B,
1224 RF_MODE_TRXAGC, BIT_RF_MODE);
1225
1226 if (reg_rf0_a == status || reg_rf0_b == status)
1227 return false;
1228
1229 return true;
1230 }
1231
1232 static void rtw8822c_txgapk_tx_pause(struct rtw_dev *rtwdev)
1233 {
1234 bool status;
1235 int ret;
1236
1237 rtw_write8(rtwdev, REG_TXPAUSE, BIT_AC_QUEUE);
1238 rtw_write32_mask(rtwdev, REG_TX_FIFO, BIT_STOP_TX, 0x2);
1239
1240 ret = read_poll_timeout_atomic(check_rf_status, status, status,
1241 2, 5000, false, rtwdev, 2);
1242 if (ret)
1243 rtw_warn(rtwdev, "failed to pause TX\n");
1244
1245 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Tx pause!!\n");
1246 }
1247
1248 static void rtw8822c_txgapk_bb_dpk(struct rtw_dev *rtwdev, u8 path)
1249 {
1250 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1251
1252 rtw_write32_mask(rtwdev, REG_ENFN, BIT_IQK_DPK_EN, 0x1);
1253 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1254 BIT_IQK_DPK_CLOCK_SRC, 0x1);
1255 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1256 BIT_IQK_DPK_RESET_SRC, 0x1);
1257 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_EN_IOQ_IQK_DPK, 0x1);
1258 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_TST_IQK2SET_SRC, 0x0);
1259 rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x1ff);
1260
1261 if (path == RF_PATH_A) {
1262 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1263 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1264 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x1);
1265 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1266 BIT_TX_SCALE_0DB, 0x1);
1267 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x0);
1268 } else if (path == RF_PATH_B) {
1269 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1270 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1271 rtw_write32_mask(rtwdev, REG_3WIRE2,
1272 BIT_DIS_SHARERX_TXGAT, 0x1);
1273 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1274 BIT_TX_SCALE_0DB, 0x1);
1275 rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x0);
1276 }
1277 rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x2);
1278 }
1279
1280 static void rtw8822c_txgapk_afe_dpk(struct rtw_dev *rtwdev, u8 path)
1281 {
1282 u32 reg;
1283
1284 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1285
1286 if (path == RF_PATH_A) {
1287 reg = REG_ANAPAR_A;
1288 } else if (path == RF_PATH_B) {
1289 reg = REG_ANAPAR_B;
1290 } else {
1291 rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1292 return;
1293 }
1294
1295 rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, MASKDWORD);
1296 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1297 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1298 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x701f0001);
1299 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x702f0001);
1300 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x703f0001);
1301 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x704f0001);
1302 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705f0001);
1303 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x706f0001);
1304 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707f0001);
1305 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708f0001);
1306 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709f0001);
1307 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70af0001);
1308 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bf0001);
1309 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cf0001);
1310 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70df0001);
1311 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ef0001);
1312 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1313 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1314 }
1315
1316 static void rtw8822c_txgapk_afe_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1317 {
1318 u32 reg;
1319
1320 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1321
1322 if (path == RF_PATH_A) {
1323 reg = REG_ANAPAR_A;
1324 } else if (path == RF_PATH_B) {
1325 reg = REG_ANAPAR_B;
1326 } else {
1327 rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1328 return;
1329 }
1330 rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, 0xffa1005e);
1331 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700b8041);
1332 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70144041);
1333 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70244041);
1334 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70344041);
1335 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70444041);
1336 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705b8041);
1337 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70644041);
1338 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707b8041);
1339 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708b8041);
1340 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709b8041);
1341 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ab8041);
1342 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bb8041);
1343 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cb8041);
1344 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70db8041);
1345 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70eb8041);
1346 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70fb8041);
1347 }
1348
1349 static void rtw8822c_txgapk_bb_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1350 {
1351 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1352
1353 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x0);
1354 rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TIA_BYPASS, 0x0);
1355 rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TXBB, 0x0);
1356
1357 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1358 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1359 rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1360 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1361 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x1);
1362 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1363 rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1364 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1365 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1366 rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x0);
1367
1368 if (path == RF_PATH_A) {
1369 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1370 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1371 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x0);
1372 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1373 BIT_TX_SCALE_0DB, 0x0);
1374 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x3);
1375 } else if (path == RF_PATH_B) {
1376 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1377 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1378 rtw_write32_mask(rtwdev, REG_3WIRE2,
1379 BIT_DIS_SHARERX_TXGAT, 0x0);
1380 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1381 BIT_TX_SCALE_0DB, 0x0);
1382 rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x3);
1383 }
1384
1385 rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x0);
1386 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_CFIR_EN, 0x5);
1387 }
1388
1389 static bool _rtw8822c_txgapk_gain_valid(struct rtw_dev *rtwdev, u32 gain)
1390 {
1391 if ((FIELD_GET(BIT_GAIN_TX_PAD_H, gain) >= 0xc) &&
1392 (FIELD_GET(BIT_GAIN_TX_PAD_L, gain) >= 0xe))
1393 return true;
1394
1395 return false;
1396 }
1397
1398 static void _rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev,
1399 u8 band, u8 path)
1400 {
1401 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1402 u32 v, tmp_3f = 0;
1403 u8 gain, check_txgain;
1404
1405 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1406
1407 switch (band) {
1408 case RF_BAND_2G_OFDM:
1409 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1410 break;
1411 case RF_BAND_5G_L:
1412 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1413 break;
1414 case RF_BAND_5G_M:
1415 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1416 break;
1417 case RF_BAND_5G_H:
1418 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1419 break;
1420 default:
1421 break;
1422 }
1423
1424 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, MASKBYTE0, 0x88);
1425
1426 check_txgain = 0;
1427 for (gain = 0; gain < RF_GAIN_NUM; gain++) {
1428 v = txgapk->rf3f_bp[band][gain][path];
1429 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1430 if (!check_txgain) {
1431 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1432 check_txgain = 1;
1433 }
1434 rtw_dbg(rtwdev, RTW_DBG_RFK,
1435 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1436 txgapk->rf3f_bp[band][gain][path]);
1437 } else {
1438 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1439 }
1440
1441 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN, tmp_3f);
1442 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_I_GAIN, gain);
1443 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x1);
1444 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x0);
1445
1446 rtw_dbg(rtwdev, RTW_DBG_RFK,
1447 "[TXGAPK] Band=%d 0x1b98[11:0]=0x%03X path=%d\n",
1448 band, tmp_3f, path);
1449 }
1450 }
1451
1452 static void rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev)
1453 {
1454 u8 path, band;
1455
1456 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1457 __func__, rtwdev->dm_info.gapk.channel);
1458
1459 for (band = 0; band < RF_BAND_MAX; band++) {
1460 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1461 _rtw8822c_txgapk_write_gain_bb_table(rtwdev,
1462 band, path);
1463 }
1464 }
1465 }
1466
1467 static void rtw8822c_txgapk_read_offset(struct rtw_dev *rtwdev, u8 path)
1468 {
1469 static const u32 cfg1_1b00[2] = {0x00000d18, 0x00000d2a};
1470 static const u32 cfg2_1b00[2] = {0x00000d19, 0x00000d2b};
1471 static const u32 set_pi[2] = {REG_RSV_CTRL, REG_WLRF1};
1472 static const u32 path_setting[2] = {REG_ORITXCODE, REG_ORITXCODE2};
1473 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1474 u8 channel = txgapk->channel;
1475 u32 val;
1476 int i;
1477
1478 if (path >= ARRAY_SIZE(cfg1_1b00) ||
1479 path >= ARRAY_SIZE(cfg2_1b00) ||
1480 path >= ARRAY_SIZE(set_pi) ||
1481 path >= ARRAY_SIZE(path_setting)) {
1482 rtw_warn(rtwdev, "[TXGAPK] wrong path %d\n", path);
1483 return;
1484 }
1485
1486 rtw_write32_mask(rtwdev, REG_ANTMAP0, BIT_ANT_PATH, path + 1);
1487 rtw_write32_mask(rtwdev, REG_TXLGMAP, MASKDWORD, 0xe4e40000);
1488 rtw_write32_mask(rtwdev, REG_TXANTSEG, BIT_ANTSEG, 0x3);
1489 rtw_write32_mask(rtwdev, path_setting[path], MASK20BITS, 0x33312);
1490 rtw_write32_mask(rtwdev, path_setting[path], BIT_PATH_EN, 0x1);
1491 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x0);
1492 rtw_write_rf(rtwdev, path, RF_LUTDBG, BIT_TXA_TANK, 0x1);
1493 rtw_write_rf(rtwdev, path, RF_IDAC, BIT_TX_MODE, 0x820);
1494 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1495 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1496
1497 rtw_write32_mask(rtwdev, REG_TX_TONE_IDX, MASKBYTE0, 0x018);
1498 fsleep(1000);
1499 if (channel >= 1 && channel <= 14)
1500 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_2G_SWING);
1501 else
1502 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_5G_SWING);
1503 fsleep(1000);
1504
1505 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg1_1b00[path]);
1506 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg2_1b00[path]);
1507
1508 read_poll_timeout(rtw_read32_mask, val,
1509 val == 0x55, 1000, 100000, false,
1510 rtwdev, REG_RPT_CIP, BIT_RPT_CIP_STATUS);
1511
1512 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x2);
1513 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1514 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_EN, 0x1);
1515 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x12);
1516 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x3);
1517 val = rtw_read32(rtwdev, REG_STAT_RPT);
1518
1519 txgapk->offset[0][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1520 txgapk->offset[1][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1521 txgapk->offset[2][path] = (s8)FIELD_GET(BIT_GAPK_RPT2, val);
1522 txgapk->offset[3][path] = (s8)FIELD_GET(BIT_GAPK_RPT3, val);
1523 txgapk->offset[4][path] = (s8)FIELD_GET(BIT_GAPK_RPT4, val);
1524 txgapk->offset[5][path] = (s8)FIELD_GET(BIT_GAPK_RPT5, val);
1525 txgapk->offset[6][path] = (s8)FIELD_GET(BIT_GAPK_RPT6, val);
1526 txgapk->offset[7][path] = (s8)FIELD_GET(BIT_GAPK_RPT7, val);
1527
1528 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x4);
1529 val = rtw_read32(rtwdev, REG_STAT_RPT);
1530
1531 txgapk->offset[8][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1532 txgapk->offset[9][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1533
1534 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1535 if (txgapk->offset[i][path] & BIT(3))
1536 txgapk->offset[i][path] = txgapk->offset[i][path] |
1537 0xf0;
1538 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1539 rtw_dbg(rtwdev, RTW_DBG_RFK,
1540 "[TXGAPK] offset %d %d path=%d\n",
1541 txgapk->offset[i][path], i, path);
1542 }
1543
1544 static void rtw8822c_txgapk_calculate_offset(struct rtw_dev *rtwdev, u8 path)
1545 {
1546 static const u32 bb_reg[] = {REG_ANTMAP0, REG_TXLGMAP, REG_TXANTSEG,
1547 REG_ORITXCODE, REG_ORITXCODE2};
1548 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1549 u8 channel = txgapk->channel;
1550 u32 reg_backup[ARRAY_SIZE(bb_reg)] = {0};
1551
1552 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1553 __func__, channel);
1554
1555 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1556 reg_backup, ARRAY_SIZE(bb_reg));
1557
1558 if (channel >= 1 && channel <= 14) {
1559 rtw_write32_mask(rtwdev,
1560 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1561 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1562 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1563 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1564 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1565 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x5000f);
1566 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x0);
1567 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x1);
1568 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0f);
1569 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1570 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
1571 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1572 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1573
1574 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x00);
1575 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1576
1577 rtw8822c_txgapk_read_offset(rtwdev, path);
1578 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1579
1580 } else {
1581 rtw_write32_mask(rtwdev,
1582 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1583 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1584 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1585 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1586 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1587 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50011);
1588 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x3);
1589 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x3);
1590 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
1591 rtw_write_rf(rtwdev, path,
1592 RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0x2);
1593 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x12);
1594 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1595 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1596 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1597 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x5);
1598
1599 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1600
1601 if (channel >= 36 && channel <= 64)
1602 rtw_write32_mask(rtwdev,
1603 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1604 else if (channel >= 100 && channel <= 144)
1605 rtw_write32_mask(rtwdev,
1606 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1607 else if (channel >= 149 && channel <= 177)
1608 rtw_write32_mask(rtwdev,
1609 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1610
1611 rtw8822c_txgapk_read_offset(rtwdev, path);
1612 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1613 }
1614 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1615 reg_backup, ARRAY_SIZE(bb_reg));
1616 }
1617
1618 static void rtw8822c_txgapk_rf_restore(struct rtw_dev *rtwdev, u8 path)
1619 {
1620 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1621
1622 if (path >= rtwdev->hal.rf_path_num)
1623 return;
1624
1625 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x3);
1626 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x0);
1627 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x0);
1628 }
1629
1630 static u32 rtw8822c_txgapk_cal_gain(struct rtw_dev *rtwdev, u32 gain, s8 offset)
1631 {
1632 u32 gain_x2, new_gain;
1633
1634 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1635
1636 if (_rtw8822c_txgapk_gain_valid(rtwdev, gain)) {
1637 new_gain = gain;
1638 rtw_dbg(rtwdev, RTW_DBG_RFK,
1639 "[TXGAPK] gain=0x%03X(>=0xCEX) offset=%d new_gain=0x%03X\n",
1640 gain, offset, new_gain);
1641 return new_gain;
1642 }
1643
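	/* The gap-K offset is applied in half-steps: work on the doubled gain
	 * code, then fold the leftover LSB into BIT_GAIN_EXT so half a step of
	 * resolution is preserved in the extended-gain bit.
	 */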
1644 gain_x2 = (gain << 1) + offset;
1645 new_gain = (gain_x2 >> 1) | (gain_x2 & BIT(0) ? BIT_GAIN_EXT : 0);
1646
1647 rtw_dbg(rtwdev, RTW_DBG_RFK,
1648 "[TXGAPK] gain=0x%X offset=%d new_gain=0x%X\n",
1649 gain, offset, new_gain);
1650
1651 return new_gain;
1652 }
1653
1654 static void rtw8822c_txgapk_write_tx_gain(struct rtw_dev *rtwdev)
1655 {
1656 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1657 u32 i, j, tmp = 0x20, tmp_3f, v;
1658 s8 offset_tmp[RF_GAIN_NUM] = {0};
1659 u8 path, band = RF_BAND_2G_OFDM, channel = txgapk->channel;
1660
1661 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1662
1663 if (channel >= 1 && channel <= 14) {
1664 tmp = 0x20;
1665 band = RF_BAND_2G_OFDM;
1666 } else if (channel >= 36 && channel <= 64) {
1667 tmp = 0x200;
1668 band = RF_BAND_5G_L;
1669 } else if (channel >= 100 && channel <= 144) {
1670 tmp = 0x280;
1671 band = RF_BAND_5G_M;
1672 } else if (channel >= 149 && channel <= 177) {
1673 tmp = 0x300;
1674 band = RF_BAND_5G_H;
1675 } else {
1676 rtw_err(rtwdev, "[TXGAPK] unknown channel %d!!\n", channel);
1677 return;
1678 }
1679
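	/* For every gain index, accumulate the per-step offsets measured from
	 * that index up to the top of the table, so each entry carries its
	 * total correction; entries already in the extended (0xCEx) range are
	 * skipped.
	 */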
1680 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1681 for (i = 0; i < RF_GAIN_NUM; i++) {
1682 offset_tmp[i] = 0;
1683 for (j = i; j < RF_GAIN_NUM; j++) {
1684 v = txgapk->rf3f_bp[band][j][path];
1685 if (_rtw8822c_txgapk_gain_valid(rtwdev, v))
1686 continue;
1687
1688 offset_tmp[i] += txgapk->offset[j][path];
1689 txgapk->fianl_offset[i][path] = offset_tmp[i];
1690 }
1691
1692 v = txgapk->rf3f_bp[band][i][path];
1693 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1694 rtw_dbg(rtwdev, RTW_DBG_RFK,
1695 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1696 txgapk->rf3f_bp[band][i][path]);
1697 } else {
1698 txgapk->rf3f_fs[path][i] = offset_tmp[i];
1699 rtw_dbg(rtwdev, RTW_DBG_RFK,
1700 "[TXGAPK] offset %d %d\n",
1701 offset_tmp[i], i);
1702 }
1703 }
1704
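		/* Program the corrected gains back into the RF TX gain LUT:
		 * LUTWE2 = 0x10000 presumably opens table write mode, LUTWA
		 * selects the entry (tmp is the per-band base index) and
		 * LUTWD0 carries the new 0x3f code plus the extension bit.
		 */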
1705 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x10000);
1706 for (i = 0; i < RF_GAIN_NUM; i++) {
1707 rtw_write_rf(rtwdev, path,
1708 RF_LUTWA, RFREG_MASK, tmp + i);
1709
1710 tmp_3f = rtw8822c_txgapk_cal_gain(rtwdev,
1711 txgapk->rf3f_bp[band][i][path],
1712 offset_tmp[i]);
1713 rtw_write_rf(rtwdev, path, RF_LUTWD0,
1714 BIT_GAIN_EXT | BIT_DATA_L, tmp_3f);
1715
1716 rtw_dbg(rtwdev, RTW_DBG_RFK,
1717 "[TXGAPK] 0x33=0x%05X 0x3f=0x%04X\n",
1718 tmp + i, tmp_3f);
1719 }
1720 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x0);
1721 }
1722 }
1723
1724 static void rtw8822c_txgapk_save_all_tx_gain_table(struct rtw_dev *rtwdev)
1725 {
1726 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
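	/* The tables below appear to give, per RF band, a representative
	 * channel, the RF18 band-select value and whether the CCK debug path
	 * must be toggled while the 0x3f TX gain codes are read back
	 * (interpretation of the constants, not documented in the source).
	 */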
1727 static const u32 three_wire[2] = {REG_3WIRE, REG_3WIRE2};
1728 static const u8 ch_num[RF_BAND_MAX] = {1, 1, 36, 100, 149};
1729 static const u8 band_num[RF_BAND_MAX] = {0x0, 0x0, 0x1, 0x3, 0x5};
1730 static const u8 cck[RF_BAND_MAX] = {0x1, 0x0, 0x0, 0x0, 0x0};
1731 u8 path, band, gain, rf0_idx;
1732 u32 rf18, v;
1733
1734 if (rtwdev->dm_info.dm_flags & BIT(RTW_DM_CAP_TXGAPK))
1735 return;
1736
1737 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1738
1739 if (txgapk->read_txgain == 1) {
1740 rtw_dbg(rtwdev, RTW_DBG_RFK,
1741 "[TXGAPK] Already Read txgapk->read_txgain return!!!\n");
1742 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1743 return;
1744 }
1745
1746 for (band = 0; band < RF_BAND_MAX; band++) {
1747 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1748 rf18 = rtw_read_rf(rtwdev, path, RF_CFGCH, RFREG_MASK);
1749
1750 rtw_write32_mask(rtwdev,
1751 three_wire[path], BIT_3WIRE_EN, 0x0);
1752 rtw_write_rf(rtwdev, path,
1753 RF_CFGCH, MASKBYTE0, ch_num[band]);
1754 rtw_write_rf(rtwdev, path,
1755 RF_CFGCH, BIT_BAND, band_num[band]);
1756 rtw_write_rf(rtwdev, path,
1757 RF_BW_TRXBB, BIT_DBG_CCK_CCA, cck[band]);
1758 rtw_write_rf(rtwdev, path,
1759 RF_BW_TRXBB, BIT_TX_CCK_IND, cck[band]);
1760 gain = 0;
1761 for (rf0_idx = 1; rf0_idx < 32; rf0_idx += 3) {
1762 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC,
1763 MASKBYTE0, rf0_idx);
1764 v = rtw_read_rf(rtwdev, path,
1765 RF_TX_RESULT, RFREG_MASK);
1766 txgapk->rf3f_bp[band][gain][path] = v & BIT_DATA_L;
1767
1768 rtw_dbg(rtwdev, RTW_DBG_RFK,
1769 "[TXGAPK] 0x5f=0x%03X band=%d path=%d\n",
1770 txgapk->rf3f_bp[band][gain][path],
1771 band, path);
1772 gain++;
1773 }
1774 rtw_write_rf(rtwdev, path, RF_CFGCH, RFREG_MASK, rf18);
1775 rtw_write32_mask(rtwdev,
1776 three_wire[path], BIT_3WIRE_EN, 0x3);
1777 }
1778 }
1779 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1780 txgapk->read_txgain = 1;
1781 }
1782
1783 static void rtw8822c_txgapk(struct rtw_dev *rtwdev)
1784 {
1785 static const u32 bb_reg[2] = {REG_TX_PTCL_CTRL, REG_TX_FIFO};
1786 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1787 u32 bb_reg_backup[2];
1788 u8 path;
1789
1790 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1791
1792 rtw8822c_txgapk_save_all_tx_gain_table(rtwdev);
1793
1794 if (txgapk->read_txgain == 0) {
1795 rtw_dbg(rtwdev, RTW_DBG_RFK,
1796 "[TXGAPK] txgapk->read_txgain == 0 return!!!\n");
1797 return;
1798 }
1799
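	/* TX gap-K presumably only applies to the legacy power-tracking scheme;
	 * skip it when the efuse says the board uses TSSI tracking
	 * (power_track_type 4..7), as the debug message below indicates.
	 */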
1800 if (rtwdev->efuse.power_track_type >= 4 &&
1801 rtwdev->efuse.power_track_type <= 7) {
1802 rtw_dbg(rtwdev, RTW_DBG_RFK,
1803 "[TXGAPK] Normal Mode in TSSI mode. return!!!\n");
1804 return;
1805 }
1806
1807 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1808 bb_reg_backup, ARRAY_SIZE(bb_reg));
1809 rtw8822c_txgapk_tx_pause(rtwdev);
1810 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1811 txgapk->channel = rtw_read_rf(rtwdev, path,
1812 RF_CFGCH, RFREG_MASK) & MASKBYTE0;
1813 rtw8822c_txgapk_bb_dpk(rtwdev, path);
1814 rtw8822c_txgapk_afe_dpk(rtwdev, path);
1815 rtw8822c_txgapk_calculate_offset(rtwdev, path);
1816 rtw8822c_txgapk_rf_restore(rtwdev, path);
1817 rtw8822c_txgapk_afe_dpk_restore(rtwdev, path);
1818 rtw8822c_txgapk_bb_dpk_restore(rtwdev, path);
1819 }
1820 rtw8822c_txgapk_write_tx_gain(rtwdev);
1821 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1822 bb_reg_backup, ARRAY_SIZE(bb_reg));
1823 }
1824
1825 static void rtw8822c_do_gapk(struct rtw_dev *rtwdev)
1826 {
1827 struct rtw_dm_info *dm = &rtwdev->dm_info;
1828
1829 if (dm->dm_flags & BIT(RTW_DM_CAP_TXGAPK)) {
1830 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] feature disable!!!\n");
1831 return;
1832 }
1833 rtw8822c_rfk_handshake(rtwdev, true);
1834 rtw8822c_txgapk(rtwdev);
1835 rtw8822c_rfk_handshake(rtwdev, false);
1836 }
1837
1838 static void rtw8822c_rf_init(struct rtw_dev *rtwdev)
1839 {
1840 rtw8822c_rf_dac_cal(rtwdev);
1841 rtw8822c_rf_x2_check(rtwdev);
1842 rtw8822c_thermal_trim(rtwdev);
1843 rtw8822c_power_trim(rtwdev);
1844 rtw8822c_pa_bias(rtwdev);
1845 }
1846
1847 static void rtw8822c_pwrtrack_init(struct rtw_dev *rtwdev)
1848 {
1849 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1850 u8 path;
1851
1852 for (path = RF_PATH_A; path < RTW_RF_PATH_MAX; path++) {
1853 dm_info->delta_power_index[path] = 0;
1854 ewma_thermal_init(&dm_info->avg_thermal[path]);
1855 dm_info->thermal_avg[path] = 0xff;
1856 }
1857
1858 dm_info->pwr_trk_triggered = false;
1859 dm_info->thermal_meter_k = rtwdev->efuse.thermal_meter_k;
1860 dm_info->thermal_meter_lck = rtwdev->efuse.thermal_meter_k;
1861 }
1862
1863 static void rtw8822c_phy_set_param(struct rtw_dev *rtwdev)
1864 {
1865 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1866 struct rtw_hal *hal = &rtwdev->hal;
1867 u8 crystal_cap;
1868 u8 cck_gi_u_bnd_msb = 0;
1869 u8 cck_gi_u_bnd_lsb = 0;
1870 u8 cck_gi_l_bnd_msb = 0;
1871 u8 cck_gi_l_bnd_lsb = 0;
1872 bool is_tx2_path;
1873
1874 /* power on BB/RF domain */
1875 rtw_write8_set(rtwdev, REG_SYS_FUNC_EN,
1876 BIT_FEN_BB_GLB_RST | BIT_FEN_BB_RSTB);
1877 rtw_write8_set(rtwdev, REG_RF_CTRL,
1878 BIT_RF_EN | BIT_RF_RSTB | BIT_RF_SDM_RSTB);
1879 rtw_write32_set(rtwdev, REG_WLRF1, BIT_WLRF1_BBRF_EN);
1880
1881 /* disable low rate DPD */
1882 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1883
1884 /* pre init before header files config */
1885 rtw8822c_header_file_init(rtwdev, true);
1886
1887 rtw_phy_load_tables(rtwdev);
1888
1889 crystal_cap = rtwdev->efuse.crystal_cap & 0x7f;
1890 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, 0xfffc00,
1891 crystal_cap | (crystal_cap << 7));
1892
1893 /* post init after header files config */
1894 rtw8822c_header_file_init(rtwdev, false);
1895
1896 is_tx2_path = false;
1897 rtw8822c_config_trx_mode(rtwdev, hal->antenna_tx, hal->antenna_rx,
1898 is_tx2_path);
1899 rtw_phy_init(rtwdev);
1900
1901 cck_gi_u_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc000);
1902 cck_gi_u_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1aa8, 0xf0000);
1903 cck_gi_l_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc0);
1904 cck_gi_l_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1a70, 0x0f000000);
1905
1906 dm_info->cck_gi_u_bnd = ((cck_gi_u_bnd_msb << 4) | (cck_gi_u_bnd_lsb));
1907 dm_info->cck_gi_l_bnd = ((cck_gi_l_bnd_msb << 4) | (cck_gi_l_bnd_lsb));
1908
1909 rtw8822c_rf_init(rtwdev);
1910 rtw8822c_pwrtrack_init(rtwdev);
1911
1912 rtw_bf_phy_init(rtwdev);
1913 }
1914
1915 #define WLAN_TXQ_RPT_EN 0x1F
1916 #define WLAN_SLOT_TIME 0x09
1917 #define WLAN_PIFS_TIME 0x1C
1918 #define WLAN_SIFS_CCK_CONT_TX 0x0A
1919 #define WLAN_SIFS_OFDM_CONT_TX 0x0E
1920 #define WLAN_SIFS_CCK_TRX 0x0A
1921 #define WLAN_SIFS_OFDM_TRX 0x10
1922 #define WLAN_NAV_MAX 0xC8
1923 #define WLAN_RDG_NAV 0x05
1924 #define WLAN_TXOP_NAV 0x1B
1925 #define WLAN_CCK_RX_TSF 0x30
1926 #define WLAN_OFDM_RX_TSF 0x30
1927 #define WLAN_TBTT_PROHIBIT 0x04 /* unit : 32us */
1928 #define WLAN_TBTT_HOLD_TIME 0x064 /* unit : 32us */
1929 #define WLAN_DRV_EARLY_INT 0x04
1930 #define WLAN_BCN_CTRL_CLT0 0x10
1931 #define WLAN_BCN_DMA_TIME 0x02
1932 #define WLAN_BCN_MAX_ERR 0xFF
1933 #define WLAN_SIFS_CCK_DUR_TUNE 0x0A
1934 #define WLAN_SIFS_OFDM_DUR_TUNE 0x10
1935 #define WLAN_SIFS_CCK_CTX 0x0A
1936 #define WLAN_SIFS_CCK_IRX 0x0A
1937 #define WLAN_SIFS_OFDM_CTX 0x0E
1938 #define WLAN_SIFS_OFDM_IRX 0x0E
1939 #define WLAN_EIFS_DUR_TUNE 0x40
1940 #define WLAN_EDCA_VO_PARAM 0x002FA226
1941 #define WLAN_EDCA_VI_PARAM 0x005EA328
1942 #define WLAN_EDCA_BE_PARAM 0x005EA42B
1943 #define WLAN_EDCA_BK_PARAM 0x0000A44F
1944
1945 #define WLAN_RX_FILTER0 0xFFFFFFFF
1946 #define WLAN_RX_FILTER2 0xFFFF
1947 #define WLAN_RCR_CFG 0xE400220E
1948 #define WLAN_RXPKT_MAX_SZ 12288
1949 #define WLAN_RXPKT_MAX_SZ_512 (WLAN_RXPKT_MAX_SZ >> 9)
1950
1951 #define WLAN_AMPDU_MAX_TIME 0x70
1952 #define WLAN_RTS_LEN_TH 0xFF
1953 #define WLAN_RTS_TX_TIME_TH 0x08
1954 #define WLAN_MAX_AGG_PKT_LIMIT 0x3f
1955 #define WLAN_RTS_MAX_AGG_PKT_LIMIT 0x3f
1956 #define WLAN_PRE_TXCNT_TIME_TH 0x1E0
1957 #define FAST_EDCA_VO_TH 0x06
1958 #define FAST_EDCA_VI_TH 0x06
1959 #define FAST_EDCA_BE_TH 0x06
1960 #define FAST_EDCA_BK_TH 0x06
1961 #define WLAN_BAR_RETRY_LIMIT 0x01
1962 #define WLAN_BAR_ACK_TYPE 0x05
1963 #define WLAN_RA_TRY_RATE_AGG_LIMIT 0x08
1964 #define WLAN_RESP_TXRATE 0x84
1965 #define WLAN_ACK_TO 0x21
1966 #define WLAN_ACK_TO_CCK 0x6A
1967 #define WLAN_DATA_RATE_FB_CNT_1_4 0x01000000
1968 #define WLAN_DATA_RATE_FB_CNT_5_8 0x08070504
1969 #define WLAN_RTS_RATE_FB_CNT_5_8 0x08070504
1970 #define WLAN_DATA_RATE_FB_RATE0 0xFE01F010
1971 #define WLAN_DATA_RATE_FB_RATE0_H 0x40000000
1972 #define WLAN_RTS_RATE_FB_RATE1 0x003FF010
1973 #define WLAN_RTS_RATE_FB_RATE1_H 0x40000000
1974 #define WLAN_RTS_RATE_FB_RATE4 0x0600F010
1975 #define WLAN_RTS_RATE_FB_RATE4_H 0x400003E0
1976 #define WLAN_RTS_RATE_FB_RATE5 0x0600F015
1977 #define WLAN_RTS_RATE_FB_RATE5_H 0x000000E0
1978 #define WLAN_MULTI_ADDR 0xFFFFFFFF
1979
1980 #define WLAN_TX_FUNC_CFG1 0x30
1981 #define WLAN_TX_FUNC_CFG2 0x30
1982 #define WLAN_MAC_OPT_NORM_FUNC1 0x98
1983 #define WLAN_MAC_OPT_LB_FUNC1 0x80
1984 #define WLAN_MAC_OPT_FUNC2 0xb0810041
1985 #define WLAN_MAC_INT_MIG_CFG 0x33330000
1986
1987 #define WLAN_SIFS_CFG (WLAN_SIFS_CCK_CONT_TX | \
1988 (WLAN_SIFS_OFDM_CONT_TX << BIT_SHIFT_SIFS_OFDM_CTX) | \
1989 (WLAN_SIFS_CCK_TRX << BIT_SHIFT_SIFS_CCK_TRX) | \
1990 (WLAN_SIFS_OFDM_TRX << BIT_SHIFT_SIFS_OFDM_TRX))
1991
1992 #define WLAN_SIFS_DUR_TUNE (WLAN_SIFS_CCK_DUR_TUNE | \
1993 (WLAN_SIFS_OFDM_DUR_TUNE << 8))
1994
1995 #define WLAN_TBTT_TIME (WLAN_TBTT_PROHIBIT |\
1996 (WLAN_TBTT_HOLD_TIME << BIT_SHIFT_TBTT_HOLD_TIME_AP))
1997
1998 #define WLAN_NAV_CFG (WLAN_RDG_NAV | (WLAN_TXOP_NAV << 16))
1999 #define WLAN_RX_TSF_CFG (WLAN_CCK_RX_TSF | (WLAN_OFDM_RX_TSF) << 8)
2000
2001 #define MAC_CLK_SPEED 80 /* 80M */
2002 #define EFUSE_PCB_INFO_OFFSET 0xCA
2003
2004 static int rtw8822c_mac_init(struct rtw_dev *rtwdev)
2005 {
2006 u8 value8;
2007 u16 value16;
2008 u32 value32;
2009 u16 pre_txcnt;
2010
2011 /* txq control */
2012 value8 = rtw_read8(rtwdev, REG_FWHW_TXQ_CTRL);
2013 value8 |= (BIT(7) & ~BIT(1) & ~BIT(2));
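	/* Note: (BIT(7) & ~BIT(1) & ~BIT(2)) evaluates to just BIT(7), so only
	 * bit 7 is set here; bits 1 and 2 keep their read-back values.
	 */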
2014 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL, value8);
2015 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL + 1, WLAN_TXQ_RPT_EN);
2016 /* sifs control */
2017 rtw_write16(rtwdev, REG_SPEC_SIFS, WLAN_SIFS_DUR_TUNE);
2018 rtw_write32(rtwdev, REG_SIFS, WLAN_SIFS_CFG);
2019 rtw_write16(rtwdev, REG_RESP_SIFS_CCK,
2020 WLAN_SIFS_CCK_CTX | WLAN_SIFS_CCK_IRX << 8);
2021 rtw_write16(rtwdev, REG_RESP_SIFS_OFDM,
2022 WLAN_SIFS_OFDM_CTX | WLAN_SIFS_OFDM_IRX << 8);
2023 /* rate fallback control */
2024 rtw_write32(rtwdev, REG_DARFRC, WLAN_DATA_RATE_FB_CNT_1_4);
2025 rtw_write32(rtwdev, REG_DARFRCH, WLAN_DATA_RATE_FB_CNT_5_8);
2026 rtw_write32(rtwdev, REG_RARFRCH, WLAN_RTS_RATE_FB_CNT_5_8);
2027 rtw_write32(rtwdev, REG_ARFR0, WLAN_DATA_RATE_FB_RATE0);
2028 rtw_write32(rtwdev, REG_ARFRH0, WLAN_DATA_RATE_FB_RATE0_H);
2029 rtw_write32(rtwdev, REG_ARFR1_V1, WLAN_RTS_RATE_FB_RATE1);
2030 rtw_write32(rtwdev, REG_ARFRH1_V1, WLAN_RTS_RATE_FB_RATE1_H);
2031 rtw_write32(rtwdev, REG_ARFR4, WLAN_RTS_RATE_FB_RATE4);
2032 rtw_write32(rtwdev, REG_ARFRH4, WLAN_RTS_RATE_FB_RATE4_H);
2033 rtw_write32(rtwdev, REG_ARFR5, WLAN_RTS_RATE_FB_RATE5);
2034 rtw_write32(rtwdev, REG_ARFRH5, WLAN_RTS_RATE_FB_RATE5_H);
2035 /* protocol configuration */
2036 rtw_write8(rtwdev, REG_AMPDU_MAX_TIME_V1, WLAN_AMPDU_MAX_TIME);
2037 rtw_write8_set(rtwdev, REG_TX_HANG_CTRL, BIT_EN_EOF_V1);
2038 pre_txcnt = WLAN_PRE_TXCNT_TIME_TH | BIT_EN_PRECNT;
2039 rtw_write8(rtwdev, REG_PRECNT_CTRL, (u8)(pre_txcnt & 0xFF));
2040 rtw_write8(rtwdev, REG_PRECNT_CTRL + 1, (u8)(pre_txcnt >> 8));
2041 value32 = WLAN_RTS_LEN_TH | (WLAN_RTS_TX_TIME_TH << 8) |
2042 (WLAN_MAX_AGG_PKT_LIMIT << 16) |
2043 (WLAN_RTS_MAX_AGG_PKT_LIMIT << 24);
2044 rtw_write32(rtwdev, REG_PROT_MODE_CTRL, value32);
2045 rtw_write16(rtwdev, REG_BAR_MODE_CTRL + 2,
2046 WLAN_BAR_RETRY_LIMIT | WLAN_RA_TRY_RATE_AGG_LIMIT << 8);
2047 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING, FAST_EDCA_VO_TH);
2048 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING + 2, FAST_EDCA_VI_TH);
2049 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING, FAST_EDCA_BE_TH);
2050 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING + 2, FAST_EDCA_BK_TH);
2051 /* close BA parser */
2052 rtw_write8_clr(rtwdev, REG_LIFETIME_EN, BIT_BA_PARSER_EN);
2053 rtw_write32_clr(rtwdev, REG_RRSR, BITS_RRSR_RSC);
2054
2055 /* EDCA configuration */
2056 rtw_write32(rtwdev, REG_EDCA_VO_PARAM, WLAN_EDCA_VO_PARAM);
2057 rtw_write32(rtwdev, REG_EDCA_VI_PARAM, WLAN_EDCA_VI_PARAM);
2058 rtw_write32(rtwdev, REG_EDCA_BE_PARAM, WLAN_EDCA_BE_PARAM);
2059 rtw_write32(rtwdev, REG_EDCA_BK_PARAM, WLAN_EDCA_BK_PARAM);
2060 rtw_write8(rtwdev, REG_PIFS, WLAN_PIFS_TIME);
2061 rtw_write8_clr(rtwdev, REG_TX_PTCL_CTRL + 1, BIT_SIFS_BK_EN >> 8);
2062 rtw_write8_set(rtwdev, REG_RD_CTRL + 1,
2063 (BIT_DIS_TXOP_CFE | BIT_DIS_LSIG_CFE |
2064 BIT_DIS_STBC_CFE) >> 8);
2065
2066 /* MAC clock configuration */
2067 rtw_write32_clr(rtwdev, REG_AFE_CTRL1, BIT_MAC_CLK_SEL);
2068 rtw_write8(rtwdev, REG_USTIME_TSF, MAC_CLK_SPEED);
2069 rtw_write8(rtwdev, REG_USTIME_EDCA, MAC_CLK_SPEED);
2070
2071 rtw_write8_set(rtwdev, REG_MISC_CTRL,
2072 BIT_EN_FREE_CNT | BIT_DIS_SECOND_CCA);
2073 rtw_write8_clr(rtwdev, REG_TIMER0_SRC_SEL, BIT_TSFT_SEL_TIMER0);
2074 rtw_write16(rtwdev, REG_TXPAUSE, 0x0000);
2075 rtw_write8(rtwdev, REG_SLOT, WLAN_SLOT_TIME);
2076 rtw_write32(rtwdev, REG_RD_NAV_NXT, WLAN_NAV_CFG);
2077 rtw_write16(rtwdev, REG_RXTSF_OFFSET_CCK, WLAN_RX_TSF_CFG);
2078 	/* Set beacon control - enable TSF and other related functions */
2079 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2080 /* Set send beacon related registers */
2081 rtw_write32(rtwdev, REG_TBTT_PROHIBIT, WLAN_TBTT_TIME);
2082 rtw_write8(rtwdev, REG_DRVERLYINT, WLAN_DRV_EARLY_INT);
2083 rtw_write8(rtwdev, REG_BCN_CTRL_CLINT0, WLAN_BCN_CTRL_CLT0);
2084 rtw_write8(rtwdev, REG_BCNDMATIM, WLAN_BCN_DMA_TIME);
2085 rtw_write8(rtwdev, REG_BCN_MAX_ERR, WLAN_BCN_MAX_ERR);
2086
2087 /* WMAC configuration */
2088 rtw_write32(rtwdev, REG_MAR, WLAN_MULTI_ADDR);
2089 rtw_write32(rtwdev, REG_MAR + 4, WLAN_MULTI_ADDR);
2090 rtw_write8(rtwdev, REG_BBPSF_CTRL + 2, WLAN_RESP_TXRATE);
2091 rtw_write8(rtwdev, REG_ACKTO, WLAN_ACK_TO);
2092 rtw_write8(rtwdev, REG_ACKTO_CCK, WLAN_ACK_TO_CCK);
2093 rtw_write16(rtwdev, REG_EIFS, WLAN_EIFS_DUR_TUNE);
2094 rtw_write8(rtwdev, REG_NAV_CTRL + 2, WLAN_NAV_MAX);
2095 rtw_write8(rtwdev, REG_WMAC_TRXPTCL_CTL_H + 2, WLAN_BAR_ACK_TYPE);
2096 rtw_write32(rtwdev, REG_RXFLTMAP0, WLAN_RX_FILTER0);
2097 rtw_write16(rtwdev, REG_RXFLTMAP2, WLAN_RX_FILTER2);
2098 rtw_write32(rtwdev, REG_RCR, WLAN_RCR_CFG);
2099 rtw_write8(rtwdev, REG_RX_PKT_LIMIT, WLAN_RXPKT_MAX_SZ_512);
2100 rtw_write8(rtwdev, REG_TCR + 2, WLAN_TX_FUNC_CFG2);
2101 rtw_write8(rtwdev, REG_TCR + 1, WLAN_TX_FUNC_CFG1);
2102 rtw_write32_set(rtwdev, REG_GENERAL_OPTION, BIT_DUMMY_FCS_READY_MASK_EN);
2103 rtw_write32(rtwdev, REG_WMAC_OPTION_FUNCTION + 8, WLAN_MAC_OPT_FUNC2);
2104 rtw_write8(rtwdev, REG_WMAC_OPTION_FUNCTION_1, WLAN_MAC_OPT_NORM_FUNC1);
2105
2106 /* init low power */
2107 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL + 2) & 0xF00F;
2108 value16 |= (BIT_RXGCK_VHT_FIFOTHR(1) | BIT_RXGCK_HT_FIFOTHR(1) |
2109 BIT_RXGCK_OFDM_FIFOTHR(1) | BIT_RXGCK_CCK_FIFOTHR(1)) >> 16;
2110 rtw_write16(rtwdev, REG_RXPSF_CTRL + 2, value16);
2111 value16 = 0;
2112 value16 = BIT_SET_RXPSF_PKTLENTHR(value16, 1);
2113 value16 |= BIT_RXPSF_CTRLEN | BIT_RXPSF_VHTCHKEN | BIT_RXPSF_HTCHKEN
2114 | BIT_RXPSF_OFDMCHKEN | BIT_RXPSF_CCKCHKEN
2115 | BIT_RXPSF_OFDMRST;
2116 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2117 rtw_write32(rtwdev, REG_RXPSF_TYPE_CTRL, 0xFFFFFFFF);
2118 /* rx ignore configuration */
2119 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL);
2120 value16 &= ~(BIT_RXPSF_MHCHKEN | BIT_RXPSF_CCKRST |
2121 BIT_RXPSF_CONT_ERRCHKEN);
2122 value16 = BIT_SET_RXPSF_ERRTHR(value16, 0x07);
2123 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2124 rtw_write8_set(rtwdev, REG_SND_PTCL_CTRL,
2125 BIT_DIS_CHK_VHTSIGB_CRC);
2126
2127 /* Interrupt migration configuration */
2128 rtw_write32(rtwdev, REG_INT_MIG, WLAN_MAC_INT_MIG_CFG);
2129
2130 return 0;
2131 }
2132
2133 #define FWCD_SIZE_REG_8822C 0x2000
2134 #define FWCD_SIZE_DMEM_8822C 0x10000
2135 #define FWCD_SIZE_IMEM_8822C 0x10000
2136 #define FWCD_SIZE_EMEM_8822C 0x20000
2137 #define FWCD_SIZE_ROM_8822C 0x10000
2138
2139 static const u32 __fwcd_segs_8822c[] = {
2140 FWCD_SIZE_REG_8822C,
2141 FWCD_SIZE_DMEM_8822C,
2142 FWCD_SIZE_IMEM_8822C,
2143 FWCD_SIZE_EMEM_8822C,
2144 FWCD_SIZE_ROM_8822C,
2145 };
2146
2147 static const struct rtw_fwcd_segs rtw8822c_fwcd_segs = {
2148 .segs = __fwcd_segs_8822c,
2149 .num = ARRAY_SIZE(__fwcd_segs_8822c),
2150 };
2151
2152 static int rtw8822c_dump_fw_crash(struct rtw_dev *rtwdev)
2153 {
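/* Token-pasting helper: expands to a rtw_dump_fw() call whose OCP base
 * address, dump size and segment id are all derived from the memory region
 * name (DMEM/IMEM/EMEM/ROM).
 */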
2154 #define __dump_fw_8822c(_dev, _mem) \
2155 rtw_dump_fw(_dev, OCPBASE_ ## _mem ## _88XX, \
2156 FWCD_SIZE_ ## _mem ## _8822C, RTW_FWCD_ ## _mem)
2157 int ret;
2158
2159 ret = rtw_dump_reg(rtwdev, 0x0, FWCD_SIZE_REG_8822C);
2160 if (ret)
2161 return ret;
2162 ret = __dump_fw_8822c(rtwdev, DMEM);
2163 if (ret)
2164 return ret;
2165 ret = __dump_fw_8822c(rtwdev, IMEM);
2166 if (ret)
2167 return ret;
2168 ret = __dump_fw_8822c(rtwdev, EMEM);
2169 if (ret)
2170 return ret;
2171 ret = __dump_fw_8822c(rtwdev, ROM);
2172 if (ret)
2173 return ret;
2174
2175 return 0;
2176
2177 #undef __dump_fw_8822c
2178 }
2179
2180 static void rtw8822c_rstb_3wire(struct rtw_dev *rtwdev, bool enable)
2181 {
2182 if (enable) {
2183 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x1);
2184 rtw_write32_mask(rtwdev, REG_ANAPAR_A, BIT_ANAPAR_UPDATE, 0x1);
2185 rtw_write32_mask(rtwdev, REG_ANAPAR_B, BIT_ANAPAR_UPDATE, 0x1);
2186 } else {
2187 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x0);
2188 }
2189 }
2190
2191 static void rtw8822c_set_channel_rf(struct rtw_dev *rtwdev, u8 channel, u8 bw)
2192 {
2193 #define RF18_BAND_MASK (BIT(16) | BIT(9) | BIT(8))
2194 #define RF18_BAND_2G (0)
2195 #define RF18_BAND_5G (BIT(16) | BIT(8))
2196 #define RF18_CHANNEL_MASK (MASKBYTE0)
2197 #define RF18_RFSI_MASK (BIT(18) | BIT(17))
2198 #define RF18_RFSI_GE_CH80 (BIT(17))
2199 #define RF18_RFSI_GT_CH140 (BIT(18))
2200 #define RF18_BW_MASK (BIT(13) | BIT(12))
2201 #define RF18_BW_20M (BIT(13) | BIT(12))
2202 #define RF18_BW_40M (BIT(13))
2203 #define RF18_BW_80M (BIT(12))
2204
2205 u32 rf_reg18 = 0;
2206 u32 rf_rxbb = 0;
2207
2208 rf_reg18 = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
2209
2210 rf_reg18 &= ~(RF18_BAND_MASK | RF18_CHANNEL_MASK | RF18_RFSI_MASK |
2211 RF18_BW_MASK);
2212
2213 rf_reg18 |= (IS_CH_2G_BAND(channel) ? RF18_BAND_2G : RF18_BAND_5G);
2214 rf_reg18 |= (channel & RF18_CHANNEL_MASK);
2215 if (IS_CH_5G_BAND_4(channel))
2216 rf_reg18 |= RF18_RFSI_GT_CH140;
2217 else if (IS_CH_5G_BAND_3(channel))
2218 rf_reg18 |= RF18_RFSI_GE_CH80;
2219
2220 switch (bw) {
2221 case RTW_CHANNEL_WIDTH_5:
2222 case RTW_CHANNEL_WIDTH_10:
2223 case RTW_CHANNEL_WIDTH_20:
2224 default:
2225 rf_reg18 |= RF18_BW_20M;
2226 rf_rxbb = 0x18;
2227 break;
2228 case RTW_CHANNEL_WIDTH_40:
2229 /* RF bandwidth */
2230 rf_reg18 |= RF18_BW_40M;
2231 rf_rxbb = 0x10;
2232 break;
2233 case RTW_CHANNEL_WIDTH_80:
2234 rf_reg18 |= RF18_BW_80M;
2235 rf_rxbb = 0x8;
2236 break;
2237 }
2238
2239 rtw8822c_rstb_3wire(rtwdev, false);
2240
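	/* Push the new RX baseband bandwidth code into the RF LUT on both
	 * paths: LUTWE2 bit 2 appears to gate table writes, LUTWA entry 0x12
	 * selects the RXBB field and LUTWD0 takes the rf_rxbb value chosen
	 * above.
	 */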
2241 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x01);
2242 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWA, 0x1f, 0x12);
2243 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWD0, 0xfffff, rf_rxbb);
2244 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x00);
2245
2246 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x01);
2247 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWA, 0x1f, 0x12);
2248 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWD0, 0xfffff, rf_rxbb);
2249 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x00);
2250
2251 rtw_write_rf(rtwdev, RF_PATH_A, RF_CFGCH, RFREG_MASK, rf_reg18);
2252 rtw_write_rf(rtwdev, RF_PATH_B, RF_CFGCH, RFREG_MASK, rf_reg18);
2253
2254 rtw8822c_rstb_3wire(rtwdev, true);
2255 }
2256
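/* Briefly drop the initial gain index on both paths by 2 and restore it;
 * this presumably forces the AGC to re-settle after a channel or path change.
 */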
2257 static void rtw8822c_toggle_igi(struct rtw_dev *rtwdev)
2258 {
2259 u32 igi;
2260
2261 igi = rtw_read32_mask(rtwdev, REG_RXIGI, 0x7f);
2262 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi - 2);
2263 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi - 2);
2264 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi);
2265 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi);
2266 }
2267
2268 static void rtw8822c_set_channel_bb(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2269 u8 primary_ch_idx)
2270 {
2271 if (IS_CH_2G_BAND(channel)) {
2272 rtw_write32_clr(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2273 rtw_write32_set(rtwdev, REG_TXF4, BIT(20));
2274 rtw_write32_clr(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2275 rtw_write32_clr(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2276 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0xF);
2277
2278 switch (bw) {
2279 case RTW_CHANNEL_WIDTH_20:
2280 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2281 0x5);
2282 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2283 0x5);
2284 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2285 0x6);
2286 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2287 0x6);
2288 break;
2289 case RTW_CHANNEL_WIDTH_40:
2290 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2291 0x4);
2292 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2293 0x4);
2294 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2295 0x0);
2296 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2297 0x0);
2298 break;
2299 }
2300 if (channel == 13 || channel == 14)
2301 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x969);
2302 else if (channel == 11 || channel == 12)
2303 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x96a);
2304 else
2305 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x9aa);
2306 if (channel == 14) {
2307 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x3da0);
2308 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2309 0x4962c931);
2310 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x6aa3);
2311 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xaa7b);
2312 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xf3d7);
2313 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD, 0x0);
2314 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2315 0xff012455);
2316 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD, 0xffff);
2317 } else {
2318 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x5284);
2319 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2320 0x3e18fec8);
2321 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x0a88);
2322 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xacc4);
2323 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xc8b2);
2324 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD,
2325 0x00faf0de);
2326 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2327 0x00122344);
2328 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD,
2329 0x0fffffff);
2330 }
2331 if (channel == 13)
2332 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2333 else
2334 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x1);
2335 } else if (IS_CH_5G_BAND(channel)) {
2336 rtw_write32_set(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2337 rtw_write32_set(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2338 rtw_write32_set(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2339 rtw_write32_clr(rtwdev, REG_TXF4, BIT(20));
2340 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0x22);
2341 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2342 if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
2343 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2344 0x1);
2345 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2346 0x1);
2347 } else if (IS_CH_5G_BAND_3(channel)) {
2348 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2349 0x2);
2350 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2351 0x2);
2352 } else if (IS_CH_5G_BAND_4(channel)) {
2353 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2354 0x3);
2355 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2356 0x3);
2357 }
2358
2359 if (channel >= 36 && channel <= 51)
2360 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x494);
2361 else if (channel >= 52 && channel <= 55)
2362 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x493);
2363 else if (channel >= 56 && channel <= 111)
2364 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x453);
2365 else if (channel >= 112 && channel <= 119)
2366 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x452);
2367 else if (channel >= 120 && channel <= 172)
2368 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x412);
2369 else if (channel >= 173 && channel <= 177)
2370 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x411);
2371 }
2372
2373 switch (bw) {
2374 case RTW_CHANNEL_WIDTH_20:
2375 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x19B);
2376 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2377 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x0);
2378 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x7);
2379 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x6);
2380 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2381 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2382 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2383 break;
2384 case RTW_CHANNEL_WIDTH_40:
2385 rtw_write32_mask(rtwdev, REG_CCKSB, BIT(4),
2386 (primary_ch_idx == RTW_SC_20_UPPER ? 1 : 0));
2387 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x5);
2388 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2389 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2390 (primary_ch_idx | (primary_ch_idx << 4)));
2391 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x1);
2392 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2393 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2394 break;
2395 case RTW_CHANNEL_WIDTH_80:
2396 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0xa);
2397 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2398 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2399 (primary_ch_idx | (primary_ch_idx << 4)));
2400 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x6);
2401 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2402 break;
2403 case RTW_CHANNEL_WIDTH_5:
2404 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2405 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2406 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x1);
2407 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x4);
2408 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x4);
2409 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2410 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2411 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2412 break;
2413 case RTW_CHANNEL_WIDTH_10:
2414 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2415 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2416 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x2);
2417 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x6);
2418 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x5);
2419 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2420 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2421 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2422 break;
2423 }
2424 }
2425
2426 static void rtw8822c_set_channel(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2427 u8 primary_chan_idx)
2428 {
2429 rtw8822c_set_channel_bb(rtwdev, channel, bw, primary_chan_idx);
2430 rtw_set_channel_mac(rtwdev, channel, bw, primary_chan_idx);
2431 rtw8822c_set_channel_rf(rtwdev, channel, bw);
2432 rtw8822c_toggle_igi(rtwdev);
2433 }
2434
2435 static void rtw8822c_config_cck_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2436 {
2437 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2438 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x0);
2439 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x0);
2440 } else if (rx_path == BB_PATH_AB) {
2441 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x1);
2442 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x1);
2443 }
2444
2445 if (rx_path == BB_PATH_A)
2446 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x0);
2447 else if (rx_path == BB_PATH_B)
2448 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x5);
2449 else if (rx_path == BB_PATH_AB)
2450 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x1);
2451 }
2452
2453 static void rtw8822c_config_ofdm_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2454 {
2455 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2456 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x0);
2457 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x0);
2458 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x0);
2459 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x0);
2460 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x0);
2461 } else if (rx_path == BB_PATH_AB) {
2462 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x1);
2463 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x1);
2464 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x1);
2465 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x1);
2466 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x1);
2467 }
2468
2469 rtw_write32_mask(rtwdev, 0x824, 0x0f000000, rx_path);
2470 rtw_write32_mask(rtwdev, 0x824, 0x000f0000, rx_path);
2471 }
2472
2473 static void rtw8822c_config_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2474 {
2475 rtw8822c_config_cck_rx_path(rtwdev, rx_path);
2476 rtw8822c_config_ofdm_rx_path(rtwdev, rx_path);
2477 }
2478
2479 static void rtw8822c_config_cck_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2480 bool is_tx2_path)
2481 {
2482 if (tx_path == BB_PATH_A) {
2483 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2484 } else if (tx_path == BB_PATH_B) {
2485 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x4);
2486 } else {
2487 if (is_tx2_path)
2488 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0xc);
2489 else
2490 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2491 }
2492 rtw8822c_bb_reset(rtwdev);
2493 }
2494
2495 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2496 enum rtw_bb_path tx_path_sel_1ss)
2497 {
2498 if (tx_path == BB_PATH_A) {
2499 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
2500 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2501 } else if (tx_path == BB_PATH_B) {
2502 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
2503 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2504 } else {
2505 if (tx_path_sel_1ss == BB_PATH_AB) {
2506 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
2507 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
2508 } else if (tx_path_sel_1ss == BB_PATH_B) {
2509 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
2510 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2511 } else if (tx_path_sel_1ss == BB_PATH_A) {
2512 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
2513 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2514 }
2515 }
2516 rtw8822c_bb_reset(rtwdev);
2517 }
2518
2519 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2520 enum rtw_bb_path tx_path_sel_1ss,
2521 enum rtw_bb_path tx_path_cck,
2522 bool is_tx2_path)
2523 {
2524 rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
2525 rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
2526 rtw8822c_bb_reset(rtwdev);
2527 }
2528
2529 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
2530 u8 rx_path, bool is_tx2_path)
2531 {
2532 if ((tx_path | rx_path) & BB_PATH_A)
2533 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x33312);
2534 else
2535 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x11111);
2536 if ((tx_path | rx_path) & BB_PATH_B)
2537 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x33312);
2538 else
2539 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
2540
2541 rtw8822c_config_rx_path(rtwdev, rx_path);
2542 rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
2543 is_tx2_path);
2544
2545 rtw8822c_toggle_igi(rtwdev);
2546 }
2547
2548 static void query_phy_status_page0(struct rtw_dev *rtwdev, u8 *phy_status,
2549 struct rtw_rx_pkt_stat *pkt_stat)
2550 {
2551 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2552 u8 l_bnd, u_bnd;
2553 u8 gain_a, gain_b;
2554 s8 rx_power[RTW_RF_PATH_MAX];
2555 s8 min_rx_power = -120;
2556 u8 rssi;
2557 u8 channel;
2558 int path;
2559
2560 rx_power[RF_PATH_A] = GET_PHY_STAT_P0_PWDB_A(phy_status);
2561 rx_power[RF_PATH_B] = GET_PHY_STAT_P0_PWDB_B(phy_status);
2562 l_bnd = dm_info->cck_gi_l_bnd;
2563 u_bnd = dm_info->cck_gi_u_bnd;
2564 gain_a = GET_PHY_STAT_P0_GAIN_A(phy_status);
2565 gain_b = GET_PHY_STAT_P0_GAIN_B(phy_status);
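	/* Compensate the CCK PWDB when the reported AGC gain index falls
	 * outside the [l_bnd, u_bnd] window read at PHY init (two PWDB units
	 * per step); the value is then converted to an approximate dBm figure
	 * below by subtracting 110 (the usual Realtek PWDB convention;
	 * assumption, not spelled out here).
	 */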
2566 if (gain_a < l_bnd)
2567 rx_power[RF_PATH_A] += (l_bnd - gain_a) << 1;
2568 else if (gain_a > u_bnd)
2569 rx_power[RF_PATH_A] -= (gain_a - u_bnd) << 1;
2570 if (gain_b < l_bnd)
2571 rx_power[RF_PATH_B] += (l_bnd - gain_b) << 1;
2572 else if (gain_b > u_bnd)
2573 rx_power[RF_PATH_B] -= (gain_b - u_bnd) << 1;
2574
2575 rx_power[RF_PATH_A] -= 110;
2576 rx_power[RF_PATH_B] -= 110;
2577
2578 channel = GET_PHY_STAT_P0_CHANNEL(phy_status);
2579 if (channel != 0)
2580 rtw_set_rx_freq_band(pkt_stat, channel);
2581 else
2582 pkt_stat->channel_invalid = true;
2583
2584 pkt_stat->rx_power[RF_PATH_A] = rx_power[RF_PATH_A];
2585 pkt_stat->rx_power[RF_PATH_B] = rx_power[RF_PATH_B];
2586
2587 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
2588 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2589 dm_info->rssi[path] = rssi;
2590 }
2591
2592 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 1);
2593 pkt_stat->bw = RTW_CHANNEL_WIDTH_20;
2594 pkt_stat->signal_power = max(pkt_stat->rx_power[RF_PATH_A],
2595 min_rx_power);
2596 }
2597
2598 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
2599 struct rtw_rx_pkt_stat *pkt_stat)
2600 {
2601 struct rtw_path_div *p_div = &rtwdev->dm_path_div;
2602 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2603 u8 rxsc, bw;
2604 s8 min_rx_power = -120;
2605 s8 rx_evm;
2606 u8 evm_dbm = 0;
2607 u8 rssi;
2608 int path;
2609 u8 channel;
2610
2611 if (pkt_stat->rate > DESC_RATE11M && pkt_stat->rate < DESC_RATEMCS0)
2612 rxsc = GET_PHY_STAT_P1_L_RXSC(phy_status);
2613 else
2614 rxsc = GET_PHY_STAT_P1_HT_RXSC(phy_status);
2615
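	/* Map the reported RX subcarrier index to the occupied bandwidth:
	 * 0 means the frame filled the current channel width, 1..8 indicate a
	 * 20 MHz subchannel, 9..12 a 40 MHz half, anything larger 80 MHz.
	 */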
2616 if (rxsc == 0)
2617 bw = rtwdev->hal.current_band_width;
2618 else if (rxsc >= 1 && rxsc <= 8)
2619 bw = RTW_CHANNEL_WIDTH_20;
2620 else if (rxsc >= 9 && rxsc <= 12)
2621 bw = RTW_CHANNEL_WIDTH_40;
2622 else
2623 bw = RTW_CHANNEL_WIDTH_80;
2624
2625 channel = GET_PHY_STAT_P1_CHANNEL(phy_status);
2626 rtw_set_rx_freq_band(pkt_stat, channel);
2627
2628 pkt_stat->rx_power[RF_PATH_A] = GET_PHY_STAT_P1_PWDB_A(phy_status) - 110;
2629 pkt_stat->rx_power[RF_PATH_B] = GET_PHY_STAT_P1_PWDB_B(phy_status) - 110;
2630 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 2);
2631 pkt_stat->bw = bw;
2632 pkt_stat->signal_power = max3(pkt_stat->rx_power[RF_PATH_A],
2633 pkt_stat->rx_power[RF_PATH_B],
2634 min_rx_power);
2635
2636 dm_info->curr_rx_rate = pkt_stat->rate;
2637
2638 pkt_stat->rx_evm[RF_PATH_A] = GET_PHY_STAT_P1_RXEVM_A(phy_status);
2639 pkt_stat->rx_evm[RF_PATH_B] = GET_PHY_STAT_P1_RXEVM_B(phy_status);
2640
2641 pkt_stat->rx_snr[RF_PATH_A] = GET_PHY_STAT_P1_RXSNR_A(phy_status);
2642 pkt_stat->rx_snr[RF_PATH_B] = GET_PHY_STAT_P1_RXSNR_B(phy_status);
2643
2644 pkt_stat->cfo_tail[RF_PATH_A] = GET_PHY_STAT_P1_CFO_TAIL_A(phy_status);
2645 pkt_stat->cfo_tail[RF_PATH_B] = GET_PHY_STAT_P1_CFO_TAIL_B(phy_status);
2646
2647 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
2648 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2649 dm_info->rssi[path] = rssi;
2650 if (path == RF_PATH_A) {
2651 p_div->path_a_sum += rssi;
2652 p_div->path_a_cnt++;
2653 } else if (path == RF_PATH_B) {
2654 p_div->path_b_sum += rssi;
2655 p_div->path_b_cnt++;
2656 }
2657 dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
2658 dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
2659
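		/* rx_evm appears to be reported as a negative value in
		 * half-dB steps; negate and halve it to get |EVM| in dB, and
		 * keep 0 for the S8_MIN sentinel to avoid overflowing the
		 * negation.
		 */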
2660 rx_evm = pkt_stat->rx_evm[path];
2661
2662 if (rx_evm < 0) {
2663 if (rx_evm == S8_MIN)
2664 evm_dbm = 0;
2665 else
2666 evm_dbm = ((u8)-rx_evm >> 1);
2667 }
2668 dm_info->rx_evm_dbm[path] = evm_dbm;
2669 }
2670 rtw_phy_parsing_cfo(rtwdev, pkt_stat);
2671 }
2672
2673 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
2674 struct rtw_rx_pkt_stat *pkt_stat)
2675 {
2676 u8 page;
2677
2678 page = *phy_status & 0xf;
2679
2680 switch (page) {
2681 case 0:
2682 query_phy_status_page0(rtwdev, phy_status, pkt_stat);
2683 break;
2684 case 1:
2685 query_phy_status_page1(rtwdev, phy_status, pkt_stat);
2686 break;
2687 default:
2688 rtw_warn(rtwdev, "unused phy status page (%d)\n", page);
2689 return;
2690 }
2691 }
2692
2693 static void
2694 rtw8822c_set_write_tx_power_ref(struct rtw_dev *rtwdev, u8 *tx_pwr_ref_cck,
2695 u8 *tx_pwr_ref_ofdm)
2696 {
2697 struct rtw_hal *hal = &rtwdev->hal;
2698 u32 txref_cck[2] = {0x18a0, 0x41a0};
2699 u32 txref_ofdm[2] = {0x18e8, 0x41e8};
2700 u8 path;
2701
2702 for (path = 0; path < hal->rf_path_num; path++) {
2703 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2704 rtw_write32_mask(rtwdev, txref_cck[path], 0x7f0000,
2705 tx_pwr_ref_cck[path]);
2706 }
2707 for (path = 0; path < hal->rf_path_num; path++) {
2708 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2709 rtw_write32_mask(rtwdev, txref_ofdm[path], 0x1fc00,
2710 tx_pwr_ref_ofdm[path]);
2711 }
2712 }
2713
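/* Write the per-rate TX power offsets: four rates share one 32-bit TXAGC
 * register, each as a 7-bit signed difference from the per-path reference
 * power programmed by rtw8822c_set_write_tx_power_ref().
 */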
2714 static void rtw8822c_set_tx_power_diff(struct rtw_dev *rtwdev, u8 rate,
2715 s8 *diff_idx)
2716 {
2717 u32 offset_txagc = 0x3a00;
2718 u8 rate_idx = rate & 0xfc;
2719 u8 pwr_idx[4];
2720 u32 phy_pwr_idx;
2721 int i;
2722
2723 for (i = 0; i < 4; i++)
2724 pwr_idx[i] = diff_idx[i] & 0x7f;
2725
2726 phy_pwr_idx = pwr_idx[0] |
2727 (pwr_idx[1] << 8) |
2728 (pwr_idx[2] << 16) |
2729 (pwr_idx[3] << 24);
2730
2731 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0x0);
2732 rtw_write32_mask(rtwdev, offset_txagc + rate_idx, MASKDWORD,
2733 phy_pwr_idx);
2734 }
2735
2736 static void rtw8822c_set_tx_power_index(struct rtw_dev *rtwdev)
2737 {
2738 struct rtw_hal *hal = &rtwdev->hal;
2739 u8 rs, rate, j;
2740 u8 pwr_ref_cck[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATE11M],
2741 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATE11M]};
2742 u8 pwr_ref_ofdm[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATEMCS7],
2743 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATEMCS7]};
2744 s8 diff_a, diff_b;
2745 u8 pwr_a, pwr_b;
2746 s8 diff_idx[4];
2747
2748 rtw8822c_set_write_tx_power_ref(rtwdev, pwr_ref_cck, pwr_ref_ofdm);
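	/* For every rate section, take the per-rate difference from the CCK or
	 * OFDM reference power on each path, keep the smaller of the path A/B
	 * values, and flush the diffs to hardware once a group of four rates
	 * is complete.
	 */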
2749 for (rs = 0; rs <= __RTW_RATE_SECTION_2SS_MAX; rs++) {
2750 for (j = 0; j < rtw_rate_size[rs]; j++) {
2751 rate = rtw_rate_section[rs][j];
2752 pwr_a = hal->tx_pwr_tbl[RF_PATH_A][rate];
2753 pwr_b = hal->tx_pwr_tbl[RF_PATH_B][rate];
2754 if (rs == 0) {
2755 diff_a = (s8)pwr_a - (s8)pwr_ref_cck[0];
2756 diff_b = (s8)pwr_b - (s8)pwr_ref_cck[1];
2757 } else {
2758 diff_a = (s8)pwr_a - (s8)pwr_ref_ofdm[0];
2759 diff_b = (s8)pwr_b - (s8)pwr_ref_ofdm[1];
2760 }
2761 diff_idx[rate % 4] = min(diff_a, diff_b);
2762 if (rate % 4 == 3)
2763 rtw8822c_set_tx_power_diff(rtwdev, rate - 3,
2764 diff_idx);
2765 }
2766 }
2767 }
2768
2769 static int rtw8822c_set_antenna(struct rtw_dev *rtwdev,
2770 int radio_idx,
2771 u32 antenna_tx,
2772 u32 antenna_rx)
2773 {
2774 struct rtw_hal *hal = &rtwdev->hal;
2775
2776 switch (antenna_tx) {
2777 case BB_PATH_A:
2778 case BB_PATH_B:
2779 case BB_PATH_AB:
2780 break;
2781 default:
2782 rtw_warn(rtwdev, "unsupported tx path 0x%x\n", antenna_tx);
2783 return -EINVAL;
2784 }
2785
2786 	/* RX on path B alone is not supported */
2787 switch (antenna_rx) {
2788 case BB_PATH_A:
2789 case BB_PATH_AB:
2790 break;
2791 default:
2792 rtw_warn(rtwdev, "unsupported rx path 0x%x\n", antenna_rx);
2793 return -EINVAL;
2794 }
2795
2796 hal->antenna_tx = antenna_tx;
2797 hal->antenna_rx = antenna_rx;
2798
2799 rtw8822c_config_trx_mode(rtwdev, antenna_tx, antenna_rx, false);
2800
2801 return 0;
2802 }
2803
2804 static void rtw8822c_cfg_ldo25(struct rtw_dev *rtwdev, bool enable)
2805 {
2806 u8 ldo_pwr;
2807
2808 ldo_pwr = rtw_read8(rtwdev, REG_ANAPARLDO_POW_MAC);
2809 ldo_pwr = enable ? ldo_pwr | BIT_LDOE25_PON : ldo_pwr & ~BIT_LDOE25_PON;
2810 rtw_write8(rtwdev, REG_ANAPARLDO_POW_MAC, ldo_pwr);
2811 }
2812
2813 static void rtw8822c_false_alarm_statistics(struct rtw_dev *rtwdev)
2814 {
2815 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2816 u32 cck_enable;
2817 u32 cck_fa_cnt;
2818 u32 crc32_cnt;
2819 u32 cca32_cnt;
2820 u32 ofdm_fa_cnt;
2821 u32 ofdm_fa_cnt1, ofdm_fa_cnt2, ofdm_fa_cnt3, ofdm_fa_cnt4, ofdm_fa_cnt5;
2822 u16 parity_fail, rate_illegal, crc8_fail, mcs_fail, sb_search_fail,
2823 fast_fsync, crc8_fail_vhta, mcs_fail_vht;
2824
2825 cck_enable = rtw_read32(rtwdev, REG_ENCCK) & BIT_CCK_BLK_EN;
2826 cck_fa_cnt = rtw_read16(rtwdev, REG_CCK_FACNT);
2827
2828 ofdm_fa_cnt1 = rtw_read32(rtwdev, REG_OFDM_FACNT1);
2829 ofdm_fa_cnt2 = rtw_read32(rtwdev, REG_OFDM_FACNT2);
2830 ofdm_fa_cnt3 = rtw_read32(rtwdev, REG_OFDM_FACNT3);
2831 ofdm_fa_cnt4 = rtw_read32(rtwdev, REG_OFDM_FACNT4);
2832 ofdm_fa_cnt5 = rtw_read32(rtwdev, REG_OFDM_FACNT5);
2833
2834 parity_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt1);
2835 rate_illegal = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt2);
2836 crc8_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt2);
2837 crc8_fail_vhta = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt3);
2838 mcs_fail = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt4);
2839 mcs_fail_vht = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt4);
2840 fast_fsync = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt5);
2841 sb_search_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt5);
2842
2843 ofdm_fa_cnt = parity_fail + rate_illegal + crc8_fail + crc8_fail_vhta +
2844 mcs_fail + mcs_fail_vht + fast_fsync + sb_search_fail;
2845
2846 dm_info->cck_fa_cnt = cck_fa_cnt;
2847 dm_info->ofdm_fa_cnt = ofdm_fa_cnt;
2848 dm_info->total_fa_cnt = ofdm_fa_cnt;
2849 dm_info->total_fa_cnt += cck_enable ? cck_fa_cnt : 0;
2850
2851 crc32_cnt = rtw_read32(rtwdev, 0x2c04);
2852 dm_info->cck_ok_cnt = crc32_cnt & 0xffff;
2853 dm_info->cck_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2854 crc32_cnt = rtw_read32(rtwdev, 0x2c14);
2855 dm_info->ofdm_ok_cnt = crc32_cnt & 0xffff;
2856 dm_info->ofdm_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2857 crc32_cnt = rtw_read32(rtwdev, 0x2c10);
2858 dm_info->ht_ok_cnt = crc32_cnt & 0xffff;
2859 dm_info->ht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2860 crc32_cnt = rtw_read32(rtwdev, 0x2c0c);
2861 dm_info->vht_ok_cnt = crc32_cnt & 0xffff;
2862 dm_info->vht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2863
2864 cca32_cnt = rtw_read32(rtwdev, 0x2c08);
2865 dm_info->ofdm_cca_cnt = ((cca32_cnt & 0xffff0000) >> 16);
2866 dm_info->cck_cca_cnt = cca32_cnt & 0xffff;
2867 dm_info->total_cca_cnt = dm_info->ofdm_cca_cnt;
2868 if (cck_enable)
2869 dm_info->total_cca_cnt += dm_info->cck_cca_cnt;
2870
2871 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 0);
2872 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 2);
2873 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 0);
2874 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 2);
2875
2876 /* disable rx clk gating to reset counters */
2877 rtw_write32_clr(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2878 rtw_write32_set(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2879 rtw_write32_clr(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2880 rtw_write32_set(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2881 }
2882
2883 static void rtw8822c_do_lck(struct rtw_dev *rtwdev)
2884 {
2885 u32 val;
2886
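	/* Rough outline of the LCK sequence (inferred from the register names,
	 * not documented here): power the synthesizer PFD, kick the AAC
	 * calibration, poll RF_AAC_CTRL until it reports done, then pulse the
	 * fast-lock engine to latch the result.
	 */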
2887 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2888 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0FA);
2889 fsleep(1);
2890 rtw_write_rf(rtwdev, RF_PATH_A, RF_AAC_CTRL, RFREG_MASK, 0x80000);
2891 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_AAC, RFREG_MASK, 0x80001);
2892 read_poll_timeout(rtw_read_rf, val, val != 0x1, 1000, 100000,
2893 true, rtwdev, RF_PATH_A, RF_AAC_CTRL, 0x1000);
2894 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0F8);
2895 rtw_write_rf(rtwdev, RF_PATH_B, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2896
2897 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2898 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x4f000);
2899 fsleep(1);
2900 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2901 }
2902
2903 static void rtw8822c_do_iqk(struct rtw_dev *rtwdev)
2904 {
2905 struct rtw_iqk_para para = {0};
2906 u8 iqk_chk;
2907 int ret;
2908
2909 para.clear = 1;
2910 	rtw_fw_do_iqk(rtwdev, &para);
2911
2912 ret = read_poll_timeout(rtw_read8, iqk_chk, iqk_chk == IQK_DONE_8822C,
2913 20000, 300000, false, rtwdev, REG_RPT_CIP);
2914 if (ret)
2915 rtw_warn(rtwdev, "failed to poll iqk status bit\n");
2916
2917 rtw_write8(rtwdev, REG_IQKSTAT, 0x0);
2918 }
2919
2920 /* for coex */
2921 static void rtw8822c_coex_cfg_init(struct rtw_dev *rtwdev)
2922 {
2923 	/* enable TBTT interrupt */
2924 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2925
2926 /* BT report packet sample rate */
2927 /* 0x790[5:0]=0x5 */
2928 rtw_write8_mask(rtwdev, REG_BT_TDMA_TIME, BIT_MASK_SAMPLE_RATE, 0x5);
2929
2930 /* enable BT counter statistics */
2931 rtw_write8(rtwdev, REG_BT_STAT_CTRL, 0x1);
2932
2933 	/* enable PTA (3-wire function from BT side) */
2934 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_BT_PTA_EN);
2935 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_PO_BT_PTA_PINS);
2936
2937 	/* enable PTA (tx/rx signal from WiFi side) */
2938 rtw_write8_set(rtwdev, REG_QUEUE_CTRL, BIT_PTA_WL_TX_EN);
2939 	/* WL TX signal to PTA is not gated by EDCCA */
2940 rtw_write8_clr(rtwdev, REG_QUEUE_CTRL, BIT_PTA_EDCCA_EN);
2941 	/* GNT_BT = 1 while both are selected */
2942 rtw_write16_set(rtwdev, REG_BT_COEX_V2, BIT_GNT_BT_POLARITY);
2943 /* BT_CCA = ~GNT_WL_BB, not or GNT_BT_BB, LTE_Rx */
2944 rtw_write8_clr(rtwdev, REG_DUMMY_PAGE4_V1, BIT_BTCCA_CTRL);
2945
2946 /* to avoid RF parameter error */
2947 rtw_write_rf(rtwdev, RF_PATH_B, RF_MODOPT, 0xfffff, 0x40000);
2948 }
2949
2950 static void rtw8822c_coex_cfg_gnt_fix(struct rtw_dev *rtwdev)
2951 {
2952 struct rtw_coex *coex = &rtwdev->coex;
2953 struct rtw_coex_stat *coex_stat = &coex->stat;
2954 struct rtw_efuse *efuse = &rtwdev->efuse;
2955 u32 rf_0x1;
2956
2957 if (coex_stat->gnt_workaround_state == coex_stat->wl_coex_mode)
2958 return;
2959
2960 coex_stat->gnt_workaround_state = coex_stat->wl_coex_mode;
2961
2962 if ((coex_stat->kt_ver == 0 && coex->under_5g) || coex->freerun)
2963 rf_0x1 = 0x40021;
2964 else
2965 rf_0x1 = 0x40000;
2966
2967 /* BT at S1 for Shared-Ant */
2968 if (efuse->share_ant)
2969 rf_0x1 |= BIT(13);
2970
2971 rtw_write_rf(rtwdev, RF_PATH_B, 0x1, 0xfffff, rf_0x1);
2972
2973 /* WL-S0 2G RF TRX cannot be masked by GNT_BT
2974 	 * enable "WLS0 BB change RF mode if GNT_BT = 1" for shared-antenna type
2975 * disable:0x1860[3] = 1, enable:0x1860[3] = 0
2976 *
2977 * enable "DAC off if GNT_WL = 0" for non-shared-antenna
2978 * disable 0x1c30[22] = 0,
2979 * enable: 0x1c30[22] = 1, 0x1c38[12] = 0, 0x1c38[28] = 1
2980 */
2981 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
2982 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
2983 BIT_ANAPAR_BTPS >> 16, 0);
2984 } else {
2985 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
2986 BIT_ANAPAR_BTPS >> 16, 1);
2987 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 1,
2988 BIT_DAC_OFF_ENABLE, 0);
2989 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 3,
2990 BIT_DAC_OFF_ENABLE, 1);
2991 }
2992
2993 	/* disable WL-S1 BB change RF mode if GNT_BT
2994 * since RF TRx mask can do it
2995 */
2996 rtw_write8_mask(rtwdev, REG_IGN_GNTBT4,
2997 BIT_PI_IGNORE_GNT_BT, 1);
2998
2999 	/* disable WL-S0 BB change RF mode if wifi is at 5G,
3000 * or antenna path is separated
3001 */
3002 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3003 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3004 BIT_PI_IGNORE_GNT_BT, 1);
3005 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3006 BIT_NOMASK_TXBT_ENABLE, 1);
3007 } else if (coex_stat->wl_coex_mode == COEX_WLINK_5G ||
3008 coex->under_5g || !efuse->share_ant) {
3009 if (coex_stat->kt_ver >= 3) {
3010 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3011 BIT_PI_IGNORE_GNT_BT, 0);
3012 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3013 BIT_NOMASK_TXBT_ENABLE, 1);
3014 } else {
3015 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3016 BIT_PI_IGNORE_GNT_BT, 1);
3017 }
3018 } else {
3019 /* shared-antenna */
3020 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3021 BIT_PI_IGNORE_GNT_BT, 0);
3022 if (coex_stat->kt_ver >= 3) {
3023 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3024 BIT_NOMASK_TXBT_ENABLE, 0);
3025 }
3026 }
3027 }
3028
3029 static void rtw8822c_coex_cfg_gnt_debug(struct rtw_dev *rtwdev)
3030 {
3031 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 2, BIT_BTGP_SPI_EN >> 16, 0);
3032 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 3, BIT_BTGP_JTAG_EN >> 24, 0);
3033 rtw_write8_mask(rtwdev, REG_GPIO_MUXCFG + 2, BIT_FSPI_EN >> 16, 0);
3034 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 1, BIT_LED1DIS >> 8, 0);
3035 rtw_write8_mask(rtwdev, REG_SYS_SDIO_CTRL + 3, BIT_DBG_GNT_WL_BT >> 24, 0);
3036 }
3037
3038 static void rtw8822c_coex_cfg_rfe_type(struct rtw_dev *rtwdev)
3039 {
3040 struct rtw_coex *coex = &rtwdev->coex;
3041 struct rtw_coex_rfe *coex_rfe = &coex->rfe;
3042 struct rtw_efuse *efuse = &rtwdev->efuse;
3043
3044 coex_rfe->rfe_module_type = rtwdev->efuse.rfe_option;
3045 coex_rfe->ant_switch_polarity = 0;
3046 coex_rfe->ant_switch_exist = false;
3047 coex_rfe->ant_switch_with_bt = false;
3048 coex_rfe->ant_switch_diversity = false;
3049
3050 if (efuse->share_ant)
3051 coex_rfe->wlg_at_btg = true;
3052 else
3053 coex_rfe->wlg_at_btg = false;
3054
3055 	/* disable LTE coex on the wifi side */
3056 rtw_coex_write_indirect_reg(rtwdev, LTE_COEX_CTRL, BIT_LTE_COEX_EN, 0x0);
3057 rtw_coex_write_indirect_reg(rtwdev, LTE_WL_TRX_CTRL, MASKLWORD, 0xffff);
3058 rtw_coex_write_indirect_reg(rtwdev, LTE_BT_TRX_CTRL, MASKLWORD, 0xffff);
3059 }
3060
3061 static void rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev *rtwdev, u8 wl_pwr)
3062 {
3063 struct rtw_coex *coex = &rtwdev->coex;
3064 struct rtw_coex_dm *coex_dm = &coex->dm;
3065
3066 if (wl_pwr == coex_dm->cur_wl_pwr_lvl)
3067 return;
3068
3069 coex_dm->cur_wl_pwr_lvl = wl_pwr;
3070 }
3071
3072 static void rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev *rtwdev, bool low_gain)
3073 {
3074 struct rtw_coex *coex = &rtwdev->coex;
3075 struct rtw_coex_dm *coex_dm = &coex->dm;
3076
3077 if (low_gain == coex_dm->cur_wl_rx_low_gain_en)
3078 return;
3079
3080 coex_dm->cur_wl_rx_low_gain_en = low_gain;
3081
3082 if (coex_dm->cur_wl_rx_low_gain_en) {
3083 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table On!\n");
3084
3085 /* set Rx filter corner RCK offset */
3086 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x22);
3087 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x36);
3088 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x22);
3089 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x36);
3090
3091 } else {
3092 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table Off!\n");
3093
3094 /* set Rx filter corner RCK offset */
3095 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x20);
3096 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x0);
3097 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x20);
3098 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x0);
3099 }
3100 }
3101
3102 static void rtw8822c_bf_enable_bfee_su(struct rtw_dev *rtwdev,
3103 struct rtw_vif *vif,
3104 struct rtw_bfee *bfee)
3105 {
3106 u8 csi_rsc = 0;
3107 u32 tmp6dc;
3108
3109 rtw_bf_enable_bfee_su(rtwdev, vif, bfee);
3110
3111 tmp6dc = rtw_read32(rtwdev, REG_BBPSF_CTRL) |
3112 BIT_WMAC_USE_NDPARATE |
3113 (csi_rsc << 13);
3114 if (vif->net_type == RTW_NET_AP_MODE)
3115 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc | BIT(12));
3116 else
3117 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc & ~BIT(12));
3118
3119 rtw_write32(rtwdev, REG_CSI_RRSR, 0x550);
3120 }
3121
3122 static void rtw8822c_bf_config_bfee_su(struct rtw_dev *rtwdev,
3123 struct rtw_vif *vif,
3124 struct rtw_bfee *bfee, bool enable)
3125 {
3126 if (enable)
3127 rtw8822c_bf_enable_bfee_su(rtwdev, vif, bfee);
3128 else
3129 rtw_bf_remove_bfee_su(rtwdev, bfee);
3130 }
3131
3132 static void rtw8822c_bf_config_bfee_mu(struct rtw_dev *rtwdev,
3133 struct rtw_vif *vif,
3134 struct rtw_bfee *bfee, bool enable)
3135 {
3136 if (enable)
3137 rtw_bf_enable_bfee_mu(rtwdev, vif, bfee);
3138 else
3139 rtw_bf_remove_bfee_mu(rtwdev, bfee);
3140 }
3141
3142 static void rtw8822c_bf_config_bfee(struct rtw_dev *rtwdev, struct rtw_vif *vif,
3143 struct rtw_bfee *bfee, bool enable)
3144 {
3145 if (bfee->role == RTW_BFEE_SU)
3146 rtw8822c_bf_config_bfee_su(rtwdev, vif, bfee, enable);
3147 else if (bfee->role == RTW_BFEE_MU)
3148 rtw8822c_bf_config_bfee_mu(rtwdev, vif, bfee, enable);
3149 else
3150 rtw_warn(rtwdev, "wrong bfee role\n");
3151 }
3152
3153 struct dpk_cfg_pair {
3154 u32 addr;
3155 u32 bitmask;
3156 u32 data;
3157 };
3158
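/* A DPK table is a flat array of {addr, bitmask, data} triplets; apply each
 * entry as a masked register write.
 */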
3159 void rtw8822c_parse_tbl_dpk(struct rtw_dev *rtwdev,
3160 const struct rtw_table *tbl)
3161 {
3162 const struct dpk_cfg_pair *p = tbl->data;
3163 const struct dpk_cfg_pair *end = p + tbl->size / 3;
3164
3165 BUILD_BUG_ON(sizeof(struct dpk_cfg_pair) != sizeof(u32) * 3);
3166
3167 for (; p < end; p++)
3168 rtw_write32_mask(rtwdev, p->addr, p->bitmask, p->data);
3169 }
3170
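/* Before a calibration one-shot, save the current GNT control state (0x70 and
 * the indirect 0x38 register) and force GNT_WL so BT cannot take the shared
 * path mid-calibration; afterwards restore the saved values.
 */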
3171 static void rtw8822c_dpk_set_gnt_wl(struct rtw_dev *rtwdev, bool is_before_k)
3172 {
3173 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3174
3175 if (is_before_k) {
3176 dpk_info->gnt_control = rtw_read32(rtwdev, 0x70);
3177 dpk_info->gnt_value = rtw_coex_read_indirect_reg(rtwdev, 0x38);
3178 rtw_write32_mask(rtwdev, 0x70, BIT(26), 0x1);
3179 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKBYTE1, 0x77);
3180 } else {
3181 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKDWORD,
3182 dpk_info->gnt_value);
3183 rtw_write32(rtwdev, 0x70, dpk_info->gnt_control);
3184 }
3185 }
3186
3187 static void
3188 rtw8822c_dpk_restore_registers(struct rtw_dev *rtwdev, u32 reg_num,
3189 struct rtw_backup_info *bckp)
3190 {
3191 rtw_restore_reg(rtwdev, bckp, reg_num);
3192 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3193 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0x4);
3194 }
3195
3196 static void
3197 rtw8822c_dpk_backup_registers(struct rtw_dev *rtwdev, u32 *reg,
3198 u32 reg_num, struct rtw_backup_info *bckp)
3199 {
3200 u32 i;
3201
3202 for (i = 0; i < reg_num; i++) {
3203 bckp[i].len = 4;
3204 bckp[i].reg = reg[i];
3205 bckp[i].val = rtw_read32(rtwdev, reg[i]);
3206 }
3207 }
3208
3209 static void rtw8822c_dpk_backup_rf_registers(struct rtw_dev *rtwdev,
3210 u32 *rf_reg,
3211 u32 rf_reg_bak[][2])
3212 {
3213 u32 i;
3214
3215 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3216 rf_reg_bak[i][RF_PATH_A] = rtw_read_rf(rtwdev, RF_PATH_A,
3217 rf_reg[i], RFREG_MASK);
3218 rf_reg_bak[i][RF_PATH_B] = rtw_read_rf(rtwdev, RF_PATH_B,
3219 rf_reg[i], RFREG_MASK);
3220 }
3221 }
3222
3223 static void rtw8822c_dpk_reload_rf_registers(struct rtw_dev *rtwdev,
3224 u32 *rf_reg,
3225 u32 rf_reg_bak[][2])
3226 {
3227 u32 i;
3228
3229 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3230 rtw_write_rf(rtwdev, RF_PATH_A, rf_reg[i], RFREG_MASK,
3231 rf_reg_bak[i][RF_PATH_A]);
3232 rtw_write_rf(rtwdev, RF_PATH_B, rf_reg[i], RFREG_MASK,
3233 rf_reg_bak[i][RF_PATH_B]);
3234 }
3235 }
3236
3237 static void rtw8822c_dpk_information(struct rtw_dev *rtwdev)
3238 {
3239 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3240 u32 reg;
3241 u8 band_shift;
3242
3243 reg = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
3244
3245 band_shift = FIELD_GET(BIT(16), reg);
3246 dpk_info->dpk_band = 1 << band_shift;
3247 dpk_info->dpk_ch = FIELD_GET(0xff, reg);
3248 dpk_info->dpk_bw = FIELD_GET(0x3000, reg);
3249 }
3250
3251 static void rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev *rtwdev, u8 path)
3252 {
3253 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3254 udelay(5);
3255 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84801);
3256 usleep_range(600, 610);
3257 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3258 }
3259
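/* Read the residual DC offset (I/Q) and the correlation index from the report
 * register; return 1 if either value falls outside the window accepted for
 * DPK, so the caller can redo the RXBB DC calibration.
 */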
3260 static u8 rtw8822c_dpk_dc_corr_check(struct rtw_dev *rtwdev, u8 path)
3261 {
3262 u16 dc_i, dc_q;
3263 u8 corr_idx;
3264
3265 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000900f0);
3266 dc_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3267 dc_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(11, 0));
3268
3269 if (dc_i & BIT(11))
3270 dc_i = 0x1000 - dc_i;
3271 if (dc_q & BIT(11))
3272 dc_q = 0x1000 - dc_q;
3273
3274 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3275 corr_idx = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(7, 0));
3276 rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(15, 8));
3277
3278 if (dc_i > 200 || dc_q > 200 || corr_idx < 40 || corr_idx > 65)
3279 return 1;
3280 else
3281 return 0;
3282
3283 }
3284
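/* Pause TX and poll (2 us x up to 2500 iterations, roughly 5 ms) until
 * neither RF path reports mode 2 in RF reg 0x00[19:16] before DPK starts
 * reconfiguring the paths.
 */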
3285 static void rtw8822c_dpk_tx_pause(struct rtw_dev *rtwdev)
3286 {
3287 u8 reg_a, reg_b;
3288 u16 count = 0;
3289
3290 rtw_write8(rtwdev, 0x522, 0xff);
3291 rtw_write32_mask(rtwdev, 0x1e70, 0xf, 0x2);
3292
3293 do {
3294 reg_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A, 0x00, 0xf0000);
3295 reg_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B, 0x00, 0xf0000);
3296 udelay(2);
3297 count++;
3298 } while ((reg_a == 2 || reg_b == 2) && count < 2500);
3299 }
3300
3301 static void rtw8822c_dpk_mac_bb_setting(struct rtw_dev *rtwdev)
3302 {
3303 rtw8822c_dpk_tx_pause(rtwdev);
3304 rtw_load_table(rtwdev, &rtw8822c_dpk_mac_bb_tbl);
3305 }
3306
3307 static void rtw8822c_dpk_afe_setting(struct rtw_dev *rtwdev, bool is_do_dpk)
3308 {
3309 if (is_do_dpk)
3310 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_is_dpk_tbl);
3311 else
3312 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_no_dpk_tbl);
3313 }
3314
3315 static void rtw8822c_dpk_pre_setting(struct rtw_dev *rtwdev)
3316 {
3317 u8 path;
3318
3319 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3320 rtw_write_rf(rtwdev, path, RF_RXAGC_OFFSET, RFREG_MASK, 0x0);
3321 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3322 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G)
3323 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
3324 else
3325 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
3326 rtw_write32_mask(rtwdev, REG_DPD_LUT0, BIT_GLOSS_DB, 0x4);
3327 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x3);
3328 }
3329 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3330 rtw_write32(rtwdev, REG_DPD_CTL11, 0x3b23170b);
3331 rtw_write32(rtwdev, REG_DPD_CTL12, 0x775f5347);
3332 }
3333
3334 static u32 rtw8822c_dpk_rf_setting(struct rtw_dev *rtwdev, u8 path)
3335 {
3336 u32 ori_txbb;
3337
3338 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50017);
3339 ori_txbb = rtw_read_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK);
3340
3341 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
3342 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_PWR_TRIM, 0x1);
3343 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_BB_GAIN, 0x0);
3344 rtw_write_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK, ori_txbb);
3345
3346 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G) {
3347 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x1);
3348 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x0);
3349 } else {
3350 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x0);
3351 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x6);
3352 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
3353 rtw_write_rf(rtwdev, path, RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0);
3354 }
3355
3356 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3357 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
3358 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
3359
3360 if (rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80)
3361 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x2);
3362 else
3363 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
3364
3365 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT(1), 0x1);
3366
3367 usleep_range(100, 110);
3368
3369 return ori_txbb & 0x1f;
3370 }
3371
3372 static u16 rtw8822c_dpk_get_cmd(struct rtw_dev *rtwdev, u8 action, u8 path)
3373 {
3374 u16 cmd;
3375 u8 bw = rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80 ? 2 : 0;
3376
3377 switch (action) {
3378 case RTW_DPK_GAIN_LOSS:
3379 cmd = 0x14 + path;
3380 break;
3381 case RTW_DPK_DO_DPK:
3382 cmd = 0x16 + path + bw;
3383 break;
3384 case RTW_DPK_DPK_ON:
3385 cmd = 0x1a + path;
3386 break;
3387 case RTW_DPK_DAGC:
3388 cmd = 0x1c + path + bw;
3389 break;
3390 default:
3391 return 0;
3392 }
3393
3394 return (cmd << 8) | 0x48;
3395 }
3396
3397 static u8 rtw8822c_dpk_one_shot(struct rtw_dev *rtwdev, u8 path, u8 action)
3398 {
3399 u16 dpk_cmd;
3400 u8 result = 0;
3401
3402 rtw8822c_dpk_set_gnt_wl(rtwdev, true);
3403
3404 if (action == RTW_DPK_CAL_PWR) {
3405 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x1);
3406 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x0);
3407 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3408 msleep(10);
3409 if (!check_hw_ready(rtwdev, REG_STAT_RPT, BIT(31), 0x1)) {
3410 result = 1;
3411 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3412 }
3413 } else {
3414 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3415 0x8 | (path << 1));
3416 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3417
3418 dpk_cmd = rtw8822c_dpk_get_cmd(rtwdev, action, path);
3419 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd);
3420 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd + 1);
3421 msleep(10);
3422 if (!check_hw_ready(rtwdev, 0x2d9c, 0xff, 0x55)) {
3423 result = 1;
3424 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3425 }
3426 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3427 0x8 | (path << 1));
3428 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3429 }
3430
3431 rtw8822c_dpk_set_gnt_wl(rtwdev, false);
3432
3433 rtw_write8(rtwdev, 0x1b10, 0x0);
3434
3435 return result;
3436 }
3437
3438 static u16 rtw8822c_dpk_dgain_read(struct rtw_dev *rtwdev, u8 path)
3439 {
3440 u16 dgain;
3441
3442 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3443 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, 0x00ff0000, 0x0);
3444
3445 dgain = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3446
3447 return dgain;
3448 }
3449
3450 static u8 rtw8822c_dpk_thermal_read(struct rtw_dev *rtwdev, u8 path)
3451 {
3452 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3453 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x0);
3454 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3455 udelay(15);
3456
3457 return (u8)rtw_read_rf(rtwdev, path, RF_T_METER, 0x0007e);
3458 }
3459
3460 static u32 rtw8822c_dpk_pas_read(struct rtw_dev *rtwdev, u8 path)
3461 {
3462 u32 i_val, q_val;
3463
3464 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3465 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3466 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060001);
3467 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3468 rtw_write32(rtwdev, 0x1b4c, 0x00080000);
3469
3470 q_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD);
3471 i_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD);
3472
3473 if (i_val & BIT(15))
3474 i_val = 0x10000 - i_val;
3475 if (q_val & BIT(15))
3476 q_val = 0x10000 - q_val;
3477
3478 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3479
3480 return i_val * i_val + q_val * q_val;
3481 }
3482
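/* Fixed-point approximation of 100 * log2(val): the integer part comes from
 * __fls(), the fractional part from the small correction table below.
 */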
3483 static u32 rtw8822c_psd_log2base(u32 val)
3484 {
3485 u32 tmp, val_integerd_b, tindex;
3486 u32 result, val_fractiond_b;
3487 u32 table_fraction[21] = {0, 432, 332, 274, 232, 200, 174,
3488 151, 132, 115, 100, 86, 74, 62, 51,
3489 42, 32, 23, 15, 7, 0};
3490
3491 if (val == 0)
3492 return 0;
3493
3494 val_integerd_b = __fls(val) + 1;
3495
3496 tmp = (val * 100) / (1 << val_integerd_b);
3497 tindex = tmp / 5;
3498
3499 if (tindex >= ARRAY_SIZE(table_fraction))
3500 tindex = ARRAY_SIZE(table_fraction) - 1;
3501
3502 val_fractiond_b = table_fraction[tindex];
3503
3504 result = val_integerd_b * 100 - val_fractiond_b;
3505
3506 return result;
3507 }
3508
3509 static u8 rtw8822c_dpk_gainloss_result(struct rtw_dev *rtwdev, u8 path)
3510 {
3511 u8 result;
3512
3513 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3514 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x1);
3515 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060000);
3516
3517 result = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, 0x000000f0);
3518
3519 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3520
3521 return result;
3522 }
3523
3524 static u8 rtw8822c_dpk_agc_gain_chk(struct rtw_dev *rtwdev, u8 path,
3525 u8 limited_pga)
3526 {
3527 u8 result = 0;
3528 u16 dgain;
3529
3530 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3531 dgain = rtw8822c_dpk_dgain_read(rtwdev, path);
3532
3533 if (dgain > 1535 && !limited_pga)
3534 return RTW_DPK_GAIN_LESS;
3535 else if (dgain < 768 && !limited_pga)
3536 return RTW_DPK_GAIN_LARGE;
3537 else
3538 return result;
3539 }
3540
3541 static u8 rtw8822c_dpk_agc_loss_chk(struct rtw_dev *rtwdev, u8 path)
3542 {
3543 u32 loss, loss_db;
3544
3545 loss = rtw8822c_dpk_pas_read(rtwdev, path);
3546 if (loss < 0x4000000)
3547 return RTW_DPK_GL_LESS;
3548 loss_db = 3 * rtw8822c_psd_log2base(loss >> 13) - 3870;
3549
3550 if (loss_db > 1000)
3551 return RTW_DPK_GL_LARGE;
3552 else if (loss_db < 250)
3553 return RTW_DPK_GL_LESS;
3554 else
3555 return RTW_DPK_AGC_OUT;
3556 }
3557
3558 struct rtw8822c_dpk_data {
3559 u8 txbb;
3560 u8 pga;
3561 u8 limited_pga;
3562 u8 agc_cnt;
3563 bool loss_only;
3564 bool gain_only;
3565 u8 path;
3566 };
3567
3568 static u8 rtw8822c_gain_check_state(struct rtw_dev *rtwdev,
3569 struct rtw8822c_dpk_data *data)
3570 {
3571 u8 state;
3572
3573 data->txbb = (u8)rtw_read_rf(rtwdev, data->path, RF_TX_GAIN,
3574 BIT_GAIN_TXBB);
3575 data->pga = (u8)rtw_read_rf(rtwdev, data->path, RF_MODE_TRXAGC,
3576 BIT_RXAGC);
3577
3578 if (data->loss_only) {
3579 state = RTW_DPK_LOSS_CHECK;
3580 goto check_end;
3581 }
3582
3583 state = rtw8822c_dpk_agc_gain_chk(rtwdev, data->path,
3584 data->limited_pga);
3585 if (state == RTW_DPK_GAIN_CHECK && data->gain_only)
3586 state = RTW_DPK_AGC_OUT;
3587 else if (state == RTW_DPK_GAIN_CHECK)
3588 state = RTW_DPK_LOSS_CHECK;
3589
3590 check_end:
3591 data->agc_cnt++;
3592 if (data->agc_cnt >= 6)
3593 state = RTW_DPK_AGC_OUT;
3594
3595 return state;
3596 }
3597
3598 static u8 rtw8822c_gain_large_state(struct rtw_dev *rtwdev,
3599 struct rtw8822c_dpk_data *data)
3600 {
3601 u8 pga = data->pga;
3602
3603 if (pga > 0xe)
3604 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3605 else if (pga > 0xb && pga < 0xf)
3606 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0);
3607 else if (pga < 0xc)
3608 data->limited_pga = 1;
3609
3610 return RTW_DPK_GAIN_CHECK;
3611 }
3612
3613 static u8 rtw8822c_gain_less_state(struct rtw_dev *rtwdev,
3614 struct rtw8822c_dpk_data *data)
3615 {
3616 u8 pga = data->pga;
3617
3618 if (pga < 0xc)
3619 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3620 else if (pga > 0xb && pga < 0xf)
3621 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3622 else if (pga > 0xe)
3623 data->limited_pga = 1;
3624
3625 return RTW_DPK_GAIN_CHECK;
3626 }
3627
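/* Step the TXBB gain based on the measured gain loss: down by 2 when the loss
 * is too large, up by 3 when too small, and exit the AGC loop once the
 * 0x1f/0x0 bound is reached.
 */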
3628 static u8 rtw8822c_gl_state(struct rtw_dev *rtwdev,
3629 struct rtw8822c_dpk_data *data, u8 is_large)
3630 {
3631 u8 txbb_bound[] = {0x1f, 0};
3632
3633 if (data->txbb == txbb_bound[is_large])
3634 return RTW_DPK_AGC_OUT;
3635
3636 if (is_large == 1)
3637 data->txbb -= 2;
3638 else
3639 data->txbb += 3;
3640
3641 rtw_write_rf(rtwdev, data->path, RF_TX_GAIN, BIT_GAIN_TXBB, data->txbb);
3642 data->limited_pga = 0;
3643
3644 return RTW_DPK_GAIN_CHECK;
3645 }
3646
3647 static u8 rtw8822c_gl_large_state(struct rtw_dev *rtwdev,
3648 struct rtw8822c_dpk_data *data)
3649 {
3650 return rtw8822c_gl_state(rtwdev, data, 1);
3651 }
3652
3653 static u8 rtw8822c_gl_less_state(struct rtw_dev *rtwdev,
3654 struct rtw8822c_dpk_data *data)
3655 {
3656 return rtw8822c_gl_state(rtwdev, data, 0);
3657 }
3658
3659 static u8 rtw8822c_loss_check_state(struct rtw_dev *rtwdev,
3660 struct rtw8822c_dpk_data *data)
3661 {
3662 u8 path = data->path;
3663 u8 state;
3664
3665 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_GAIN_LOSS);
3666 state = rtw8822c_dpk_agc_loss_chk(rtwdev, path);
3667
3668 return state;
3669 }
3670
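/* DPK AGC state machine: the table is indexed by the current RTW_DPK_* state
 * and each handler returns the next state; rtw8822c_dpk_pas_agc() below loops
 * until a handler returns RTW_DPK_AGC_OUT.
 */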
3671 static u8 (*dpk_state[])(struct rtw_dev *rtwdev,
3672 struct rtw8822c_dpk_data *data) = {
3673 rtw8822c_gain_check_state, rtw8822c_gain_large_state,
3674 rtw8822c_gain_less_state, rtw8822c_gl_large_state,
3675 rtw8822c_gl_less_state, rtw8822c_loss_check_state };
3676
3677 static u8 rtw8822c_dpk_pas_agc(struct rtw_dev *rtwdev, u8 path,
3678 bool gain_only, bool loss_only)
3679 {
3680 struct rtw8822c_dpk_data data = {0};
3681 u8 (*func)(struct rtw_dev *rtwdev, struct rtw8822c_dpk_data *data);
3682 u8 state = RTW_DPK_GAIN_CHECK;
3683
3684 data.loss_only = loss_only;
3685 data.gain_only = gain_only;
3686 data.path = path;
3687
3688 for (;;) {
3689 func = dpk_state[state];
3690 state = func(rtwdev, &data);
3691 if (state == RTW_DPK_AGC_OUT)
3692 break;
3693 }
3694
3695 return data.txbb;
3696 }
3697
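/* Coefficients stuck at the 13-bit signed extremes (0x1000 / 0x0fff) indicate
 * a saturated, unusable DPK result.
 */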
3698 static bool rtw8822c_dpk_coef_iq_check(struct rtw_dev *rtwdev,
3699 u16 coef_i, u16 coef_q)
3700 {
3701 if (coef_i == 0x1000 || coef_i == 0x0fff ||
3702 coef_q == 0x1000 || coef_q == 0x0fff)
3703 return true;
3704
3705 return false;
3706 }
3707
3708 static u32 rtw8822c_dpk_coef_transfer(struct rtw_dev *rtwdev)
3709 {
3710 u32 reg = 0;
3711 u16 coef_i = 0, coef_q = 0;
3712
3713 reg = rtw_read32(rtwdev, REG_STAT_RPT);
3714
3715 coef_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD) & 0x1fff;
3716 coef_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD) & 0x1fff;
3717
3718 coef_q = ((0x2000 - coef_q) & 0x1fff) - 1;
3719
3720 reg = (coef_i << 16) | coef_q;
3721
3722 return reg;
3723 }
3724
3725 static const u32 rtw8822c_dpk_get_coef_tbl[] = {
3726 0x000400f0, 0x040400f0, 0x080400f0, 0x010400f0, 0x050400f0,
3727 0x090400f0, 0x020400f0, 0x060400f0, 0x0a0400f0, 0x030400f0,
3728 0x070400f0, 0x0b0400f0, 0x0c0400f0, 0x100400f0, 0x0d0400f0,
3729 0x110400f0, 0x0e0400f0, 0x120400f0, 0x0f0400f0, 0x130400f0,
3730 };
3731
3732 static void rtw8822c_dpk_coef_tbl_apply(struct rtw_dev *rtwdev, u8 path)
3733 {
3734 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3735 int i;
3736
3737 for (i = 0; i < 20; i++) {
3738 rtw_write32(rtwdev, REG_RXSRAM_CTL,
3739 rtw8822c_dpk_get_coef_tbl[i]);
3740 dpk_info->coef[path][i] = rtw8822c_dpk_coef_transfer(rtwdev);
3741 }
3742 }
3743
3744 static void rtw8822c_dpk_get_coef(struct rtw_dev *rtwdev, u8 path)
3745 {
3746 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3747
3748 if (path == RF_PATH_A) {
3749 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x0);
3750 rtw_write32(rtwdev, REG_DPD_CTL0_S0, 0x30000080);
3751 } else if (path == RF_PATH_B) {
3752 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x1);
3753 rtw_write32(rtwdev, REG_DPD_CTL0_S1, 0x30000080);
3754 }
3755
3756 rtw8822c_dpk_coef_tbl_apply(rtwdev, path);
3757 }
3758
3759 static u8 rtw8822c_dpk_coef_read(struct rtw_dev *rtwdev, u8 path)
3760 {
3761 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3762 u8 addr, result = 1;
3763 u16 coef_i, coef_q;
3764
3765 for (addr = 0; addr < 20; addr++) {
3766 coef_i = FIELD_GET(0x1fff0000, dpk_info->coef[path][addr]);
3767 coef_q = FIELD_GET(0x1fff, dpk_info->coef[path][addr]);
3768
3769 if (rtw8822c_dpk_coef_iq_check(rtwdev, coef_i, coef_q)) {
3770 result = 0;
3771 break;
3772 }
3773 }
3774 return result;
3775 }
3776
3777 static void rtw8822c_dpk_coef_write(struct rtw_dev *rtwdev, u8 path, u8 result)
3778 {
3779 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3780 u16 reg[DPK_RF_PATH_NUM] = {0x1b0c, 0x1b64};
3781 u32 coef;
3782 u8 addr;
3783
3784 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3785 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3786
3787 for (addr = 0; addr < 20; addr++) {
3788 if (result == 0) {
3789 if (addr == 3)
3790 coef = 0x04001fff;
3791 else
3792 coef = 0x00001fff;
3793 } else {
3794 coef = dpk_info->coef[path][addr];
3795 }
3796 rtw_write32(rtwdev, reg[path] + addr * 4, coef);
3797 }
3798 }
3799
3800 static void rtw8822c_dpk_fill_result(struct rtw_dev *rtwdev, u32 dpk_txagc,
3801 u8 path, u8 result)
3802 {
3803 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3804
3805 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3806
3807 if (result)
3808 rtw_write8(rtwdev, REG_DPD_AGC, (u8)(dpk_txagc - 6));
3809 else
3810 rtw_write8(rtwdev, REG_DPD_AGC, 0x00);
3811
3812 dpk_info->result[path] = result;
3813 dpk_info->dpk_txagc[path] = rtw_read8(rtwdev, REG_DPD_AGC);
3814
3815 rtw8822c_dpk_coef_write(rtwdev, path, result);
3816 }
3817
3818 static u32 rtw8822c_dpk_gainloss(struct rtw_dev *rtwdev, u8 path)
3819 {
3820 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3821 u8 tx_agc, tx_bb, ori_txbb, ori_txagc, tx_agc_search, t1, t2;
3822
3823 ori_txbb = rtw8822c_dpk_rf_setting(rtwdev, path);
3824 ori_txagc = (u8)rtw_read_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_TXAGC);
3825
3826 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3827 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3828 rtw8822c_dpk_dgain_read(rtwdev, path);
3829
3830 if (rtw8822c_dpk_dc_corr_check(rtwdev, path)) {
3831 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3832 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3833 rtw8822c_dpk_dc_corr_check(rtwdev, path);
3834 }
3835
3836 t1 = rtw8822c_dpk_thermal_read(rtwdev, path);
3837 tx_bb = rtw8822c_dpk_pas_agc(rtwdev, path, false, true);
3838 tx_agc_search = rtw8822c_dpk_gainloss_result(rtwdev, path);
3839
3840 if (tx_bb < tx_agc_search)
3841 tx_bb = 0;
3842 else
3843 tx_bb = tx_bb - tx_agc_search;
3844
3845 rtw_write_rf(rtwdev, path, RF_TX_GAIN, BIT_GAIN_TXBB, tx_bb);
3846
3847 tx_agc = ori_txagc - (ori_txbb - tx_bb);
3848
3849 t2 = rtw8822c_dpk_thermal_read(rtwdev, path);
3850
3851 dpk_info->thermal_dpk_delta[path] = abs(t2 - t1);
3852
3853 return tx_agc;
3854 }
3855
3856 static u8 rtw8822c_dpk_by_path(struct rtw_dev *rtwdev, u32 tx_agc, u8 path)
3857 {
3858 u8 result;
3859
3860 result = rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DO_DPK);
3861
3862 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3863
3864 result = result | (u8)rtw_read32_mask(rtwdev, REG_DPD_CTL1_S0, BIT(26));
3865
3866 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x33e14);
3867
3868 rtw8822c_dpk_get_coef(rtwdev, path);
3869
3870 return result;
3871 }
3872
3873 static void rtw8822c_dpk_cal_gs(struct rtw_dev *rtwdev, u8 path)
3874 {
3875 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3876 u32 tmp_gs = 0;
3877
3878 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3879 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_BYPASS_DPD, 0x0);
3880 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3881 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3882 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x1);
3883 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3884 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0xf);
3885
3886 if (path == RF_PATH_A) {
3887 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
3888 0x1066680);
3889 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN, 0x1);
3890 } else {
3891 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
3892 0x1066680);
3893 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN, 0x1);
3894 }
3895
3896 if (dpk_info->dpk_bw == DPK_CHANNEL_WIDTH_80) {
3897 rtw_write32(rtwdev, REG_DPD_CTL16, 0x80001310);
3898 rtw_write32(rtwdev, REG_DPD_CTL16, 0x00001310);
3899 rtw_write32(rtwdev, REG_DPD_CTL16, 0x810000db);
3900 rtw_write32(rtwdev, REG_DPD_CTL16, 0x010000db);
3901 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3902 rtw_write32(rtwdev, REG_DPD_CTL15,
3903 0x05020000 | (BIT(path) << 28));
3904 } else {
3905 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8200190c);
3906 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0200190c);
3907 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8301ee14);
3908 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0301ee14);
3909 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3910 rtw_write32(rtwdev, REG_DPD_CTL15,
3911 0x05020008 | (BIT(path) << 28));
3912 }
3913
3914 rtw_write32_mask(rtwdev, REG_DPD_CTL0, MASKBYTE3, 0x8 | path);
3915
3916 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_CAL_PWR);
3917
3918 rtw_write32_mask(rtwdev, REG_DPD_CTL15, MASKBYTE3, 0x0);
3919 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3920 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3921 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x0);
3922 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3923
3924 if (path == RF_PATH_A)
3925 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, 0x5b);
3926 else
3927 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, 0x5b);
3928
3929 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3930
3931 tmp_gs = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, BIT_RPT_DGAIN);
3932 tmp_gs = (tmp_gs * 910) >> 10;
3933 tmp_gs = DIV_ROUND_CLOSEST(tmp_gs, 10);
3934
3935 if (path == RF_PATH_A)
3936 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, tmp_gs);
3937 else
3938 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, tmp_gs);
3939
3940 dpk_info->dpk_gs[path] = tmp_gs;
3941 }
3942
3943 static void rtw8822c_dpk_cal_coef1(struct rtw_dev *rtwdev)
3944 {
3945 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3946 u32 offset[DPK_RF_PATH_NUM] = {0, 0x58};
3947 u32 i_scaling;
3948 u8 path;
3949
3950 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3951 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3952 rtw_write32(rtwdev, REG_NCTL0, 0x00001148);
3953 rtw_write32(rtwdev, REG_NCTL0, 0x00001149);
3954
3955 if (!check_hw_ready(rtwdev, 0x2d9c, MASKBYTE0, 0x55))
3956 rtw_warn(rtwdev, "DPK stuck, performance may be suboptimal");
3957
3958 rtw_write8(rtwdev, 0x1b10, 0x0);
3959 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3960
3961 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3962 i_scaling = 0x16c00 / dpk_info->dpk_gs[path];
3963
3964 rtw_write32_mask(rtwdev, 0x1b18 + offset[path], MASKHWORD,
3965 i_scaling);
3966 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3967 GENMASK(31, 28), 0x9);
3968 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3969 GENMASK(31, 28), 0x1);
3970 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3971 GENMASK(31, 28), 0x0);
3972 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0 + offset[path],
3973 BIT(14), 0x0);
3974 }
3975 }
3976
3977 static void rtw8822c_dpk_on(struct rtw_dev *rtwdev, u8 path)
3978 {
3979 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3980
3981 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
3982
3983 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3984 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3985
3986 if (test_bit(path, dpk_info->dpk_path_ok))
3987 rtw8822c_dpk_cal_gs(rtwdev, path);
3988 }
3989
3990 static bool rtw8822c_dpk_check_pass(struct rtw_dev *rtwdev, bool is_fail,
3991 u32 dpk_txagc, u8 path)
3992 {
3993 bool result;
3994
3995 if (!is_fail) {
3996 if (rtw8822c_dpk_coef_read(rtwdev, path))
3997 result = true;
3998 else
3999 result = false;
4000 } else {
4001 result = false;
4002 }
4003
4004 rtw8822c_dpk_fill_result(rtwdev, dpk_txagc, path, result);
4005
4006 return result;
4007 }
4008
4009 static void rtw8822c_dpk_result_reset(struct rtw_dev *rtwdev)
4010 {
4011 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4012 u8 path;
4013
4014 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4015 clear_bit(path, dpk_info->dpk_path_ok);
4016 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4017 0x8 | (path << 1));
4018 rtw_write32_mask(rtwdev, 0x1b58, 0x0000007f, 0x0);
4019
4020 dpk_info->dpk_txagc[path] = 0;
4021 dpk_info->result[path] = 0;
4022 dpk_info->dpk_gs[path] = 0x5b;
4023 dpk_info->pre_pwsf[path] = 0;
4024 dpk_info->thermal_dpk[path] = rtw8822c_dpk_thermal_read(rtwdev,
4025 path);
4026 }
4027 }
4028
4029 static void rtw8822c_dpk_calibrate(struct rtw_dev *rtwdev, u8 path)
4030 {
4031 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4032 u32 dpk_txagc;
4033 u8 dpk_fail;
4034
4035 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk start\n", path);
4036
4037 dpk_txagc = rtw8822c_dpk_gainloss(rtwdev, path);
4038
4039 dpk_fail = rtw8822c_dpk_by_path(rtwdev, dpk_txagc, path);
4040
4041 if (!rtw8822c_dpk_check_pass(rtwdev, dpk_fail, dpk_txagc, path))
4042 rtw_err(rtwdev, "failed to do dpk calibration\n");
4043
4044 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk finish\n", path);
4045
4046 if (dpk_info->result[path])
4047 set_bit(path, dpk_info->dpk_path_ok);
4048 }
4049
4050 static void rtw8822c_dpk_path_select(struct rtw_dev *rtwdev)
4051 {
4052 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_A);
4053 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_B);
4054 rtw8822c_dpk_on(rtwdev, RF_PATH_A);
4055 rtw8822c_dpk_on(rtwdev, RF_PATH_B);
4056 rtw8822c_dpk_cal_coef1(rtwdev);
4057 }
4058
4059 static void rtw8822c_dpk_enable_disable(struct rtw_dev *rtwdev)
4060 {
4061 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4062 u32 mask = BIT(15) | BIT(14);
4063
4064 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4065
4066 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN,
4067 dpk_info->is_dpk_pwr_on);
4068 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN,
4069 dpk_info->is_dpk_pwr_on);
4070
4071 if (test_bit(RF_PATH_A, dpk_info->dpk_path_ok)) {
4072 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, mask, 0x0);
4073 rtw_write8(rtwdev, REG_DPD_CTL0_S0, dpk_info->dpk_gs[RF_PATH_A]);
4074 }
4075 if (test_bit(RF_PATH_B, dpk_info->dpk_path_ok)) {
4076 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, mask, 0x0);
4077 rtw_write8(rtwdev, REG_DPD_CTL0_S1, dpk_info->dpk_gs[RF_PATH_B]);
4078 }
4079 }
4080
4081 static void rtw8822c_dpk_reload_data(struct rtw_dev *rtwdev)
4082 {
4083 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4084 u8 path;
4085
4086 if (!test_bit(RF_PATH_A, dpk_info->dpk_path_ok) &&
4087 !test_bit(RF_PATH_B, dpk_info->dpk_path_ok) &&
4088 dpk_info->dpk_ch == 0)
4089 return;
4090
4091 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4092 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4093 0x8 | (path << 1));
4094 if (dpk_info->dpk_band == RTW_BAND_2G)
4095 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
4096 else
4097 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
4098
4099 rtw_write8(rtwdev, REG_DPD_AGC, dpk_info->dpk_txagc[path]);
4100
4101 rtw8822c_dpk_coef_write(rtwdev, path,
4102 test_bit(path, dpk_info->dpk_path_ok));
4103
4104 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4105
4106 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4107
4108 if (path == RF_PATH_A)
4109 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
4110 dpk_info->dpk_gs[path]);
4111 else
4112 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
4113 dpk_info->dpk_gs[path]);
4114 }
4115 rtw8822c_dpk_cal_coef1(rtwdev);
4116 }
4117
4118 static bool rtw8822c_dpk_reload(struct rtw_dev *rtwdev)
4119 {
4120 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4121 u8 channel;
4122
4123 dpk_info->is_reload = false;
4124
4125 channel = (u8)(rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK) & 0xff);
4126
4127 if (channel == dpk_info->dpk_ch) {
4128 rtw_dbg(rtwdev, RTW_DBG_RFK,
4129 "[DPK] DPK reload for CH%d!!\n", dpk_info->dpk_ch);
4130 rtw8822c_dpk_reload_data(rtwdev);
4131 dpk_info->is_reload = true;
4132 }
4133
4134 return dpk_info->is_reload;
4135 }
4136
4137 static void rtw8822c_do_dpk(struct rtw_dev *rtwdev)
4138 {
4139 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4140 struct rtw_backup_info bckp[DPK_BB_REG_NUM];
4141 u32 rf_reg_backup[DPK_RF_REG_NUM][DPK_RF_PATH_NUM];
4142 u32 bb_reg[DPK_BB_REG_NUM] = {
4143 0x520, 0x820, 0x824, 0x1c3c, 0x1d58, 0x1864,
4144 0x4164, 0x180c, 0x410c, 0x186c, 0x416c,
4145 0x1a14, 0x1e70, 0x80c, 0x1d70, 0x1e7c, 0x18a4, 0x41a4};
4146 u32 rf_reg[DPK_RF_REG_NUM] = {
4147 0x0, 0x1a, 0x55, 0x63, 0x87, 0x8f, 0xde};
4148 u8 path;
4149
4150 if (!dpk_info->is_dpk_pwr_on) {
4151 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] Skip DPK due to DPD PWR off\n");
4152 return;
4153 } else if (rtw8822c_dpk_reload(rtwdev)) {
4154 return;
4155 }
4156
4157 for (path = RF_PATH_A; path < DPK_RF_PATH_NUM; path++)
4158 ewma_thermal_init(&dpk_info->avg_thermal[path]);
4159
4160 rtw8822c_dpk_information(rtwdev);
4161
4162 rtw8822c_dpk_backup_registers(rtwdev, bb_reg, DPK_BB_REG_NUM, bckp);
4163 rtw8822c_dpk_backup_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4164
4165 rtw8822c_dpk_mac_bb_setting(rtwdev);
4166 rtw8822c_dpk_afe_setting(rtwdev, true);
4167 rtw8822c_dpk_pre_setting(rtwdev);
4168 rtw8822c_dpk_result_reset(rtwdev);
4169 rtw8822c_dpk_path_select(rtwdev);
4170 rtw8822c_dpk_afe_setting(rtwdev, false);
4171 rtw8822c_dpk_enable_disable(rtwdev);
4172
4173 rtw8822c_dpk_reload_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4174 for (path = 0; path < rtwdev->hal.rf_path_num; path++)
4175 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
4176 rtw8822c_dpk_restore_registers(rtwdev, DPK_BB_REG_NUM, bckp);
4177 }
4178
4179 static void rtw8822c_phy_calibration(struct rtw_dev *rtwdev)
4180 {
4181 rtw8822c_rfk_power_save(rtwdev, false);
4182 rtw8822c_do_gapk(rtwdev);
4183 rtw8822c_do_iqk(rtwdev);
4184 rtw8822c_do_dpk(rtwdev);
4185 rtw8822c_rfk_power_save(rtwdev, true);
4186 }
4187
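/* Thermal tracking for DPK: compare the averaged thermal reading of each path
 * against the value captured at calibration time and, when it changes,
 * program the resulting 7-bit power-scaling offset into 0x1b58[6:0].
 */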
4188 static void rtw8822c_dpk_track(struct rtw_dev *rtwdev)
4189 {
4190 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4191 u8 path;
4192 u8 thermal_value[DPK_RF_PATH_NUM] = {0};
4193 s8 offset[DPK_RF_PATH_NUM], delta_dpk[DPK_RF_PATH_NUM];
4194
4195 if (dpk_info->thermal_dpk[0] == 0 && dpk_info->thermal_dpk[1] == 0)
4196 return;
4197
4198 for (path = 0; path < DPK_RF_PATH_NUM; path++) {
4199 thermal_value[path] = rtw8822c_dpk_thermal_read(rtwdev, path);
4200 ewma_thermal_add(&dpk_info->avg_thermal[path],
4201 thermal_value[path]);
4202 thermal_value[path] =
4203 ewma_thermal_read(&dpk_info->avg_thermal[path]);
4204 delta_dpk[path] = dpk_info->thermal_dpk[path] -
4205 thermal_value[path];
4206 offset[path] = delta_dpk[path] -
4207 dpk_info->thermal_dpk_delta[path];
4208 offset[path] &= 0x7f;
4209
4210 if (offset[path] != dpk_info->pre_pwsf[path]) {
4211 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4212 0x8 | (path << 1));
4213 rtw_write32_mask(rtwdev, 0x1b58, GENMASK(6, 0),
4214 offset[path]);
4215 dpk_info->pre_pwsf[path] = offset[path];
4216 }
4217 }
4218 }
4219
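/* XCAP_EXTEND() duplicates the 7-bit crystal cap code into bits [13:7] so a
 * single masked write of BIT_XCAP_0 programs both halves of the crystal cap
 * field with the same value.
 */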
4220 #define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
4221 static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
4222 {
4223 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4224 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4225 u32 val = 0;
4226
4227 val = XCAP_EXTEND(crystal_cap);
4228 cfo->crystal_cap = crystal_cap;
4229 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
4230 }
4231
4232 static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
4233 {
4234 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4235 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4236
4237 if (cfo->crystal_cap == crystal_cap)
4238 return;
4239
4240 rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
4241 }
4242
4243 static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
4244 {
4245 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4246 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4247
4248 cfo->is_adjust = true;
4249
4250 if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
4251 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
4252 else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
4253 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
4254 }
4255
4256 static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
4257 {
4258 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4259 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4260
4261 cfo->crystal_cap = rtwdev->efuse.crystal_cap;
4262 cfo->is_adjust = true;
4263 }
4264
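/* REPORT_TO_KHZ() scales a raw CFO report to kHz: (v << 1) + (v >> 1) == 2.5 * v. */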
4265 #define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
4266 static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
4267 {
4268 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4269 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4270 s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
4271 u8 i;
4272
4273 for (i = 0; i < path_num; i++) {
4274 cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
4275
4276 if (cfo->cfo_cnt[i])
4277 cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
4278 else
4279 cfo_avg = 0;
4280
4281 cfo_path_sum += cfo_avg;
4282 }
4283
4284 for (i = 0; i < path_num; i++) {
4285 cfo->cfo_tail[i] = 0;
4286 cfo->cfo_cnt[i] = 0;
4287 }
4288
4289 return cfo_path_sum / path_num;
4290 }
4291
4292 static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
4293 {
4294 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4295 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4296
4297 if (!cfo->is_adjust) {
4298 if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
4299 cfo->is_adjust = true;
4300 } else {
4301 if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
4302 cfo->is_adjust = false;
4303 }
4304
4305 if (!rtw_coex_disabled(rtwdev)) {
4306 cfo->is_adjust = false;
4307 rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
4308 }
4309 }
4310
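/* CFO tracking only runs while exactly one station is associated; the
 * averaged CFO nudges the crystal cap by one step per pass once it exceeds
 * CFO_TRK_ADJ_TH, clamped to the valid XCAP range.
 */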
4311 static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
4312 {
4313 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4314 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4315 u8 path_num = rtwdev->hal.rf_path_num;
4316 s8 crystal_cap = cfo->crystal_cap;
4317 s32 cfo_avg = 0;
4318
4319 if (rtwdev->sta_cnt != 1) {
4320 rtw8822c_cfo_tracking_reset(rtwdev);
4321 return;
4322 }
4323
4324 if (cfo->packet_count == cfo->packet_count_pre)
4325 return;
4326
4327 cfo->packet_count_pre = cfo->packet_count;
4328 cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
4329 rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
4330
4331 if (cfo->is_adjust) {
4332 if (cfo_avg > CFO_TRK_ADJ_TH)
4333 crystal_cap++;
4334 else if (cfo_avg < -CFO_TRK_ADJ_TH)
4335 crystal_cap--;
4336
4337 crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
4338 rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
4339 }
4340 }
4341
4342 static const struct rtw_phy_cck_pd_reg
4343 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
4344 {
4345 {0x1ac8, 0x00ff, 0x1ad0, 0x01f},
4346 {0x1ac8, 0xff00, 0x1ad0, 0x3e0}
4347 },
4348 {
4349 {0x1acc, 0x00ff, 0x1ad0, 0x01F00000},
4350 {0x1acc, 0xff00, 0x1ad0, 0x3E000000}
4351 },
4352 };
4353
4354 #define RTW_CCK_PD_MAX 255
4355 #define RTW_CCK_CS_MAX 31
4356 #define RTW_CCK_CS_ERR1 27
4357 #define RTW_CCK_CS_ERR2 29
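/* Shift the CCK power-detection threshold and carrier-sense ratio by the
 * requested deltas for the given bandwidth/RX-path combination, clamping PD
 * to RTW_CCK_PD_MAX and stepping over the CS values flagged as erroneous.
 */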
4358 static void
4359 rtw8822c_phy_cck_pd_set_reg(struct rtw_dev *rtwdev,
4360 s8 pd_diff, s8 cs_diff, u8 bw, u8 nrx)
4361 {
4362 u32 pd, cs;
4363
4364 if (WARN_ON(bw > RTW_CHANNEL_WIDTH_40 || nrx >= RTW_RF_PATH_MAX))
4365 return;
4366
4367 pd = rtw_read32_mask(rtwdev,
4368 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4369 rtw8822c_cck_pd_reg[bw][nrx].mask_pd);
4370 cs = rtw_read32_mask(rtwdev,
4371 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4372 rtw8822c_cck_pd_reg[bw][nrx].mask_cs);
4373 pd += pd_diff;
4374 cs += cs_diff;
4375 if (pd > RTW_CCK_PD_MAX)
4376 pd = RTW_CCK_PD_MAX;
4377 if (cs == RTW_CCK_CS_ERR1 || cs == RTW_CCK_CS_ERR2)
4378 cs++;
4379 else if (cs > RTW_CCK_CS_MAX)
4380 cs = RTW_CCK_CS_MAX;
4381 rtw_write32_mask(rtwdev,
4382 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4383 rtw8822c_cck_pd_reg[bw][nrx].mask_pd,
4384 pd);
4385 rtw_write32_mask(rtwdev,
4386 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4387 rtw8822c_cck_pd_reg[bw][nrx].mask_cs,
4388 cs);
4389
4390 rtw_dbg(rtwdev, RTW_DBG_PHY,
4391 "is_linked=%d, bw=%d, nrx=%d, cs_ratio=0x%x, pd_th=0x%x\n",
4392 rtw_is_assoc(rtwdev), bw, nrx, cs, pd);
4393 }
4394
4395 static void rtw8822c_phy_cck_pd_set(struct rtw_dev *rtwdev, u8 new_lvl)
4396 {
4397 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4398 s8 pd_lvl[CCK_PD_LV_MAX] = {0, 2, 4, 6, 8};
4399 s8 cs_lvl[CCK_PD_LV_MAX] = {0, 2, 2, 2, 4};
4400 u8 cur_lvl;
4401 u8 nrx, bw;
4402
4403 nrx = (u8)rtw_read32_mask(rtwdev, 0x1a2c, 0x60000);
4404 bw = (u8)rtw_read32_mask(rtwdev, 0x9b0, 0xc);
4405
4406 rtw_dbg(rtwdev, RTW_DBG_PHY, "lv: (%d) -> (%d) bw=%d nr=%d cck_fa_avg=%d\n",
4407 dm_info->cck_pd_lv[bw][nrx], new_lvl, bw, nrx,
4408 dm_info->cck_fa_avg);
4409
4410 if (dm_info->cck_pd_lv[bw][nrx] == new_lvl)
4411 return;
4412
4413 cur_lvl = dm_info->cck_pd_lv[bw][nrx];
4414
4415 /* update cck pd info */
4416 dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
4417
4418 rtw8822c_phy_cck_pd_set_reg(rtwdev,
4419 pd_lvl[new_lvl] - pd_lvl[cur_lvl],
4420 cs_lvl[new_lvl] - cs_lvl[cur_lvl],
4421 bw, nrx);
4422 dm_info->cck_pd_lv[bw][nrx] = new_lvl;
4423 }
4424
4425 #define PWR_TRACK_MASK 0x7f
4426 static void rtw8822c_pwrtrack_set(struct rtw_dev *rtwdev, u8 rf_path)
4427 {
4428 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4429
4430 switch (rf_path) {
4431 case RF_PATH_A:
4432 rtw_write32_mask(rtwdev, 0x18a0, PWR_TRACK_MASK,
4433 dm_info->delta_power_index[rf_path]);
4434 break;
4435 case RF_PATH_B:
4436 rtw_write32_mask(rtwdev, 0x41a0, PWR_TRACK_MASK,
4437 dm_info->delta_power_index[rf_path]);
4438 break;
4439 default:
4440 break;
4441 }
4442 }
4443
4444 static void rtw8822c_pwr_track_stats(struct rtw_dev *rtwdev, u8 path)
4445 {
4446 u8 thermal_value;
4447
4448 if (rtwdev->efuse.thermal_meter[path] == 0xff)
4449 return;
4450
4451 thermal_value = rtw_read_rf(rtwdev, path, RF_T_METER, 0x7e);
4452 rtw_phy_pwrtrack_avg(rtwdev, thermal_value, path);
4453 }
4454
4455 static void rtw8822c_pwr_track_path(struct rtw_dev *rtwdev,
4456 struct rtw_swing_table *swing_table,
4457 u8 path)
4458 {
4459 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4460 u8 delta;
4461
4462 delta = rtw_phy_pwrtrack_get_delta(rtwdev, path);
4463 dm_info->delta_power_index[path] =
4464 rtw_phy_pwrtrack_get_pwridx(rtwdev, swing_table, path, path,
4465 delta);
4466 rtw8822c_pwrtrack_set(rtwdev, path);
4467 }
4468
4469 static void __rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4470 {
4471 struct rtw_swing_table swing_table;
4472 u8 i;
4473
4474 rtw_phy_config_swing_table(rtwdev, &swing_table);
4475
4476 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4477 rtw8822c_pwr_track_stats(rtwdev, i);
4478 if (rtw_phy_pwrtrack_need_lck(rtwdev))
4479 rtw8822c_do_lck(rtwdev);
4480 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4481 rtw8822c_pwr_track_path(rtwdev, &swing_table, i);
4482 }
4483
4484 static void rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4485 {
4486 struct rtw_efuse *efuse = &rtwdev->efuse;
4487 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4488
4489 if (efuse->power_track_type != 0)
4490 return;
4491
4492 if (!dm_info->pwr_trk_triggered) {
4493 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4494 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x00);
4495 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4496
4497 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4498 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x00);
4499 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4500
4501 dm_info->pwr_trk_triggered = true;
4502 return;
4503 }
4504
4505 __rtw8822c_pwr_track(rtwdev);
4506 dm_info->pwr_trk_triggered = false;
4507 }
4508
4509 static void rtw8822c_adaptivity_init(struct rtw_dev *rtwdev)
4510 {
4511 rtw_phy_set_edcca_th(rtwdev, RTW8822C_EDCCA_MAX, RTW8822C_EDCCA_MAX);
4512
4513 /* mac edcca state setting */
4514 rtw_write32_clr(rtwdev, REG_TX_PTCL_CTRL, BIT_DIS_EDCCA);
4515 rtw_write32_set(rtwdev, REG_RD_CTRL, BIT_EDCCA_MSK_CNTDOWN_EN);
4516
4517 /* EDCCA decision option */
4518 rtw_write32_clr(rtwdev, REG_EDCCA_DECISION, BIT_EDCCA_OPTION);
4519 }
4520
4521 static void rtw8822c_adaptivity(struct rtw_dev *rtwdev)
4522 {
4523 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4524 s8 l2h, h2l;
4525 u8 igi;
4526
4527 igi = dm_info->igi_history[0];
4528 if (dm_info->edcca_mode == RTW_EDCCA_NORMAL) {
4529 l2h = max_t(s8, igi + EDCCA_IGI_L2H_DIFF, EDCCA_TH_L2H_LB);
4530 h2l = l2h - EDCCA_L2H_H2L_DIFF_NORMAL;
4531 } else {
4532 if (igi < dm_info->l2h_th_ini - EDCCA_ADC_BACKOFF)
4533 l2h = igi + EDCCA_ADC_BACKOFF;
4534 else
4535 l2h = dm_info->l2h_th_ini;
4536 h2l = l2h - EDCCA_L2H_H2L_DIFF;
4537 }
4538
4539 rtw_phy_set_edcca_th(rtwdev, l2h, h2l);
4540 }
4541
4542 static void rtw8822c_led_set(struct led_classdev *led,
4543 enum led_brightness brightness)
4544 {
4545 struct rtw_dev *rtwdev = container_of(led, struct rtw_dev, led_cdev);
4546 u32 ledcfg;
4547
4548 ledcfg = rtw_read32(rtwdev, REG_LED_CFG);
4549 u32p_replace_bits(&ledcfg, BIT_LED_MODE_SW_CTRL, BIT_LED2_CM);
4550 ledcfg &= ~BIT_GPIO13_14_WL_CTRL_EN;
4551
4552 if (brightness == LED_OFF)
4553 ledcfg |= BIT_LED2_SV;
4554 else
4555 ledcfg &= ~BIT_LED2_SV;
4556
4557 rtw_write32(rtwdev, REG_LED_CFG, ledcfg);
4558 }
4559
4560 static void rtw8822c_fill_txdesc_checksum(struct rtw_dev *rtwdev,
4561 struct rtw_tx_pkt_info *pkt_info,
4562 u8 *txdesc)
4563 {
4564 const struct rtw_chip_info *chip = rtwdev->chip;
4565 size_t words;
4566
4567 words = (pkt_info->pkt_offset * 8 + chip->tx_pkt_desc_sz) / 2;
4568
4569 fill_txdesc_checksum_common(txdesc, words);
4570 }
4571
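/* Power sequence tables: each command is {offset, cut mask, interface mask,
 * address space, command type, bit mask, value}, and every table is
 * terminated by an RTW_PWR_CMD_END entry.
 */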
4572 static const struct rtw_pwr_seq_cmd trans_carddis_to_cardemu_8822c[] = {
4573 {0x0086,
4574 RTW_PWR_CUT_ALL_MSK,
4575 RTW_PWR_INTF_SDIO_MSK,
4576 RTW_PWR_ADDR_SDIO,
4577 RTW_PWR_CMD_WRITE, BIT(0), 0},
4578 {0x0086,
4579 RTW_PWR_CUT_ALL_MSK,
4580 RTW_PWR_INTF_SDIO_MSK,
4581 RTW_PWR_ADDR_SDIO,
4582 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4583 {0x002E,
4584 RTW_PWR_CUT_ALL_MSK,
4585 RTW_PWR_INTF_ALL_MSK,
4586 RTW_PWR_ADDR_MAC,
4587 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4588 {0x002D,
4589 RTW_PWR_CUT_ALL_MSK,
4590 RTW_PWR_INTF_ALL_MSK,
4591 RTW_PWR_ADDR_MAC,
4592 RTW_PWR_CMD_WRITE, BIT(0), 0},
4593 {0x007F,
4594 RTW_PWR_CUT_ALL_MSK,
4595 RTW_PWR_INTF_ALL_MSK,
4596 RTW_PWR_ADDR_MAC,
4597 RTW_PWR_CMD_WRITE, BIT(7), 0},
4598 {0x004A,
4599 RTW_PWR_CUT_ALL_MSK,
4600 RTW_PWR_INTF_USB_MSK,
4601 RTW_PWR_ADDR_MAC,
4602 RTW_PWR_CMD_WRITE, BIT(0), 0},
4603 {0x0005,
4604 RTW_PWR_CUT_ALL_MSK,
4605 RTW_PWR_INTF_ALL_MSK,
4606 RTW_PWR_ADDR_MAC,
4607 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4) | BIT(7), 0},
4608 {0xFFFF,
4609 RTW_PWR_CUT_ALL_MSK,
4610 RTW_PWR_INTF_ALL_MSK,
4611 0,
4612 RTW_PWR_CMD_END, 0, 0},
4613 };
4614
4615 static const struct rtw_pwr_seq_cmd trans_cardemu_to_act_8822c[] = {
4616 {0x0000,
4617 RTW_PWR_CUT_ALL_MSK,
4618 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4619 RTW_PWR_ADDR_MAC,
4620 RTW_PWR_CMD_WRITE, BIT(5), 0},
4621 {0x0005,
4622 RTW_PWR_CUT_ALL_MSK,
4623 RTW_PWR_INTF_ALL_MSK,
4624 RTW_PWR_ADDR_MAC,
4625 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3) | BIT(2)), 0},
4626 {0x0075,
4627 RTW_PWR_CUT_ALL_MSK,
4628 RTW_PWR_INTF_PCI_MSK,
4629 RTW_PWR_ADDR_MAC,
4630 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4631 {0x0006,
4632 RTW_PWR_CUT_ALL_MSK,
4633 RTW_PWR_INTF_ALL_MSK,
4634 RTW_PWR_ADDR_MAC,
4635 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4636 {0x0075,
4637 RTW_PWR_CUT_ALL_MSK,
4638 RTW_PWR_INTF_PCI_MSK,
4639 RTW_PWR_ADDR_MAC,
4640 RTW_PWR_CMD_WRITE, BIT(0), 0},
4641 {0xFF1A,
4642 RTW_PWR_CUT_ALL_MSK,
4643 RTW_PWR_INTF_USB_MSK,
4644 RTW_PWR_ADDR_MAC,
4645 RTW_PWR_CMD_WRITE, 0xFF, 0},
4646 {0x002E,
4647 RTW_PWR_CUT_ALL_MSK,
4648 RTW_PWR_INTF_ALL_MSK,
4649 RTW_PWR_ADDR_MAC,
4650 RTW_PWR_CMD_WRITE, BIT(3), 0},
4651 {0x0006,
4652 RTW_PWR_CUT_ALL_MSK,
4653 RTW_PWR_INTF_ALL_MSK,
4654 RTW_PWR_ADDR_MAC,
4655 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4656 {0x0005,
4657 RTW_PWR_CUT_ALL_MSK,
4658 RTW_PWR_INTF_ALL_MSK,
4659 RTW_PWR_ADDR_MAC,
4660 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3)), 0},
4661 {0x1018,
4662 RTW_PWR_CUT_ALL_MSK,
4663 RTW_PWR_INTF_ALL_MSK,
4664 RTW_PWR_ADDR_MAC,
4665 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4666 {0x0005,
4667 RTW_PWR_CUT_ALL_MSK,
4668 RTW_PWR_INTF_ALL_MSK,
4669 RTW_PWR_ADDR_MAC,
4670 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4671 {0x0005,
4672 RTW_PWR_CUT_ALL_MSK,
4673 RTW_PWR_INTF_ALL_MSK,
4674 RTW_PWR_ADDR_MAC,
4675 RTW_PWR_CMD_POLLING, BIT(0), 0},
4676 {0x0074,
4677 RTW_PWR_CUT_ALL_MSK,
4678 RTW_PWR_INTF_PCI_MSK,
4679 RTW_PWR_ADDR_MAC,
4680 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4681 {0x0071,
4682 RTW_PWR_CUT_ALL_MSK,
4683 RTW_PWR_INTF_PCI_MSK,
4684 RTW_PWR_ADDR_MAC,
4685 RTW_PWR_CMD_WRITE, BIT(4), 0},
4686 {0x0062,
4687 RTW_PWR_CUT_ALL_MSK,
4688 RTW_PWR_INTF_PCI_MSK,
4689 RTW_PWR_ADDR_MAC,
4690 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)),
4691 (BIT(7) | BIT(6) | BIT(5))},
4692 {0x0061,
4693 RTW_PWR_CUT_ALL_MSK,
4694 RTW_PWR_INTF_PCI_MSK,
4695 RTW_PWR_ADDR_MAC,
4696 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)), 0},
4697 {0x001F,
4698 RTW_PWR_CUT_ALL_MSK,
4699 RTW_PWR_INTF_ALL_MSK,
4700 RTW_PWR_ADDR_MAC,
4701 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4702 {0x00EF,
4703 RTW_PWR_CUT_ALL_MSK,
4704 RTW_PWR_INTF_ALL_MSK,
4705 RTW_PWR_ADDR_MAC,
4706 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4707 {0x1045,
4708 RTW_PWR_CUT_ALL_MSK,
4709 RTW_PWR_INTF_ALL_MSK,
4710 RTW_PWR_ADDR_MAC,
4711 RTW_PWR_CMD_WRITE, BIT(4), BIT(4)},
4712 {0x0010,
4713 RTW_PWR_CUT_ALL_MSK,
4714 RTW_PWR_INTF_ALL_MSK,
4715 RTW_PWR_ADDR_MAC,
4716 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4717 {0x1064,
4718 RTW_PWR_CUT_ALL_MSK,
4719 RTW_PWR_INTF_ALL_MSK,
4720 RTW_PWR_ADDR_MAC,
4721 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4722 {0xFFFF,
4723 RTW_PWR_CUT_ALL_MSK,
4724 RTW_PWR_INTF_ALL_MSK,
4725 0,
4726 RTW_PWR_CMD_END, 0, 0},
4727 };
4728
static const struct rtw_pwr_seq_cmd trans_act_to_cardemu_8822c[] = {
	{0x0093,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(3), 0},
	{0x001F,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, 0xFF, 0},
	{0x00EF,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, 0xFF, 0},
	{0x1045,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(4), 0},
	{0xFF1A,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_USB_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, 0xFF, 0x30},
	{0x0049,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(1), 0},
	{0x0006,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
	{0x0002,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(1), 0},
	{0x0005,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
	{0x0005,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_POLLING, BIT(1), 0},
	{0x0000,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
	{0xFFFF,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 0,
	 RTW_PWR_CMD_END, 0, 0},
};

static const struct rtw_pwr_seq_cmd trans_cardemu_to_carddis_8822c[] = {
	{0x0005,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_SDIO_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(7), BIT(7)},
	{0x0007,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, 0xFF, 0x00},
	{0x0067,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(5), 0},
	{0x004A,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_USB_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(0), 0},
	{0x0081,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(7) | BIT(6), 0},
	{0x0090,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(1), 0},
	{0x0092,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_PCI_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, 0xFF, 0x20},
	{0x0093,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_PCI_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, 0xFF, 0x04},
	{0x0005,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4), BIT(3)},
	{0x0005,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_PCI_MSK,
	 RTW_PWR_ADDR_MAC,
	 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
	{0x0086,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_SDIO_MSK,
	 RTW_PWR_ADDR_SDIO,
	 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
	{0xFFFF,
	 RTW_PWR_CUT_ALL_MSK,
	 RTW_PWR_INTF_ALL_MSK,
	 0,
	 RTW_PWR_CMD_END, 0, 0},
};

static const struct rtw_pwr_seq_cmd * const card_enable_flow_8822c[] = {
	trans_carddis_to_cardemu_8822c,
	trans_cardemu_to_act_8822c,
	NULL
};

static const struct rtw_pwr_seq_cmd * const card_disable_flow_8822c[] = {
	trans_act_to_cardemu_8822c,
	trans_cardemu_to_carddis_8822c,
	NULL
};

static const struct rtw_intf_phy_para usb2_param_8822c[] = {
	{0xFFFF, 0x00,
	 RTW_IP_SEL_PHY,
	 RTW_INTF_PHY_CUT_ALL,
	 RTW_INTF_PHY_PLATFORM_ALL},
};

static const struct rtw_intf_phy_para usb3_param_8822c[] = {
	{0xFFFF, 0x0000,
	 RTW_IP_SEL_PHY,
	 RTW_INTF_PHY_CUT_ALL,
	 RTW_INTF_PHY_PLATFORM_ALL},
};

static const struct rtw_intf_phy_para pcie_gen1_param_8822c[] = {
	{0xFFFF, 0x0000,
	 RTW_IP_SEL_PHY,
	 RTW_INTF_PHY_CUT_ALL,
	 RTW_INTF_PHY_PLATFORM_ALL},
};

static const struct rtw_intf_phy_para pcie_gen2_param_8822c[] = {
	{0xFFFF, 0x0000,
	 RTW_IP_SEL_PHY,
	 RTW_INTF_PHY_CUT_ALL,
	 RTW_INTF_PHY_PLATFORM_ALL},
};

static const struct rtw_intf_phy_para_table phy_para_table_8822c = {
	.usb2_para = usb2_param_8822c,
	.usb3_para = usb3_param_8822c,
	.gen1_para = pcie_gen1_param_8822c,
	.gen2_para = pcie_gen2_param_8822c,
	.n_usb2_para = ARRAY_SIZE(usb2_param_8822c),
	.n_usb3_para = ARRAY_SIZE(usb3_param_8822c),
	.n_gen1_para = ARRAY_SIZE(pcie_gen1_param_8822c),
	.n_gen2_para = ARRAY_SIZE(pcie_gen2_param_8822c),
};

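/*
 * DIG initial gain index (IGI) fields; the two entries presumably control
 * the two RX paths, both packed into register 0x1d70.
 */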
static const struct rtw_hw_reg rtw8822c_dig[] = {
	[0] = { .addr = 0x1d70, .mask = 0x7f },
	[1] = { .addr = 0x1d70, .mask = 0x7f00 },
};

static const struct rtw_ltecoex_addr rtw8822c_ltecoex_addr = {
	.ctrl = LTECOEX_ACCESS_CTRL,
	.wdata = LTECOEX_WRITE_DATA,
	.rdata = LTECOEX_READ_DATA,
};

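/*
 * TX FIFO page allocation per priority queue; the columns follow
 * struct rtw_page_table (high, normal, low, extra and gap queue page
 * counts).  Which row is used appears to depend on the host interface
 * and endpoint configuration chosen by the common MAC init code.
 */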
static const struct rtw_page_table page_table_8822c[] = {
	{64, 64, 64, 64, 1},
	{64, 64, 64, 64, 1},
	{64, 64, 0, 0, 1},
	{64, 64, 64, 0, 1},
	{64, 64, 64, 64, 1},
};

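/*
 * Each row maps the VO, VI, BE, BK, management and high TX queues to a
 * DMA ring, following the field order of struct rtw_rqpn.  Row selection
 * mirrors page_table_8822c above.
 */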
static const struct rtw_rqpn rqpn_table_8822c[] = {
	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
	 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
	 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
	 RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_HIGH,
	 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
	 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
	{RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
	 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
	 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
};

static const struct rtw_prioq_addrs prioq_addrs_8822c = {
	.prio[RTW_DMA_MAPPING_EXTRA] = {
		.rsvd = REG_FIFOPAGE_INFO_4, .avail = REG_FIFOPAGE_INFO_4 + 2,
	},
	.prio[RTW_DMA_MAPPING_LOW] = {
		.rsvd = REG_FIFOPAGE_INFO_2, .avail = REG_FIFOPAGE_INFO_2 + 2,
	},
	.prio[RTW_DMA_MAPPING_NORMAL] = {
		.rsvd = REG_FIFOPAGE_INFO_3, .avail = REG_FIFOPAGE_INFO_3 + 2,
	},
	.prio[RTW_DMA_MAPPING_HIGH] = {
		.rsvd = REG_FIFOPAGE_INFO_1, .avail = REG_FIFOPAGE_INFO_1 + 2,
	},
	.wsize = true,
};

static const struct rtw_chip_ops rtw8822c_ops = {
	.power_on = rtw_power_on,
	.power_off = rtw_power_off,
	.phy_set_param = rtw8822c_phy_set_param,
	.read_efuse = rtw8822c_read_efuse,
	.query_phy_status = query_phy_status,
	.set_channel = rtw8822c_set_channel,
	.mac_init = rtw8822c_mac_init,
	.mac_postinit = NULL,
	.dump_fw_crash = rtw8822c_dump_fw_crash,
	.read_rf = rtw_phy_read_rf,
	.write_rf = rtw_phy_write_rf_reg_mix,
	.set_tx_power_index = rtw8822c_set_tx_power_index,
	.set_antenna = rtw8822c_set_antenna,
	.cfg_ldo25 = rtw8822c_cfg_ldo25,
	.set_ampdu_factor = NULL,
	.false_alarm_statistics = rtw8822c_false_alarm_statistics,
	.dpk_track = rtw8822c_dpk_track,
	.phy_calibration = rtw8822c_phy_calibration,
	.cck_pd_set = rtw8822c_phy_cck_pd_set,
	.pwr_track = rtw8822c_pwr_track,
	.config_bfee = rtw8822c_bf_config_bfee,
	.set_gid_table = rtw_bf_set_gid_table,
	.cfg_csi_rate = rtw_bf_cfg_csi_rate,
	.adaptivity_init = rtw8822c_adaptivity_init,
	.adaptivity = rtw8822c_adaptivity,
	.cfo_init = rtw8822c_cfo_init,
	.cfo_track = rtw8822c_cfo_track,
	.config_tx_path = rtw8822c_config_tx_path,
	.config_txrx_mode = rtw8822c_config_trx_mode,
	.led_set = rtw8822c_led_set,
	.fill_txdesc_checksum = rtw8822c_fill_txdesc_checksum,

	.coex_set_init = rtw8822c_coex_cfg_init,
	.coex_set_ant_switch = NULL,
	.coex_set_gnt_fix = rtw8822c_coex_cfg_gnt_fix,
	.coex_set_gnt_debug = rtw8822c_coex_cfg_gnt_debug,
	.coex_set_rfe_type = rtw8822c_coex_cfg_rfe_type,
	.coex_set_wl_tx_power = rtw8822c_coex_cfg_wl_tx_power,
	.coex_set_wl_rx_gain = rtw8822c_coex_cfg_wl_rx_gain,
};

/* Shared-Antenna Coex Table */
static const struct coex_table_para table_sant_8822c[] = {
	{0xffffffff, 0xffffffff}, /* case-0 */
	{0x55555555, 0x55555555},
	{0x66555555, 0x66555555},
	{0xaaaaaaaa, 0xaaaaaaaa},
	{0x5a5a5a5a, 0x5a5a5a5a},
	{0xfafafafa, 0xfafafafa}, /* case-5 */
	{0x6a5a5555, 0xaaaaaaaa},
	{0x6a5a56aa, 0x6a5a56aa},
	{0x6a5a5a5a, 0x6a5a5a5a},
	{0x66555555, 0x5a5a5a5a},
	{0x66555555, 0x6a5a5a5a}, /* case-10 */
	{0x66555555, 0x6a5a5aaa},
	{0x66555555, 0x5a5a5aaa},
	{0x66555555, 0x6aaa5aaa},
	{0x66555555, 0xaaaa5aaa},
	{0x66555555, 0xaaaaaaaa}, /* case-15 */
	{0xffff55ff, 0xfafafafa},
	{0xffff55ff, 0x6afa5afa},
	{0xaaffffaa, 0xfafafafa},
	{0xaa5555aa, 0x5a5a5a5a},
	{0xaa5555aa, 0x6a5a5a5a}, /* case-20 */
	{0xaa5555aa, 0xaaaaaaaa},
	{0xffffffff, 0x5a5a5a5a},
	{0xffffffff, 0x5a5a5a5a},
	{0xffffffff, 0x55555555},
	{0xffffffff, 0x5a5a5aaa}, /* case-25 */
	{0x55555555, 0x5a5a5a5a},
	{0x55555555, 0xaaaaaaaa},
	{0x55555555, 0x6a5a6a5a},
	{0x66556655, 0x66556655},
	{0x66556aaa, 0x6a5a6aaa}, /* case-30 */
	{0xffffffff, 0x5aaa5aaa},
	{0x56555555, 0x5a5a5aaa},
	{0xdaffdaff, 0xdaffdaff},
	{0xddffddff, 0xddffddff},
};

/* Non-Shared-Antenna Coex Table */
static const struct coex_table_para table_nsant_8822c[] = {
	{0xffffffff, 0xffffffff}, /* case-100 */
	{0x55555555, 0x55555555},
	{0x66555555, 0x66555555},
	{0xaaaaaaaa, 0xaaaaaaaa},
	{0x5a5a5a5a, 0x5a5a5a5a},
	{0xfafafafa, 0xfafafafa}, /* case-105 */
	{0x5afa5afa, 0x5afa5afa},
	{0x55555555, 0xfafafafa},
	{0x66555555, 0xfafafafa},
	{0x66555555, 0x5a5a5a5a},
	{0x66555555, 0x6a5a5a5a}, /* case-110 */
	{0x66555555, 0xaaaaaaaa},
	{0xffff55ff, 0xfafafafa},
	{0xffff55ff, 0x5afa5afa},
	{0xffff55ff, 0xaaaaaaaa},
	{0xffff55ff, 0xffff55ff}, /* case-115 */
	{0xaaffffaa, 0x5afa5afa},
	{0xaaffffaa, 0xaaaaaaaa},
	{0xffffffff, 0xfafafafa},
	{0xffffffff, 0x5afa5afa},
	{0xffffffff, 0xaaaaaaaa}, /* case-120 */
	{0x55ff55ff, 0x5afa5afa},
	{0x55ff55ff, 0xaaaaaaaa},
	{0x55ff55ff, 0x55ff55ff}
};

/* Shared-Antenna TDMA */
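/*
 * Each entry appears to be the 5-byte parameter block passed to the
 * firmware coex TDMA H2C command; the non-shared-antenna table below
 * uses the same layout.
 */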
static const struct coex_tdma_para tdma_sant_8822c[] = {
	{ {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-0 */
	{ {0x61, 0x45, 0x03, 0x11, 0x11} }, /* case-1 */
	{ {0x61, 0x3a, 0x03, 0x11, 0x11} },
	{ {0x61, 0x30, 0x03, 0x11, 0x11} },
	{ {0x61, 0x20, 0x03, 0x11, 0x11} },
	{ {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-5 */
	{ {0x61, 0x45, 0x03, 0x11, 0x10} },
	{ {0x61, 0x3a, 0x03, 0x11, 0x10} },
	{ {0x61, 0x30, 0x03, 0x11, 0x10} },
	{ {0x61, 0x20, 0x03, 0x11, 0x10} },
	{ {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-10 */
	{ {0x61, 0x08, 0x03, 0x11, 0x14} },
	{ {0x61, 0x08, 0x03, 0x10, 0x14} },
	{ {0x51, 0x08, 0x03, 0x10, 0x54} },
	{ {0x51, 0x08, 0x03, 0x10, 0x55} },
	{ {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-15 */
	{ {0x51, 0x45, 0x03, 0x10, 0x50} },
	{ {0x51, 0x3a, 0x03, 0x10, 0x50} },
	{ {0x51, 0x30, 0x03, 0x10, 0x50} },
	{ {0x51, 0x20, 0x03, 0x10, 0x50} },
	{ {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-20 */
	{ {0x51, 0x4a, 0x03, 0x10, 0x50} },
	{ {0x51, 0x0c, 0x03, 0x10, 0x54} },
	{ {0x55, 0x08, 0x03, 0x10, 0x54} },
	{ {0x65, 0x10, 0x03, 0x11, 0x10} },
	{ {0x51, 0x10, 0x03, 0x10, 0x51} }, /* case-25 */
	{ {0x51, 0x08, 0x03, 0x10, 0x50} },
	{ {0x61, 0x08, 0x03, 0x11, 0x11} }
};

/* Non-Shared-Antenna TDMA */
static const struct coex_tdma_para tdma_nsant_8822c[] = {
	{ {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-100 */
	{ {0x61, 0x45, 0x03, 0x11, 0x11} },
	{ {0x61, 0x3a, 0x03, 0x11, 0x11} },
	{ {0x61, 0x30, 0x03, 0x11, 0x11} },
	{ {0x61, 0x20, 0x03, 0x11, 0x11} },
	{ {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-105 */
	{ {0x61, 0x45, 0x03, 0x11, 0x10} },
	{ {0x61, 0x3a, 0x03, 0x11, 0x10} },
	{ {0x61, 0x30, 0x03, 0x11, 0x10} },
	{ {0x61, 0x20, 0x03, 0x11, 0x10} },
	{ {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-110 */
	{ {0x61, 0x08, 0x03, 0x11, 0x14} },
	{ {0x61, 0x08, 0x03, 0x10, 0x14} },
	{ {0x51, 0x08, 0x03, 0x10, 0x54} },
	{ {0x51, 0x08, 0x03, 0x10, 0x55} },
	{ {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-115 */
	{ {0x51, 0x45, 0x03, 0x10, 0x50} },
	{ {0x51, 0x3a, 0x03, 0x10, 0x50} },
	{ {0x51, 0x30, 0x03, 0x10, 0x50} },
	{ {0x51, 0x20, 0x03, 0x10, 0x50} },
	{ {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-120 */
	{ {0x51, 0x08, 0x03, 0x10, 0x50} }
};

/* RSSI thresholds in percent (dBm = % - 100) */
static const u8 wl_rssi_step_8822c[] = {60, 50, 44, 30};
static const u8 bt_rssi_step_8822c[] = {8, 15, 20, 25};
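/* the WL thresholds above therefore correspond to -40, -50, -56 and -70 dBm */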
static const struct coex_5g_afh_map afh_5g_8822c[] = { {0, 0, 0} };

/* wl_tx_dec_power, bt_tx_dec_power, wl_rx_gain, bt_rx_lna_constrain */
static const struct coex_rf_para rf_para_tx_8822c[] = {
	{0, 0, false, 7},  /* for normal */
	{0, 16, false, 7}, /* for WL-CPT */
	{8, 17, true, 4},
	{7, 18, true, 4},
	{6, 19, true, 4},
	{5, 20, true, 4},
	{0, 21, true, 4}   /* for gaming HID */
};

static const struct coex_rf_para rf_para_rx_8822c[] = {
	{0, 0, false, 7},  /* for normal */
	{0, 16, false, 7}, /* for WL-CPT */
	{3, 24, true, 5},
	{2, 26, true, 5},
	{1, 27, true, 5},
	{0, 28, true, 5},
	{0, 28, true, 5}   /* for gaming HID */
};

static_assert(ARRAY_SIZE(rf_para_tx_8822c) == ARRAY_SIZE(rf_para_rx_8822c));

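/*
 * Power-tracking compensation tables: each value is a TX power index
 * delta, indexed by the thermal difference from the eFuse calibration
 * value.  The _n/_p suffixes appear to cover negative and positive
 * thermal deltas, a/b the two RF paths, and the 5 GHz tables carry one
 * row per 5 GHz sub-band (RTW_PWR_TRK_5G_NUM).
 */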
static const u8
rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
	{ 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
	  11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
	  22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
	{ 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
	  11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
	  22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
	{ 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
	  11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
	  22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
};

static const u8
rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
	{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	  10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
	  19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
	{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	  10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
	  19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
	{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	  10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
	  19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
};

static const u8
rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
	{ 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
	  11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
	  23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
	{ 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
	  11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
	  23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
	{ 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
	  11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
	  23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
};

static const u8
rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
	{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	  10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
	  21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
	{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	  10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
	  21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
	{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	  10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
	  21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
};

static const u8 rtw8822c_pwrtrk_2gb_n[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 4, 4, 5, 6, 7, 8,
	9, 9, 10, 11, 12, 13, 14, 15, 15, 16,
	17, 18, 19, 20, 20, 21, 22, 23, 24, 25
};

static const u8 rtw8822c_pwrtrk_2gb_p[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	10, 11, 12, 13, 14, 14, 15, 16, 17, 18,
	19, 20, 21, 22, 23, 24, 25, 26, 27, 28
};

static const u8 rtw8822c_pwrtrk_2ga_n[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 2, 3, 4, 4, 5, 6, 6,
	7, 8, 8, 9, 9, 10, 11, 11, 12, 13,
	13, 14, 15, 15, 16, 17, 17, 18, 19, 19
};

static const u8 rtw8822c_pwrtrk_2ga_p[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	10, 11, 11, 12, 13, 14, 15, 16, 17, 18,
	19, 20, 21, 22, 23, 24, 25, 25, 26, 27
};

static const u8 rtw8822c_pwrtrk_2g_cck_b_n[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
	9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
	17, 18, 19, 20, 21, 22, 23, 23, 24, 25
};

static const u8 rtw8822c_pwrtrk_2g_cck_b_p[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
	10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
	20, 21, 22, 23, 24, 25, 26, 27, 28, 29
};

static const u8 rtw8822c_pwrtrk_2g_cck_a_n[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 3, 4, 5, 6, 6, 7,
	8, 9, 9, 10, 11, 12, 12, 13, 14, 15,
	15, 16, 17, 18, 18, 19, 20, 21, 21, 22
};

static const u8 rtw8822c_pwrtrk_2g_cck_a_p[RTW_PWR_TRK_TBL_SZ] = {
	0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
	9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
	18, 18, 19, 20, 21, 22, 23, 24, 24, 25
};

static const struct rtw_pwr_track_tbl rtw8822c_pwr_track_type0_tbl = {
	.pwrtrk_5gb_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_1],
	.pwrtrk_5gb_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_2],
	.pwrtrk_5gb_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_3],
	.pwrtrk_5gb_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_1],
	.pwrtrk_5gb_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_2],
	.pwrtrk_5gb_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_3],
	.pwrtrk_5ga_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_1],
	.pwrtrk_5ga_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_2],
	.pwrtrk_5ga_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_3],
	.pwrtrk_5ga_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_1],
	.pwrtrk_5ga_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_2],
	.pwrtrk_5ga_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_3],
	.pwrtrk_2gb_n = rtw8822c_pwrtrk_2gb_n,
	.pwrtrk_2gb_p = rtw8822c_pwrtrk_2gb_p,
	.pwrtrk_2ga_n = rtw8822c_pwrtrk_2ga_n,
	.pwrtrk_2ga_p = rtw8822c_pwrtrk_2ga_p,
	.pwrtrk_2g_cckb_n = rtw8822c_pwrtrk_2g_cck_b_n,
	.pwrtrk_2g_cckb_p = rtw8822c_pwrtrk_2g_cck_b_p,
	.pwrtrk_2g_ccka_n = rtw8822c_pwrtrk_2g_cck_a_n,
	.pwrtrk_2g_ccka_p = rtw8822c_pwrtrk_2g_cck_a_p,
};

static const struct rtw_rfe_def rtw8822c_rfe_defs[] = {
	[0] = RTW_DEF_RFE(8822c, 0, 0, 0),
	[1] = RTW_DEF_RFE(8822c, 0, 0, 0),
	[2] = RTW_DEF_RFE(8822c, 0, 0, 0),
	[3] = RTW_DEF_RFE(8822c, 0, 0, 0),
	[4] = RTW_DEF_RFE(8822c, 0, 0, 0),
	[5] = RTW_DEF_RFE(8822c, 0, 5, 0),
	[6] = RTW_DEF_RFE(8822c, 0, 0, 0),
};

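/*
 * EDCCA L2H/H2L thresholds live in bytes 2 and 3 of register 0x84c; the
 * 0x80 offset is presumably added by the common adaptivity code when it
 * converts threshold values into register writes.
 */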
static const struct rtw_hw_reg_offset rtw8822c_edcca_th[] = {
	[EDCCA_TH_L2H_IDX] = {
		{.addr = 0x84c, .mask = MASKBYTE2}, .offset = 0x80
	},
	[EDCCA_TH_H2L_IDX] = {
		{.addr = 0x84c, .mask = MASKBYTE3}, .offset = 0x80
	},
};

#ifdef CONFIG_PM
static const struct wiphy_wowlan_support rtw_wowlan_stub_8822c = {
	.flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_GTK_REKEY_FAILURE |
		 WIPHY_WOWLAN_DISCONNECT | WIPHY_WOWLAN_SUPPORTS_GTK_REKEY |
		 WIPHY_WOWLAN_NET_DETECT,
	.n_patterns = RTW_MAX_PATTERN_NUM,
	.pattern_max_len = RTW_MAX_PATTERN_SIZE,
	.pattern_min_len = 1,
	.max_nd_match_sets = 4,
};
#endif

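/*
 * Registers dumped for coex debugging; the {0, 0, RTW_REG_DOMAIN_NL}
 * entries appear to act as line breaks in the generated debug output.
 */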
static const struct rtw_reg_domain coex_info_hw_regs_8822c[] = {
	{0x1860, BIT(3), RTW_REG_DOMAIN_MAC8},
	{0x4160, BIT(3), RTW_REG_DOMAIN_MAC8},
	{0x1c32, BIT(6), RTW_REG_DOMAIN_MAC8},
	{0x1c38, BIT(28), RTW_REG_DOMAIN_MAC32},
	{0, 0, RTW_REG_DOMAIN_NL},
	{0x430, MASKDWORD, RTW_REG_DOMAIN_MAC32},
	{0x434, MASKDWORD, RTW_REG_DOMAIN_MAC32},
	{0x42a, MASKLWORD, RTW_REG_DOMAIN_MAC16},
	{0x426, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
	{0x45e, BIT(3), RTW_REG_DOMAIN_MAC8},
	{0x454, MASKLWORD, RTW_REG_DOMAIN_MAC16},
	{0, 0, RTW_REG_DOMAIN_NL},
	{0x4c, BIT(24) | BIT(23), RTW_REG_DOMAIN_MAC32},
	{0x64, BIT(0), RTW_REG_DOMAIN_MAC8},
	{0x4c6, BIT(4), RTW_REG_DOMAIN_MAC8},
	{0x40, BIT(5), RTW_REG_DOMAIN_MAC8},
	{0x1, RFREG_MASK, RTW_REG_DOMAIN_RF_B},
	{0, 0, RTW_REG_DOMAIN_NL},
	{0x550, MASKDWORD, RTW_REG_DOMAIN_MAC32},
	{0x522, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
	{0x953, BIT(1), RTW_REG_DOMAIN_MAC8},
	{0xc50, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
};

const struct rtw_chip_info rtw8822c_hw_spec = {
	.ops = &rtw8822c_ops,
	.id = RTW_CHIP_TYPE_8822C,
	.fw_name = "rtw88/rtw8822c_fw.bin",
	.wlan_cpu = RTW_WCPU_3081,
	.tx_pkt_desc_sz = 48,
	.tx_buf_desc_sz = 16,
	.rx_pkt_desc_sz = 24,
	.rx_buf_desc_sz = 8,
	.phy_efuse_size = 512,
	.log_efuse_size = 768,
	.ptct_efuse_size = 124,
	.txff_size = 262144,
	.rxff_size = 24576,
	.fw_rxff_size = 12288,
	.rsvd_drv_pg_num = 16,
	.txgi_factor = 2,
	.is_pwr_by_rate_dec = false,
	.max_power_index = 0x7f,
	.csi_buf_pg_num = 50,
	.band = RTW_BAND_2G | RTW_BAND_5G,
	.page_size = TX_PAGE_SIZE,
	.dig_min = 0x20,
	.amsdu_in_ampdu = true,
	.usb_tx_agg_desc_num = 3,
	.hw_feature_report = true,
	.c2h_ra_report_size = 7,
	.old_datarate_fb_limit = false,
	.default_1ss_tx_path = BB_PATH_A,
	.path_div_supported = true,
	.ht_supported = true,
	.vht_supported = true,
	.lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),
	.sys_func_en = 0xD8,
	.pwr_on_seq = card_enable_flow_8822c,
	.pwr_off_seq = card_disable_flow_8822c,
	.page_table = page_table_8822c,
	.rqpn_table = rqpn_table_8822c,
	.prioq_addrs = &prioq_addrs_8822c,
	.intf_table = &phy_para_table_8822c,
	.dig = rtw8822c_dig,
	.dig_cck = NULL,
	.rf_base_addr = {0x3c00, 0x4c00},
	.rf_sipi_addr = {0x1808, 0x4108},
	.ltecoex_addr = &rtw8822c_ltecoex_addr,
	.mac_tbl = &rtw8822c_mac_tbl,
	.agc_tbl = &rtw8822c_agc_tbl,
	.bb_tbl = &rtw8822c_bb_tbl,
	.rfk_init_tbl = &rtw8822c_array_mp_cal_init_tbl,
	.rf_tbl = {&rtw8822c_rf_b_tbl, &rtw8822c_rf_a_tbl},
	.rfe_defs = rtw8822c_rfe_defs,
	.rfe_defs_size = ARRAY_SIZE(rtw8822c_rfe_defs),
	.en_dis_dpd = true,
	.dpd_ratemask = DIS_DPD_RATEALL,
	.iqk_threshold = 8,
	.lck_threshold = 8,
	.bfer_su_max_num = 2,
	.bfer_mu_max_num = 1,
	.rx_ldpc = true,
	.tx_stbc = true,
	.edcca_th = rtw8822c_edcca_th,
	.l2h_th_ini_cs = 60,
	.l2h_th_ini_ad = 45,
	.ampdu_density = IEEE80211_HT_MPDU_DENSITY_2,

#ifdef CONFIG_PM
	.wow_fw_name = "rtw88/rtw8822c_wow_fw.bin",
	.wowlan_stub = &rtw_wowlan_stub_8822c,
	.max_sched_scan_ssids = 4,
#endif
	.max_scan_ie_len = (RTW_PROBE_PG_CNT - 1) * TX_PAGE_SIZE,
	.coex_para_ver = 0x22020720,
	.bt_desired_ver = 0x20,
	.scbd_support = true,
	.new_scbd10_def = true,
	.ble_hid_profile_support = true,
	.wl_mimo_ps_support = true,
	.pstdma_type = COEX_PSTDMA_FORCE_LPSOFF,
	.bt_rssi_type = COEX_BTRSSI_DBM,
	.ant_isolation = 15,
	.rssi_tolerance = 2,
	.wl_rssi_step = wl_rssi_step_8822c,
	.bt_rssi_step = bt_rssi_step_8822c,
	.table_sant_num = ARRAY_SIZE(table_sant_8822c),
	.table_sant = table_sant_8822c,
	.table_nsant_num = ARRAY_SIZE(table_nsant_8822c),
	.table_nsant = table_nsant_8822c,
	.tdma_sant_num = ARRAY_SIZE(tdma_sant_8822c),
	.tdma_sant = tdma_sant_8822c,
	.tdma_nsant_num = ARRAY_SIZE(tdma_nsant_8822c),
	.tdma_nsant = tdma_nsant_8822c,
	.wl_rf_para_num = ARRAY_SIZE(rf_para_tx_8822c),
	.wl_rf_para_tx = rf_para_tx_8822c,
	.wl_rf_para_rx = rf_para_rx_8822c,
	.bt_afh_span_bw20 = 0x24,
	.bt_afh_span_bw40 = 0x36,
	.afh_5g_num = ARRAY_SIZE(afh_5g_8822c),
	.afh_5g = afh_5g_8822c,

	.coex_info_hw_regs_num = ARRAY_SIZE(coex_info_hw_regs_8822c),
	.coex_info_hw_regs = coex_info_hw_regs_8822c,

	.fw_fifo_addr = {0x780, 0x700, 0x780, 0x660, 0x650, 0x680},
	.fwcd_segs = &rtw8822c_fwcd_segs,
};
EXPORT_SYMBOL(rtw8822c_hw_spec);

MODULE_FIRMWARE("rtw88/rtw8822c_fw.bin");
MODULE_FIRMWARE("rtw88/rtw8822c_wow_fw.bin");

MODULE_AUTHOR("Realtek Corporation");
MODULE_DESCRIPTION("Realtek 802.11ac wireless 8822c driver");
MODULE_LICENSE("Dual BSD/GPL");