// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
/* Copyright(c) 2018-2019 Realtek Corporation
 */

#include <linux/module.h>
#include "main.h"
#include "coex.h"
#include "fw.h"
#include "tx.h"
#include "rx.h"
#include "phy.h"
#include "rtw8822c.h"
#include "rtw8822c_table.h"
#include "mac.h"
#include "reg.h"
#include "debug.h"
#include "util.h"
#include "bf.h"
#include "efuse.h"

#define IQK_DONE_8822C 0xaa

static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
				     u8 rx_path, bool is_tx2_path);

static void rtw8822ce_efuse_parsing(struct rtw_efuse *efuse,
				    struct rtw8822c_efuse *map)
{
	ether_addr_copy(efuse->addr, map->e.mac_addr);
}

static void rtw8822cu_efuse_parsing(struct rtw_efuse *efuse,
				    struct rtw8822c_efuse *map)
{
	ether_addr_copy(efuse->addr, map->u.mac_addr);
}

static void rtw8822cs_efuse_parsing(struct rtw_efuse *efuse,
				    struct rtw8822c_efuse *map)
{
	ether_addr_copy(efuse->addr, map->s.mac_addr);
}

static int rtw8822c_read_efuse(struct rtw_dev *rtwdev, u8 *log_map)
{
	struct rtw_efuse *efuse = &rtwdev->efuse;
	struct rtw8822c_efuse *map;
	int i;

	map = (struct rtw8822c_efuse *)log_map;

	efuse->usb_mode_switch = u8_get_bits(map->usb_mode, BIT(7));
	efuse->rfe_option = map->rfe_option;
	efuse->rf_board_option = map->rf_board_option;
	efuse->crystal_cap = map->xtal_k & XCAP_MASK;
	efuse->channel_plan = map->channel_plan;
	efuse->country_code[0] = map->country_code[0];
	efuse->country_code[1] = map->country_code[1];
	efuse->bt_setting = map->rf_bt_setting;
	efuse->regd = map->rf_board_option & 0x7;
	efuse->thermal_meter[RF_PATH_A] = map->path_a_thermal;
	efuse->thermal_meter[RF_PATH_B] = map->path_b_thermal;
	efuse->thermal_meter_k =
			(map->path_a_thermal + map->path_b_thermal) >> 1;
	efuse->power_track_type = (map->tx_pwr_calibrate_rate >> 4) & 0xf;

	for (i = 0; i < 4; i++)
		efuse->txpwr_idx_table[i] = map->txpwr_idx_table[i];

	switch (rtw_hci_type(rtwdev)) {
	case RTW_HCI_TYPE_PCIE:
		rtw8822ce_efuse_parsing(efuse, map);
		break;
	case RTW_HCI_TYPE_USB:
		rtw8822cu_efuse_parsing(efuse, map);
		break;
	case RTW_HCI_TYPE_SDIO:
		rtw8822cs_efuse_parsing(efuse, map);
		break;
	default:
		/* unsupported now */
		return -ENOTSUPP;
	}

	return 0;
}

static void rtw8822c_header_file_init(struct rtw_dev *rtwdev, bool pre)
{
	rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
	rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_PI_ON);
	rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
	rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_PI_ON);

	if (pre)
		rtw_write32_clr(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
	else
		rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
}

static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
{
	rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
	rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
	rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
}

static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
				    struct rtw_backup_info *backup,
				    struct rtw_backup_info *backup_rf)
{
	u32 path, i;
	u32 val;
	u32 reg;
	u32 rf_addr[DACK_RF_8822C] = {0x8f};
	u32 addrs[DACK_REG_8822C] = {0x180c, 0x1810, 0x410c, 0x4110,
				     0x1c3c, 0x1c24, 0x1d70, 0x9b4,
				     0x1a00, 0x1a14, 0x1d58, 0x1c38,
				     0x1e24, 0x1e28, 0x1860, 0x4160};

	for (i = 0; i < DACK_REG_8822C; i++) {
		backup[i].len = 4;
		backup[i].reg = addrs[i];
		backup[i].val = rtw_read32(rtwdev, addrs[i]);
	}

	for (path = 0; path < DACK_PATH_8822C; path++) {
		for (i = 0; i < DACK_RF_8822C; i++) {
			reg = rf_addr[i];
			val = rtw_read_rf(rtwdev, path, reg, RFREG_MASK);
			backup_rf[path * i + i].reg = reg;
			backup_rf[path * i + i].val = val;
		}
	}
}

static void rtw8822c_dac_restore_reg(struct rtw_dev *rtwdev,
				     struct rtw_backup_info *backup,
				     struct rtw_backup_info *backup_rf)
{
	u32 path, i;
	u32 val;
	u32 reg;

	rtw_restore_reg(rtwdev, backup, DACK_REG_8822C);

	for (path = 0; path < DACK_PATH_8822C; path++) {
		for (i = 0; i < DACK_RF_8822C; i++) {
			val = backup_rf[path * i + i].val;
			reg = backup_rf[path * i + i].reg;
			rtw_write_rf(rtwdev, path, reg, RFREG_MASK, val);
		}
	}
}

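/* The DAC calibration (DACK) helpers below appear to treat I/Q samples as
 * 10-bit two's-complement values: 0x000-0x1ff are non-negative, while
 * 0x200-0x3ff stand for -(0x400 - v).  For example, 0x3f8 would denote -8
 * and 0x005 would denote +5, so "v >= 0x200" below means "v is negative".
 */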
static void rtw8822c_rf_minmax_cmp(struct rtw_dev *rtwdev, u32 value,
				   u32 *min, u32 *max)
{
	if (value >= 0x200) {
		if (*min >= 0x200) {
			if (*min > value)
				*min = value;
		} else {
			*min = value;
		}
		if (*max >= 0x200) {
			if (*max < value)
				*max = value;
		}
	} else {
		if (*min < 0x200) {
			if (*min > value)
				*min = value;
		}

		if (*max >= 0x200) {
			*max = value;
		} else {
			if (*max < value)
				*max = value;
		}
	}
}

static void __rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *v1, u32 *v2)
{
	if (*v1 >= 0x200 && *v2 >= 0x200) {
		if (*v1 > *v2)
			swap(*v1, *v2);
	} else if (*v1 < 0x200 && *v2 < 0x200) {
		if (*v1 > *v2)
			swap(*v1, *v2);
	} else if (*v1 < 0x200 && *v2 >= 0x200) {
		swap(*v1, *v2);
	}
}

static void rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
{
	u32 i, j;

	for (i = 0; i < DACK_SN_8822C - 1; i++) {
		for (j = 0; j < (DACK_SN_8822C - 1 - i); j++) {
			__rtw8822c_dac_iq_sort(rtwdev, &iv[j], &iv[j + 1]);
			__rtw8822c_dac_iq_sort(rtwdev, &qv[j], &qv[j + 1]);
		}
	}
}

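/* Average the DC offset over the middle of the sorted sample set (the 10
 * smallest and 10 largest entries are dropped) and return it in the same
 * 10-bit two's-complement form.  Sketch of the intent, assuming the sign
 * convention noted above:
 *   p = sum of the non-negative samples, m = sum of |negative| samples
 *   t = (p - m) / (DACK_SN_8822C - 20)           if p > m
 *   t = 0x400 - (m - p) / (DACK_SN_8822C - 20)   otherwise (wraps negative)
 */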
static void rtw8822c_dac_iq_offset(struct rtw_dev *rtwdev, u32 *vec, u32 *val)
{
	u32 p, m, t, i;

	m = 0;
	p = 0;
	for (i = 10; i < DACK_SN_8822C - 10; i++) {
		if (vec[i] > 0x200)
			m = (0x400 - vec[i]) + m;
		else
			p = vec[i] + p;
	}

	if (p > m) {
		t = p - m;
		t = t / (DACK_SN_8822C - 20);
	} else {
		t = m - p;
		t = t / (DACK_SN_8822C - 20);
		if (t != 0x0)
			t = 0x400 - t;
	}

	*val = t;
}

static u32 rtw8822c_get_path_write_addr(u8 path)
{
	u32 base_addr;

	switch (path) {
	case RF_PATH_A:
		base_addr = 0x1800;
		break;
	case RF_PATH_B:
		base_addr = 0x4100;
		break;
	default:
		WARN_ON(1);
		return -1;
	}

	return base_addr;
}

static u32 rtw8822c_get_path_read_addr(u8 path)
{
	u32 base_addr;

	switch (path) {
	case RF_PATH_A:
		base_addr = 0x2800;
		break;
	case RF_PATH_B:
		base_addr = 0x4500;
		break;
	default:
		WARN_ON(1);
		return -1;
	}

	return base_addr;
}

static bool rtw8822c_dac_iq_check(struct rtw_dev *rtwdev, u32 value)
{
	bool ret = true;

	if ((value >= 0x200 && (0x400 - value) > 0x64) ||
	    (value < 0x200 && value > 0x64)) {
		ret = false;
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] Error overflow\n");
	}

	return ret;
}

static void rtw8822c_dac_cal_iq_sample(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
{
	u32 temp;
	int i = 0, cnt = 0;

	while (i < DACK_SN_8822C && cnt < 10000) {
		cnt++;
		temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
		iv[i] = (temp & 0x3ff000) >> 12;
		qv[i] = temp & 0x3ff;

		if (rtw8822c_dac_iq_check(rtwdev, iv[i]) &&
		    rtw8822c_dac_iq_check(rtwdev, qv[i]))
			i++;
	}
}

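/* Search for a stable I/Q reading: recompute min/max in two's-complement
 * order and, while the spread of either component exceeds 5 codes, replace
 * the smallest and largest sorted entries with fresh samples read from
 * 0x2dbc.  The loop is bounded to 100 iterations; the final offsets come
 * from rtw8822c_dac_iq_offset().  (Summary of the code below.)
 */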
static void rtw8822c_dac_cal_iq_search(struct rtw_dev *rtwdev,
				       u32 *iv, u32 *qv,
				       u32 *i_value, u32 *q_value)
{
	u32 i_max = 0, q_max = 0, i_min = 0, q_min = 0;
	u32 i_delta, q_delta;
	u32 temp;
	int i, cnt = 0;

	do {
		i_min = iv[0];
		i_max = iv[0];
		q_min = qv[0];
		q_max = qv[0];
		for (i = 0; i < DACK_SN_8822C; i++) {
			rtw8822c_rf_minmax_cmp(rtwdev, iv[i], &i_min, &i_max);
			rtw8822c_rf_minmax_cmp(rtwdev, qv[i], &q_min, &q_max);
		}

		if (i_max < 0x200 && i_min < 0x200)
			i_delta = i_max - i_min;
		else if (i_max >= 0x200 && i_min >= 0x200)
			i_delta = i_max - i_min;
		else
			i_delta = i_max + (0x400 - i_min);

		if (q_max < 0x200 && q_min < 0x200)
			q_delta = q_max - q_min;
		else if (q_max >= 0x200 && q_min >= 0x200)
			q_delta = q_max - q_min;
		else
			q_delta = q_max + (0x400 - q_min);

		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] i: min=0x%08x, max=0x%08x, delta=0x%08x\n",
			i_min, i_max, i_delta);
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] q: min=0x%08x, max=0x%08x, delta=0x%08x\n",
			q_min, q_max, q_delta);

		rtw8822c_dac_iq_sort(rtwdev, iv, qv);

		if (i_delta > 5 || q_delta > 5) {
			temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
			iv[0] = (temp & 0x3ff000) >> 12;
			qv[0] = temp & 0x3ff;
			temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
			iv[DACK_SN_8822C - 1] = (temp & 0x3ff000) >> 12;
			qv[DACK_SN_8822C - 1] = temp & 0x3ff;
		} else {
			break;
		}
	} while (cnt++ < 100);

	rtw8822c_dac_iq_offset(rtwdev, iv, i_value);
	rtw8822c_dac_iq_offset(rtwdev, qv, q_value);
}

static void rtw8822c_dac_cal_rf_mode(struct rtw_dev *rtwdev,
				     u32 *i_value, u32 *q_value)
{
	u32 iv[DACK_SN_8822C], qv[DACK_SN_8822C];
	u32 rf_a, rf_b;

	rf_a = rtw_read_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK);
	rf_b = rtw_read_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK);

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-A=0x%05x\n", rf_a);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-B=0x%05x\n", rf_b);

	rtw8822c_dac_cal_iq_sample(rtwdev, iv, qv);
	rtw8822c_dac_cal_iq_search(rtwdev, iv, qv, i_value, q_value);
}

static void rtw8822c_dac_bb_setting(struct rtw_dev *rtwdev)
{
	rtw_write32_mask(rtwdev, 0x1d58, 0xff8, 0x1ff);
	rtw_write32_mask(rtwdev, 0x1a00, 0x3, 0x2);
	rtw_write32_mask(rtwdev, 0x1a14, 0x300, 0x3);
	rtw_write32(rtwdev, 0x1d70, 0x7e7e7e7e);
	rtw_write32_mask(rtwdev, 0x180c, 0x3, 0x0);
	rtw_write32_mask(rtwdev, 0x410c, 0x3, 0x0);
	rtw_write32(rtwdev, 0x1b00, 0x00000008);
	rtw_write8(rtwdev, 0x1bcc, 0x3f);
	rtw_write32(rtwdev, 0x1b00, 0x0000000a);
	rtw_write8(rtwdev, 0x1bcc, 0x3f);
	rtw_write32_mask(rtwdev, 0x1e24, BIT(31), 0x0);
	rtw_write32_mask(rtwdev, 0x1e28, 0xf, 0x3);
}

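/* ADC offset calibration (ADCK) for one path: measure the residual DC in
 * RF loopback, write the negated value (0x400 - v, i.e. -v in the 10-bit
 * representation) into offset 0x68 as compensation, then re-measure; this
 * repeats up to ten times until both I and Q residuals are below 5 codes.
 */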
static void rtw8822c_dac_cal_adc(struct rtw_dev *rtwdev,
				 u8 path, u32 *adc_ic, u32 *adc_qc)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 ic = 0, qc = 0, temp = 0;
	u32 base_addr;
	u32 path_sel;
	int i;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK path(%d)\n", path);

	base_addr = rtw8822c_get_path_write_addr(path);
	switch (path) {
	case RF_PATH_A:
		path_sel = 0xa0000;
		break;
	case RF_PATH_B:
		path_sel = 0x80000;
		break;
	default:
		WARN_ON(1);
		return;
	}

	/* ADCK step1 */
	rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x0);
	if (path == RF_PATH_B)
		rtw_write32(rtwdev, base_addr + 0x30, 0x30db8041);
	rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02dd08c4);
	rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
	rtw_write_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK, 0x10000);
	rtw_write_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK, 0x10000);
	for (i = 0; i < 10; i++) {
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK count=%d\n", i);
		rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8003);
		rtw_write32(rtwdev, 0x1c24, 0x00010002);
		rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] before: i=0x%x, q=0x%x\n", ic, qc);

		/* compensation value */
		if (ic != 0x0) {
			ic = 0x400 - ic;
			*adc_ic = ic;
		}
		if (qc != 0x0) {
			qc = 0x400 - qc;
			*adc_qc = qc;
		}
		temp = (ic & 0x3ff) | ((qc & 0x3ff) << 10);
		rtw_write32(rtwdev, base_addr + 0x68, temp);
		dm_info->dack_adck[path] = temp;
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK 0x%08x=0x%08x\n",
			base_addr + 0x68, temp);
		/* check ADC DC offset */
		rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8103);
		rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[DACK] after: i=0x%08x, q=0x%08x\n", ic, qc);
		if (ic >= 0x200)
			ic = 0x400 - ic;
		if (qc >= 0x200)
			qc = 0x400 - qc;
		if (ic < 5 && qc < 5)
			break;
	}

	/* ADCK step2 */
	rtw_write32(rtwdev, 0x1c3c, 0x00000003);
	rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);

	/* release pull low switch on IQ path */
	rtw_write_rf(rtwdev, path, 0x8f, BIT(13), 0x1);
}

static void rtw8822c_dac_cal_step1(struct rtw_dev *rtwdev, u8 path)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 base_addr;
	u32 read_addr;

	base_addr = rtw8822c_get_path_write_addr(path);
	read_addr = rtw8822c_get_path_read_addr(path);

	rtw_write32(rtwdev, base_addr + 0x68, dm_info->dack_adck[path]);
	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	if (path == RF_PATH_A) {
		rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
		rtw_write32(rtwdev, 0x1c38, 0xffffffff);
	}
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff81);
	rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xd8, 0x0008ff81);
	rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
	mdelay(2);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x000aff8d);
	mdelay(2);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
	mdelay(1);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
	rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
	mdelay(20);
	if (!check_hw_ready(rtwdev, read_addr + 0x08, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, read_addr + 0x34, 0x7fff80, 0xffff))
		rtw_err(rtwdev, "failed to wait for dack ready\n");
	rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
	mdelay(1);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
	rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
	rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
}

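/* Step 2 converts the measured 10-bit DC offset into the 8-bit compensation
 * code that step 3 writes back to hardware.  The arithmetic below amounts to
 * a fixed gain of 2 * 6/5 around a mid-code of 0x80:
 *   code = v * 12 / 5 + 0x80              when v < 0x300
 *   code = 0x7f - (0x400 - v) * 12 / 5    otherwise
 * (interpretation of the code below, not a documented formula).
 */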
static void rtw8822c_dac_cal_step2(struct rtw_dev *rtwdev,
				   u8 path, u32 *ic_out, u32 *qc_out)
{
	u32 base_addr;
	u32 ic, qc, ic_in, qc_in;

	base_addr = rtw8822c_get_path_write_addr(path);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, 0x0);
	rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, 0x8);
	rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, 0x0);
	rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, 0x8);

	rtw_write32(rtwdev, 0x1b00, 0x00000008);
	rtw_write8(rtwdev, 0x1bcc, 0x03f);
	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, 0x1c3c, 0x00088103);

	rtw8822c_dac_cal_rf_mode(rtwdev, &ic_in, &qc_in);
	ic = ic_in;
	qc = qc_in;

	/* compensation value */
	if (ic != 0x0)
		ic = 0x400 - ic;
	if (qc != 0x0)
		qc = 0x400 - qc;
	if (ic < 0x300) {
		ic = ic * 2 * 6 / 5;
		ic = ic + 0x80;
	} else {
		ic = (0x400 - ic) * 2 * 6 / 5;
		ic = 0x7f - ic;
	}
	if (qc < 0x300) {
		qc = qc * 2 * 6 / 5;
		qc = qc + 0x80;
	} else {
		qc = (0x400 - qc) * 2 * 6 / 5;
		qc = 0x7f - qc;
	}

	*ic_out = ic;
	*qc_out = qc;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] before i=0x%x, q=0x%x\n", ic_in, qc_in);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] after i=0x%x, q=0x%x\n", ic, qc);
}

static void rtw8822c_dac_cal_step3(struct rtw_dev *rtwdev, u8 path,
				   u32 adc_ic, u32 adc_qc,
				   u32 *ic_in, u32 *qc_in,
				   u32 *i_out, u32 *q_out)
{
	u32 base_addr;
	u32 read_addr;
	u32 ic, qc;
	u32 temp;

	base_addr = rtw8822c_get_path_write_addr(path);
	read_addr = rtw8822c_get_path_read_addr(path);
	ic = *ic_in;
	qc = *qc_in;

	rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xbc, 0xc008ff81);
	rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, ic & 0xf);
	rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, (ic & 0xf0) >> 4);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
	rtw_write32(rtwdev, base_addr + 0xd8, 0xe008ff81);
	rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
	rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, qc & 0xf);
	rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, (qc & 0xf0) >> 4);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
	mdelay(2);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x6);
	mdelay(2);
	rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
	rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
	mdelay(1);
	rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
	rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
	mdelay(20);
	if (!check_hw_ready(rtwdev, read_addr + 0x24, 0x07f80000, ic) ||
	    !check_hw_ready(rtwdev, read_addr + 0x50, 0x07f80000, qc))
		rtw_err(rtwdev, "failed to write IQ vector to hardware\n");
	rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
	mdelay(1);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x3);
	rtw_write32(rtwdev, 0x9b4, 0xdb6db600);

	/* check DAC DC offset */
	temp = ((adc_ic + 0x10) & 0x3ff) | (((adc_qc + 0x10) & 0x3ff) << 10);
	rtw_write32(rtwdev, base_addr + 0x68, temp);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
	rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
	rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
	if (ic >= 0x10)
		ic = ic - 0x10;
	else
		ic = 0x400 - (0x10 - ic);

	if (qc >= 0x10)
		qc = qc - 0x10;
	else
		qc = 0x400 - (0x10 - qc);

	*i_out = ic;
	*q_out = qc;

	if (ic >= 0x200)
		ic = 0x400 - ic;
	if (qc >= 0x200)
		qc = 0x400 - qc;

	*ic_in = ic;
	*qc_in = qc;

	rtw_dbg(rtwdev, RTW_DBG_RFK,
		"[DACK] after DACK i=0x%x, q=0x%x\n", *i_out, *q_out);
}

static void rtw8822c_dac_cal_step4(struct rtw_dev *rtwdev, u8 path)
{
	u32 base_addr = rtw8822c_get_path_write_addr(path);

	rtw_write32(rtwdev, base_addr + 0x68, 0x0);
	rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
	rtw_write32_mask(rtwdev, base_addr + 0xbc, 0x1, 0x0);
	rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x1);
}

static void rtw8822c_dac_cal_backup_vec(struct rtw_dev *rtwdev,
					u8 path, u8 vec, u32 w_addr, u32 r_addr)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u16 val;
	u32 i;

	if (WARN_ON(vec >= 2))
		return;

	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
		rtw_write32_mask(rtwdev, w_addr, 0xf0000000, i);
		val = (u16)rtw_read32_mask(rtwdev, r_addr, 0x7fc0000);
		dm_info->dack_msbk[path][vec][i] = val;
	}
}

static void rtw8822c_dac_cal_backup_path(struct rtw_dev *rtwdev, u8 path)
{
	u32 w_off = 0x1c;
	u32 r_off = 0x2c;
	u32 w_addr, r_addr;

	if (WARN_ON(path >= 2))
		return;

	/* backup I vector */
	w_addr = rtw8822c_get_path_write_addr(path) + 0xb0;
	r_addr = rtw8822c_get_path_read_addr(path) + 0x10;
	rtw8822c_dac_cal_backup_vec(rtwdev, path, 0, w_addr, r_addr);

	/* backup Q vector */
	w_addr = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
	r_addr = rtw8822c_get_path_read_addr(path) + 0x10 + r_off;
	rtw8822c_dac_cal_backup_vec(rtwdev, path, 1, w_addr, r_addr);
}

static void rtw8822c_dac_cal_backup_dck(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 val;

	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_A][0][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_1, 0xf);
	dm_info->dack_dck[RF_PATH_A][0][1] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_A][1][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_1, 0xf);
	dm_info->dack_dck[RF_PATH_A][1][1] = val;

	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_B][0][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_1, 0xf);
	dm_info->dack_dck[RF_PATH_B][1][0] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000);
	dm_info->dack_dck[RF_PATH_B][0][1] = val;
	val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_1, 0xf);
	dm_info->dack_dck[RF_PATH_B][1][1] = val;
}

static void rtw8822c_dac_cal_backup(struct rtw_dev *rtwdev)
{
	u32 temp[3];

	temp[0] = rtw_read32(rtwdev, 0x1860);
	temp[1] = rtw_read32(rtwdev, 0x4160);
	temp[2] = rtw_read32(rtwdev, 0x9b4);

	/* set clock */
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);

	/* backup path-A I/Q */
	rtw_write32_clr(rtwdev, 0x1830, BIT(30));
	rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
	rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_A);

	/* backup path-B I/Q */
	rtw_write32_clr(rtwdev, 0x4130, BIT(30));
	rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
	rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_B);

	rtw8822c_dac_cal_backup_dck(rtwdev);
	rtw_write32_set(rtwdev, 0x1830, BIT(30));
	rtw_write32_set(rtwdev, 0x4130, BIT(30));

	rtw_write32(rtwdev, 0x1860, temp[0]);
	rtw_write32(rtwdev, 0x4160, temp[1]);
	rtw_write32(rtwdev, 0x9b4, temp[2]);
}

static void rtw8822c_dac_cal_restore_dck(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 val;

	rtw_write32_set(rtwdev, REG_DCKA_I_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_A][0][0];
	rtw_write32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_A][0][1];
	rtw_write32_mask(rtwdev, REG_DCKA_I_1, 0xf, val);

	rtw_write32_set(rtwdev, REG_DCKA_Q_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_A][1][0];
	rtw_write32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_A][1][1];
	rtw_write32_mask(rtwdev, REG_DCKA_Q_1, 0xf, val);

	rtw_write32_set(rtwdev, REG_DCKB_I_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_B][0][0];
	rtw_write32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_B][0][1];
	rtw_write32_mask(rtwdev, REG_DCKB_I_1, 0xf, val);

	rtw_write32_set(rtwdev, REG_DCKB_Q_0, BIT(19));
	val = dm_info->dack_dck[RF_PATH_B][1][0];
	rtw_write32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000, val);
	val = dm_info->dack_dck[RF_PATH_B][1][1];
	rtw_write32_mask(rtwdev, REG_DCKB_Q_1, 0xf, val);
}

static void rtw8822c_dac_cal_restore_prepare(struct rtw_dev *rtwdev)
{
	rtw_write32(rtwdev, 0x9b4, 0xdb66db00);

	rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x0);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x0);
	rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x0);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x0);

	rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x0);
	rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
	rtw_write32_mask(rtwdev, 0x18b4, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x18d0, BIT(0), 0x1);

	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x0);
	rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
	rtw_write32_mask(rtwdev, 0x41b4, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x41d0, BIT(0), 0x1);

	rtw_write32_mask(rtwdev, 0x18b0, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x18c0, BIT(14), 0x0);
	rtw_write32_mask(rtwdev, 0x18cc, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x18dc, BIT(14), 0x0);

	rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x1);

	rtw8822c_dac_cal_restore_dck(rtwdev);

	rtw_write32_mask(rtwdev, 0x18c0, 0x38000, 0x7);
	rtw_write32_mask(rtwdev, 0x18dc, 0x38000, 0x7);
	rtw_write32_mask(rtwdev, 0x41c0, 0x38000, 0x7);
	rtw_write32_mask(rtwdev, 0x41dc, 0x38000, 0x7);

	rtw_write32_mask(rtwdev, 0x18b8, BIT(26) | BIT(25), 0x1);
	rtw_write32_mask(rtwdev, 0x18d4, BIT(26) | BIT(25), 0x1);

	rtw_write32_mask(rtwdev, 0x41b0, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x41c0, BIT(14), 0x0);
	rtw_write32_mask(rtwdev, 0x41cc, 0xf00, 0x0);
	rtw_write32_mask(rtwdev, 0x41dc, BIT(14), 0x0);

	rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x1);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x1);

	rtw_write32_mask(rtwdev, 0x41b8, BIT(26) | BIT(25), 0x1);
	rtw_write32_mask(rtwdev, 0x41d4, BIT(26) | BIT(25), 0x1);
}

static bool rtw8822c_dac_cal_restore_wait(struct rtw_dev *rtwdev,
					  u32 target_addr, u32 toggle_addr)
{
	u32 cnt = 0;

	do {
		rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x0);
		rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x2);

		if (rtw_read32_mask(rtwdev, target_addr, 0xf) == 0x6)
			return true;

	} while (cnt++ < 100);

	return false;
}

static bool rtw8822c_dac_cal_restore_path(struct rtw_dev *rtwdev, u8 path)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 w_off = 0x1c;
	u32 r_off = 0x2c;
	u32 w_i, r_i, w_q, r_q;
	u32 value;
	u32 i;

	w_i = rtw8822c_get_path_write_addr(path) + 0xb0;
	r_i = rtw8822c_get_path_read_addr(path) + 0x08;
	w_q = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
	r_q = rtw8822c_get_path_read_addr(path) + 0x08 + r_off;

	if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_i, w_i + 0x8))
		return false;

	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
		rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
		value = dm_info->dack_msbk[path][0][i];
		rtw_write32_mask(rtwdev, w_i + 0x4, 0xff8, value);
		rtw_write32_mask(rtwdev, w_i, 0xf0000000, i);
		rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x1);
	}

	rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);

	if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_q, w_q + 0x8))
		return false;

	for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
		rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
		value = dm_info->dack_msbk[path][1][i];
		rtw_write32_mask(rtwdev, w_q + 0x4, 0xff8, value);
		rtw_write32_mask(rtwdev, w_q, 0xf0000000, i);
		rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x1);
	}
	rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);

	rtw_write32_mask(rtwdev, w_i + 0x8, BIT(26) | BIT(25), 0x0);
	rtw_write32_mask(rtwdev, w_q + 0x8, BIT(26) | BIT(25), 0x0);
	rtw_write32_mask(rtwdev, w_i + 0x4, BIT(0), 0x0);
	rtw_write32_mask(rtwdev, w_q + 0x4, BIT(0), 0x0);

	return true;
}

static bool __rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
{
	if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_A))
		return false;

	if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_B))
		return false;

	return true;
}

static bool rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u32 temp[3];

	/* sample the first element of both paths' I/Q vectors; if they are
	 * all zero, there is no backed-up result to restore
	 */
	if (dm_info->dack_msbk[RF_PATH_A][0][0] == 0 &&
	    dm_info->dack_msbk[RF_PATH_A][1][0] == 0 &&
	    dm_info->dack_msbk[RF_PATH_B][0][0] == 0 &&
	    dm_info->dack_msbk[RF_PATH_B][1][0] == 0)
		return false;

	temp[0] = rtw_read32(rtwdev, 0x1860);
	temp[1] = rtw_read32(rtwdev, 0x4160);
	temp[2] = rtw_read32(rtwdev, 0x9b4);

	rtw8822c_dac_cal_restore_prepare(rtwdev);
	if (!check_hw_ready(rtwdev, 0x2808, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, 0x2834, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, 0x4508, 0x7fff80, 0xffff) ||
	    !check_hw_ready(rtwdev, 0x4534, 0x7fff80, 0xffff))
		return false;

	if (!__rtw8822c_dac_cal_restore(rtwdev)) {
		rtw_err(rtwdev, "failed to restore dack vectors\n");
		return false;
	}

	rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x1);
	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
	rtw_write32(rtwdev, 0x1860, temp[0]);
	rtw_write32(rtwdev, 0x4160, temp[1]);
	rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x1);
	rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x1);
	rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x1);
	rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x1);
	rtw_write32(rtwdev, 0x9b4, temp[2]);

	return true;
}

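/* Top-level DAC calibration flow, as implemented below: try to restore a
 * previously backed-up result first; otherwise back up the affected BB/RF
 * registers, run ADCK plus up to ten rounds of step1-step3 per path until
 * the residual I/Q offsets drop below 5 codes, finalize with step4, restore
 * the registers, and back up the new result for later restores.
 */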
static void rtw8822c_rf_dac_cal(struct rtw_dev *rtwdev)
{
	struct rtw_backup_info backup_rf[DACK_RF_8822C * DACK_PATH_8822C];
	struct rtw_backup_info backup[DACK_REG_8822C];
	u32 ic = 0, qc = 0, i;
	u32 i_a = 0x0, q_a = 0x0, i_b = 0x0, q_b = 0x0;
	u32 ic_a = 0x0, qc_a = 0x0, ic_b = 0x0, qc_b = 0x0;
	u32 adc_ic_a = 0x0, adc_qc_a = 0x0, adc_ic_b = 0x0, adc_qc_b = 0x0;

	if (rtw8822c_dac_cal_restore(rtwdev))
		return;

	/* no stored result to restore, run the full calibration */

	rtw8822c_dac_backup_reg(rtwdev, backup, backup_rf);

	rtw8822c_dac_bb_setting(rtwdev);

	/* path-A */
	rtw8822c_dac_cal_adc(rtwdev, RF_PATH_A, &adc_ic_a, &adc_qc_a);
	for (i = 0; i < 10; i++) {
		rtw8822c_dac_cal_step1(rtwdev, RF_PATH_A);
		rtw8822c_dac_cal_step2(rtwdev, RF_PATH_A, &ic, &qc);
		ic_a = ic;
		qc_a = qc;

		rtw8822c_dac_cal_step3(rtwdev, RF_PATH_A, adc_ic_a, adc_qc_a,
				       &ic, &qc, &i_a, &q_a);

		if (ic < 5 && qc < 5)
			break;
	}
	rtw8822c_dac_cal_step4(rtwdev, RF_PATH_A);

	/* path-B */
	rtw8822c_dac_cal_adc(rtwdev, RF_PATH_B, &adc_ic_b, &adc_qc_b);
	for (i = 0; i < 10; i++) {
		rtw8822c_dac_cal_step1(rtwdev, RF_PATH_B);
		rtw8822c_dac_cal_step2(rtwdev, RF_PATH_B, &ic, &qc);
		ic_b = ic;
		qc_b = qc;

		rtw8822c_dac_cal_step3(rtwdev, RF_PATH_B, adc_ic_b, adc_qc_b,
				       &ic, &qc, &i_b, &q_b);

		if (ic < 5 && qc < 5)
			break;
	}
	rtw8822c_dac_cal_step4(rtwdev, RF_PATH_B);

	rtw_write32(rtwdev, 0x1b00, 0x00000008);
	rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
	rtw_write8(rtwdev, 0x1bcc, 0x0);
	rtw_write32(rtwdev, 0x1b00, 0x0000000a);
	rtw_write8(rtwdev, 0x1bcc, 0x0);

	rtw8822c_dac_restore_reg(rtwdev, backup, backup_rf);

	/* back up the results so later calibrations can simply restore them,
	 * saving a lot of time
	 */
	rtw8822c_dac_cal_backup(rtwdev);

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: ic=0x%x, qc=0x%x\n", ic_a, qc_a);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: ic=0x%x, qc=0x%x\n", ic_b, qc_b);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: i=0x%x, q=0x%x\n", i_a, q_a);
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: i=0x%x, q=0x%x\n", i_b, q_b);
}

static void rtw8822c_rf_x2_check(struct rtw_dev *rtwdev)
{
	u8 x2k_busy;

	mdelay(1);
	x2k_busy = rtw_read_rf(rtwdev, RF_PATH_A, 0xb8, BIT(15));
	if (x2k_busy == 1) {
		rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0xC4440);
		rtw_write_rf(rtwdev, RF_PATH_A, 0xba, RFREG_MASK, 0x6840D);
		rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0x80440);
		mdelay(1);
	}
}

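/* Program the per-band TX power-trim values into the RF gain table.  The
 * sequence below appears to follow a table-write handshake: RF register
 * 0xee BIT(19) gates table-write mode, RF 0x33 selects the entry (_seq)
 * and RF 0x3f carries the value for it; BIT(19) is cleared again when done.
 */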
static void rtw8822c_set_power_trim(struct rtw_dev *rtwdev, s8 bb_gain[2][8])
{
#define RF_SET_POWER_TRIM(_path, _seq, _idx)				\
		do {							\
			rtw_write_rf(rtwdev, _path, 0x33, RFREG_MASK, _seq); \
			rtw_write_rf(rtwdev, _path, 0x3f, RFREG_MASK,	\
				     bb_gain[_path][_idx]);		\
		} while (0)
	u8 path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_write_rf(rtwdev, path, 0xee, BIT(19), 1);
		RF_SET_POWER_TRIM(path, 0x0, 0);
		RF_SET_POWER_TRIM(path, 0x1, 1);
		RF_SET_POWER_TRIM(path, 0x2, 2);
		RF_SET_POWER_TRIM(path, 0x3, 2);
		RF_SET_POWER_TRIM(path, 0x4, 3);
		RF_SET_POWER_TRIM(path, 0x5, 4);
		RF_SET_POWER_TRIM(path, 0x6, 5);
		RF_SET_POWER_TRIM(path, 0x7, 6);
		RF_SET_POWER_TRIM(path, 0x8, 7);
		RF_SET_POWER_TRIM(path, 0x9, 3);
		RF_SET_POWER_TRIM(path, 0xa, 4);
		RF_SET_POWER_TRIM(path, 0xb, 5);
		RF_SET_POWER_TRIM(path, 0xc, 6);
		RF_SET_POWER_TRIM(path, 0xd, 7);
		RF_SET_POWER_TRIM(path, 0xe, 7);
		rtw_write_rf(rtwdev, path, 0xee, BIT(19), 0);
	}
#undef RF_SET_POWER_TRIM
}

static void rtw8822c_power_trim(struct rtw_dev *rtwdev)
{
	u8 pg_pwr = 0xff, i, path, idx;
	s8 bb_gain[2][8] = {};
	u16 rf_efuse_2g[3] = {PPG_2GL_TXAB, PPG_2GM_TXAB, PPG_2GH_TXAB};
	u16 rf_efuse_5g[2][5] = {{PPG_5GL1_TXA, PPG_5GL2_TXA, PPG_5GM1_TXA,
				  PPG_5GM2_TXA, PPG_5GH1_TXA},
				 {PPG_5GL1_TXB, PPG_5GL2_TXB, PPG_5GM1_TXB,
				  PPG_5GM2_TXB, PPG_5GH1_TXB} };
	bool set = false;

	for (i = 0; i < ARRAY_SIZE(rf_efuse_2g); i++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[i], &pg_pwr);
		if (pg_pwr == EFUSE_READ_FAIL)
			continue;
		set = true;
		bb_gain[RF_PATH_A][i] = FIELD_GET(PPG_2G_A_MASK, pg_pwr);
		bb_gain[RF_PATH_B][i] = FIELD_GET(PPG_2G_B_MASK, pg_pwr);
	}

	for (i = 0; i < ARRAY_SIZE(rf_efuse_5g[0]); i++) {
		for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
			rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path][i],
						 &pg_pwr);
			if (pg_pwr == EFUSE_READ_FAIL)
				continue;
			set = true;
			idx = i + ARRAY_SIZE(rf_efuse_2g);
			bb_gain[path][idx] = FIELD_GET(PPG_5G_MASK, pg_pwr);
		}
	}
	if (set)
		rtw8822c_set_power_trim(rtwdev, bb_gain);

	rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
}

static void rtw8822c_thermal_trim(struct rtw_dev *rtwdev)
{
	u16 rf_efuse[2] = {PPG_THERMAL_A, PPG_THERMAL_B};
	u8 pg_therm = 0xff, thermal[2] = {0}, path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse[path], &pg_therm);
		if (pg_therm == EFUSE_READ_FAIL)
			return;
		/* The efuse value in BIT(0) shall be moved to BIT(3), and the
		 * value in bits [3:1] shall be right-shifted by 1 bit.
		 */
		thermal[path] = FIELD_GET(GENMASK(3, 1), pg_therm);
		thermal[path] |= FIELD_PREP(BIT(3), pg_therm & BIT(0));
		rtw_write_rf(rtwdev, path, 0x43, RF_THEMAL_MASK, thermal[path]);
	}
}

static void rtw8822c_pa_bias(struct rtw_dev *rtwdev)
{
	u16 rf_efuse_2g[2] = {PPG_PABIAS_2GA, PPG_PABIAS_2GB};
	u16 rf_efuse_5g[2] = {PPG_PABIAS_5GA, PPG_PABIAS_5GB};
	u8 pg_pa_bias = 0xff, path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[path],
					 &pg_pa_bias);
		if (pg_pa_bias == EFUSE_READ_FAIL)
			return;
		pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
		rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_2G_MASK, pg_pa_bias);
	}
	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path],
					 &pg_pa_bias);
		pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
		rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_5G_MASK, pg_pa_bias);
	}
}

static void rtw8822c_rfk_handshake(struct rtw_dev *rtwdev, bool is_before_k)
{
	struct rtw_dm_info *dm = &rtwdev->dm_info;
	u8 u1b_tmp;
	u8 u4b_tmp;
	int ret;

	if (is_before_k) {
		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[RFK] WiFi / BT RFK handshake start!!\n");

		if (!dm->is_bt_iqk_timeout) {
			ret = read_poll_timeout(rtw_read32_mask, u4b_tmp,
						u4b_tmp == 0, 20, 600000, false,
						rtwdev, REG_PMC_DBG_CTRL1,
						BITS_PMC_BT_IQK_STS);
			if (ret) {
				rtw_dbg(rtwdev, RTW_DBG_RFK,
					"[RFK] Wait BT IQK finish timeout!!\n");
				dm->is_bt_iqk_timeout = true;
			}
		}

		rtw_fw_inform_rfk_status(rtwdev, true);

		ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
					u1b_tmp == 1, 20, 100000, false,
					rtwdev, REG_ARFR4, BIT_WL_RFK);
		if (ret)
			rtw_dbg(rtwdev, RTW_DBG_RFK,
				"[RFK] Send WiFi RFK start H2C cmd FAIL!!\n");
	} else {
		rtw_fw_inform_rfk_status(rtwdev, false);
		ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
					u1b_tmp == 1, 20, 100000, false,
					rtwdev, REG_ARFR4,
					BIT_WL_RFK);
		if (ret)
			rtw_dbg(rtwdev, RTW_DBG_RFK,
				"[RFK] Send WiFi RFK finish H2C cmd FAIL!!\n");

		rtw_dbg(rtwdev, RTW_DBG_RFK,
			"[RFK] WiFi / BT RFK handshake finish!!\n");
	}
}

static void rtw8822c_rfk_power_save(struct rtw_dev *rtwdev,
				    bool is_power_save)
{
	u8 path;

	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
		rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
		rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_PS_EN,
				 is_power_save ? 0 : 1);
	}
}

static void rtw8822c_txgapk_backup_bb_reg(struct rtw_dev *rtwdev, const u32 reg[],
					  u32 reg_backup[], u32 reg_num)
{
	u32 i;

	for (i = 0; i < reg_num; i++) {
		reg_backup[i] = rtw_read32(rtwdev, reg[i]);

		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Backup BB 0x%x = 0x%x\n",
			reg[i], reg_backup[i]);
	}
}

static void rtw8822c_txgapk_reload_bb_reg(struct rtw_dev *rtwdev,
					  const u32 reg[], u32 reg_backup[],
					  u32 reg_num)
{
	u32 i;

	for (i = 0; i < reg_num; i++) {
		rtw_write32(rtwdev, reg[i], reg_backup[i]);
		rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Reload BB 0x%x = 0x%x\n",
			reg[i], reg_backup[i]);
	}
}

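/* Return true only when neither RF path reports the given TRX AGC mode.
 * rtw8822c_txgapk_tx_pause() polls this with status == 2, waiting until
 * both paths have left that mode before the TX gap calibration continues
 * (the meaning of the mode value itself is not spelled out here).
 */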
static bool check_rf_status(struct rtw_dev *rtwdev, u8 status)
{
	u8 reg_rf0_a, reg_rf0_b;

	reg_rf0_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A,
				    RF_MODE_TRXAGC, BIT_RF_MODE);
	reg_rf0_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B,
				    RF_MODE_TRXAGC, BIT_RF_MODE);

	if (reg_rf0_a == status || reg_rf0_b == status)
		return false;

	return true;
}

static void rtw8822c_txgapk_tx_pause(struct rtw_dev *rtwdev)
{
	bool status;
	int ret;

	rtw_write8(rtwdev, REG_TXPAUSE, BIT_AC_QUEUE);
	rtw_write32_mask(rtwdev, REG_TX_FIFO, BIT_STOP_TX, 0x2);

	ret = read_poll_timeout_atomic(check_rf_status, status, status,
				       2, 5000, false, rtwdev, 2);
	if (ret)
		rtw_warn(rtwdev, "failed to pause TX\n");

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Tx pause!!\n");
}

static void rtw8822c_txgapk_bb_dpk(struct rtw_dev *rtwdev, u8 path)
{
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	rtw_write32_mask(rtwdev, REG_ENFN, BIT_IQK_DPK_EN, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
			 BIT_IQK_DPK_CLOCK_SRC, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
			 BIT_IQK_DPK_RESET_SRC, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_EN_IOQ_IQK_DPK, 0x1);
	rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_TST_IQK2SET_SRC, 0x0);
	rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x1ff);

	if (path == RF_PATH_A) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x1);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
				 BIT_TX_SCALE_0DB, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x0);
	} else if (path == RF_PATH_B) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE2,
				 BIT_DIS_SHARERX_TXGAT, 0x1);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
				 BIT_TX_SCALE_0DB, 0x1);
		rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x0);
	}
	rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x2);
}

static void rtw8822c_txgapk_afe_dpk(struct rtw_dev *rtwdev, u8 path)
{
	u32 reg;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	if (path == RF_PATH_A) {
		reg = REG_ANAPAR_A;
	} else if (path == RF_PATH_B) {
		reg = REG_ANAPAR_B;
	} else {
		rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
		return;
	}

	rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, MASKDWORD);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x701f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x702f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x703f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x704f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x706f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709f0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70af0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bf0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cf0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70df0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ef0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
}

static void rtw8822c_txgapk_afe_dpk_restore(struct rtw_dev *rtwdev, u8 path)
{
	u32 reg;

	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	if (path == RF_PATH_A) {
		reg = REG_ANAPAR_A;
	} else if (path == RF_PATH_B) {
		reg = REG_ANAPAR_B;
	} else {
		rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
		return;
	}
	rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, 0xffa1005e);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70144041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70244041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70344041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70444041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70644041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709b8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ab8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bb8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cb8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70db8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70eb8041);
	rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70fb8041);
}

static void rtw8822c_txgapk_bb_dpk_restore(struct rtw_dev *rtwdev, u8 path)
{
	rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);

	rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x0);
	rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TIA_BYPASS, 0x0);
	rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TXBB, 0x0);

	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
	rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
	rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x1);
	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
	rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
	rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
	rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
	rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x0);

	if (path == RF_PATH_A) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x0);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
				 BIT_TX_SCALE_0DB, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x3);
	} else if (path == RF_PATH_B) {
		rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
				 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE2,
				 BIT_DIS_SHARERX_TXGAT, 0x0);
		rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
				 BIT_TX_SCALE_0DB, 0x0);
		rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x3);
	}

	rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x0);
	rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_CFIR_EN, 0x5);
}

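/* A gain word counts as "already large enough" when both TX PAD gain fields
 * (BIT_GAIN_TX_PAD_H/L) are at or above 0xc/0xe - the debug prints elsewhere
 * summarise this as ">= 0xCEX".  rtw8822c_txgapk_cal_gain() returns such
 * gains unchanged rather than applying a calibrated offset to them.
 */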
_rtw8822c_txgapk_gain_valid(struct rtw_dev * rtwdev,u32 gain)1389 static bool _rtw8822c_txgapk_gain_valid(struct rtw_dev *rtwdev, u32 gain)
1390 {
1391 if ((FIELD_GET(BIT_GAIN_TX_PAD_H, gain) >= 0xc) &&
1392 (FIELD_GET(BIT_GAIN_TX_PAD_L, gain) >= 0xe))
1393 return true;
1394
1395 return false;
1396 }
1397
_rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev * rtwdev,u8 band,u8 path)1398 static void _rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev,
1399 u8 band, u8 path)
1400 {
1401 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1402 u32 v, tmp_3f = 0;
1403 u8 gain, check_txgain;
1404
1405 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1406
1407 switch (band) {
1408 case RF_BAND_2G_OFDM:
1409 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1410 break;
1411 case RF_BAND_5G_L:
1412 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1413 break;
1414 case RF_BAND_5G_M:
1415 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1416 break;
1417 case RF_BAND_5G_H:
1418 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1419 break;
1420 default:
1421 break;
1422 }
1423
1424 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, MASKBYTE0, 0x88);
1425
1426 check_txgain = 0;
1427 for (gain = 0; gain < RF_GAIN_NUM; gain++) {
1428 v = txgapk->rf3f_bp[band][gain][path];
1429 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1430 if (!check_txgain) {
1431 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1432 check_txgain = 1;
1433 }
1434 rtw_dbg(rtwdev, RTW_DBG_RFK,
1435 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1436 txgapk->rf3f_bp[band][gain][path]);
1437 } else {
1438 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1439 }
1440
1441 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN, tmp_3f);
1442 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_I_GAIN, gain);
1443 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x1);
1444 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x0);
1445
1446 rtw_dbg(rtwdev, RTW_DBG_RFK,
1447 "[TXGAPK] Band=%d 0x1b98[11:0]=0x%03X path=%d\n",
1448 band, tmp_3f, path);
1449 }
1450 }
1451
rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev * rtwdev)1452 static void rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev)
1453 {
1454 u8 path, band;
1455
1456 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1457 __func__, rtwdev->dm_info.gapk.channel);
1458
1459 for (band = 0; band < RF_BAND_MAX; band++) {
1460 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1461 _rtw8822c_txgapk_write_gain_bb_table(rtwdev,
1462 band, path);
1463 }
1464 }
1465 }
1466
rtw8822c_txgapk_read_offset(struct rtw_dev * rtwdev,u8 path)1467 static void rtw8822c_txgapk_read_offset(struct rtw_dev *rtwdev, u8 path)
1468 {
1469 static const u32 cfg1_1b00[2] = {0x00000d18, 0x00000d2a};
1470 static const u32 cfg2_1b00[2] = {0x00000d19, 0x00000d2b};
1471 static const u32 set_pi[2] = {REG_RSV_CTRL, REG_WLRF1};
1472 static const u32 path_setting[2] = {REG_ORITXCODE, REG_ORITXCODE2};
1473 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1474 u8 channel = txgapk->channel;
1475 u32 val;
1476 int i;
1477
1478 if (path >= ARRAY_SIZE(cfg1_1b00) ||
1479 path >= ARRAY_SIZE(cfg2_1b00) ||
1480 path >= ARRAY_SIZE(set_pi) ||
1481 path >= ARRAY_SIZE(path_setting)) {
1482 rtw_warn(rtwdev, "[TXGAPK] wrong path %d\n", path);
1483 return;
1484 }
1485
1486 rtw_write32_mask(rtwdev, REG_ANTMAP0, BIT_ANT_PATH, path + 1);
1487 rtw_write32_mask(rtwdev, REG_TXLGMAP, MASKDWORD, 0xe4e40000);
1488 rtw_write32_mask(rtwdev, REG_TXANTSEG, BIT_ANTSEG, 0x3);
1489 rtw_write32_mask(rtwdev, path_setting[path], MASK20BITS, 0x33312);
1490 rtw_write32_mask(rtwdev, path_setting[path], BIT_PATH_EN, 0x1);
1491 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x0);
1492 rtw_write_rf(rtwdev, path, RF_LUTDBG, BIT_TXA_TANK, 0x1);
1493 rtw_write_rf(rtwdev, path, RF_IDAC, BIT_TX_MODE, 0x820);
1494 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1495 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1496
1497 rtw_write32_mask(rtwdev, REG_TX_TONE_IDX, MASKBYTE0, 0x018);
1498 fsleep(1000);
1499 if (channel >= 1 && channel <= 14)
1500 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_2G_SWING);
1501 else
1502 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_5G_SWING);
1503 fsleep(1000);
1504
1505 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg1_1b00[path]);
1506 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg2_1b00[path]);
1507
1508 read_poll_timeout(rtw_read32_mask, val,
1509 val == 0x55, 1000, 100000, false,
1510 rtwdev, REG_RPT_CIP, BIT_RPT_CIP_STATUS);
1511
1512 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x2);
1513 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1514 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_EN, 0x1);
1515 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x12);
1516 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x3);
1517 val = rtw_read32(rtwdev, REG_STAT_RPT);
1518
1519 txgapk->offset[0][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1520 txgapk->offset[1][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1521 txgapk->offset[2][path] = (s8)FIELD_GET(BIT_GAPK_RPT2, val);
1522 txgapk->offset[3][path] = (s8)FIELD_GET(BIT_GAPK_RPT3, val);
1523 txgapk->offset[4][path] = (s8)FIELD_GET(BIT_GAPK_RPT4, val);
1524 txgapk->offset[5][path] = (s8)FIELD_GET(BIT_GAPK_RPT5, val);
1525 txgapk->offset[6][path] = (s8)FIELD_GET(BIT_GAPK_RPT6, val);
1526 txgapk->offset[7][path] = (s8)FIELD_GET(BIT_GAPK_RPT7, val);
1527
1528 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x4);
1529 val = rtw_read32(rtwdev, REG_STAT_RPT);
1530
1531 txgapk->offset[8][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1532 txgapk->offset[9][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1533
1534 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1535 if (txgapk->offset[i][path] & BIT(3))
1536 txgapk->offset[i][path] = txgapk->offset[i][path] |
1537 0xf0;
1538 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1539 rtw_dbg(rtwdev, RTW_DBG_RFK,
1540 "[TXGAPK] offset %d %d path=%d\n",
1541 txgapk->offset[i][path], i, path);
1542 }
1543
rtw8822c_txgapk_calculate_offset(struct rtw_dev * rtwdev,u8 path)1544 static void rtw8822c_txgapk_calculate_offset(struct rtw_dev *rtwdev, u8 path)
1545 {
1546 static const u32 bb_reg[] = {REG_ANTMAP0, REG_TXLGMAP, REG_TXANTSEG,
1547 REG_ORITXCODE, REG_ORITXCODE2};
1548 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1549 u8 channel = txgapk->channel;
1550 u32 reg_backup[ARRAY_SIZE(bb_reg)] = {0};
1551
1552 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1553 __func__, channel);
1554
1555 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1556 reg_backup, ARRAY_SIZE(bb_reg));
1557
1558 if (channel >= 1 && channel <= 14) {
1559 rtw_write32_mask(rtwdev,
1560 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1561 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1562 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1563 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1564 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1565 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x5000f);
1566 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x0);
1567 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x1);
1568 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0f);
1569 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1570 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
1571 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1572 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1573
1574 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x00);
1575 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1576
1577 rtw8822c_txgapk_read_offset(rtwdev, path);
1578 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1579
1580 } else {
1581 rtw_write32_mask(rtwdev,
1582 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1583 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1584 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1585 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1586 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1587 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50011);
1588 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x3);
1589 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x3);
1590 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
1591 rtw_write_rf(rtwdev, path,
1592 RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0x2);
1593 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x12);
1594 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1595 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1596 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1597 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x5);
1598
1599 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1600
1601 if (channel >= 36 && channel <= 64)
1602 rtw_write32_mask(rtwdev,
1603 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1604 else if (channel >= 100 && channel <= 144)
1605 rtw_write32_mask(rtwdev,
1606 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1607 else if (channel >= 149 && channel <= 177)
1608 rtw_write32_mask(rtwdev,
1609 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1610
1611 rtw8822c_txgapk_read_offset(rtwdev, path);
1612 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1613 }
1614 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1615 reg_backup, ARRAY_SIZE(bb_reg));
1616 }
1617
1618 static void rtw8822c_txgapk_rf_restore(struct rtw_dev *rtwdev, u8 path)
1619 {
1620 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1621
1622 if (path >= rtwdev->hal.rf_path_num)
1623 return;
1624
1625 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x3);
1626 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x0);
1627 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x0);
1628 }
1629
1630 static u32 rtw8822c_txgapk_cal_gain(struct rtw_dev *rtwdev, u32 gain, s8 offset)
1631 {
1632 u32 gain_x2, new_gain;
1633
1634 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1635
1636 if (_rtw8822c_txgapk_gain_valid(rtwdev, gain)) {
1637 new_gain = gain;
1638 rtw_dbg(rtwdev, RTW_DBG_RFK,
1639 "[TXGAPK] gain=0x%03X(>=0xCEX) offset=%d new_gain=0x%03X\n",
1640 gain, offset, new_gain);
1641 return new_gain;
1642 }
1643
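/* Apply the signed offset on a doubled-gain scale so that half-step
 * results survive: the integer part goes back into the gain field and an
 * odd remainder is kept in BIT_GAIN_EXT.  For example, gain = 0x10 with
 * offset = 3 gives gain_x2 = 0x23, i.e. new_gain = 0x11 | BIT_GAIN_EXT.
 */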
1644 gain_x2 = (gain << 1) + offset;
1645 new_gain = (gain_x2 >> 1) | (gain_x2 & BIT(0) ? BIT_GAIN_EXT : 0);
1646
1647 rtw_dbg(rtwdev, RTW_DBG_RFK,
1648 "[TXGAPK] gain=0x%X offset=%d new_gain=0x%X\n",
1649 gain, offset, new_gain);
1650
1651 return new_gain;
1652 }
1653
1654 static void rtw8822c_txgapk_write_tx_gain(struct rtw_dev *rtwdev)
1655 {
1656 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1657 u32 i, j, tmp = 0x20, tmp_3f, v;
1658 s8 offset_tmp[RF_GAIN_NUM] = {0};
1659 u8 path, band = RF_BAND_2G_OFDM, channel = txgapk->channel;
1660
1661 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1662
1663 if (channel >= 1 && channel <= 14) {
1664 tmp = 0x20;
1665 band = RF_BAND_2G_OFDM;
1666 } else if (channel >= 36 && channel <= 64) {
1667 tmp = 0x200;
1668 band = RF_BAND_5G_L;
1669 } else if (channel >= 100 && channel <= 144) {
1670 tmp = 0x280;
1671 band = RF_BAND_5G_M;
1672 } else if (channel >= 149 && channel <= 177) {
1673 tmp = 0x300;
1674 band = RF_BAND_5G_H;
1675 } else {
1676 rtw_err(rtwdev, "[TXGAPK] unknown channel %d!!\n", channel);
1677 return;
1678 }
1679
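/* For each gain index, accumulate the per-step offsets measured from that
 * index upward (entries holding reserved gain codes are skipped), then
 * program the compensated value into the RF 0x3f gain LUT through the
 * LUTWE2/LUTWA/LUTWD0 write sequence below.
 */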
1680 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1681 for (i = 0; i < RF_GAIN_NUM; i++) {
1682 offset_tmp[i] = 0;
1683 for (j = i; j < RF_GAIN_NUM; j++) {
1684 v = txgapk->rf3f_bp[band][j][path];
1685 if (_rtw8822c_txgapk_gain_valid(rtwdev, v))
1686 continue;
1687
1688 offset_tmp[i] += txgapk->offset[j][path];
1689 txgapk->fianl_offset[i][path] = offset_tmp[i];
1690 }
1691
1692 v = txgapk->rf3f_bp[band][i][path];
1693 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1694 rtw_dbg(rtwdev, RTW_DBG_RFK,
1695 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1696 txgapk->rf3f_bp[band][i][path]);
1697 } else {
1698 txgapk->rf3f_fs[path][i] = offset_tmp[i];
1699 rtw_dbg(rtwdev, RTW_DBG_RFK,
1700 "[TXGAPK] offset %d %d\n",
1701 offset_tmp[i], i);
1702 }
1703 }
1704
1705 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x10000);
1706 for (i = 0; i < RF_GAIN_NUM; i++) {
1707 rtw_write_rf(rtwdev, path,
1708 RF_LUTWA, RFREG_MASK, tmp + i);
1709
1710 tmp_3f = rtw8822c_txgapk_cal_gain(rtwdev,
1711 txgapk->rf3f_bp[band][i][path],
1712 offset_tmp[i]);
1713 rtw_write_rf(rtwdev, path, RF_LUTWD0,
1714 BIT_GAIN_EXT | BIT_DATA_L, tmp_3f);
1715
1716 rtw_dbg(rtwdev, RTW_DBG_RFK,
1717 "[TXGAPK] 0x33=0x%05X 0x3f=0x%04X\n",
1718 tmp + i, tmp_3f);
1719 }
1720 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x0);
1721 }
1722 }
1723
1724 static void rtw8822c_txgapk_save_all_tx_gain_table(struct rtw_dev *rtwdev)
1725 {
1726 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1727 static const u32 three_wire[2] = {REG_3WIRE, REG_3WIRE2};
1728 static const u8 ch_num[RF_BAND_MAX] = {1, 1, 36, 100, 149};
1729 static const u8 band_num[RF_BAND_MAX] = {0x0, 0x0, 0x1, 0x3, 0x5};
1730 static const u8 cck[RF_BAND_MAX] = {0x1, 0x0, 0x0, 0x0, 0x0};
1731 u8 path, band, gain, rf0_idx;
1732 u32 rf18, v;
1733
1734 if (rtwdev->dm_info.dm_flags & BIT(RTW_DM_CAP_TXGAPK))
1735 return;
1736
1737 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1738
1739 if (txgapk->read_txgain == 1) {
1740 rtw_dbg(rtwdev, RTW_DBG_RFK,
1741 "[TXGAPK] Already Read txgapk->read_txgain return!!!\n");
1742 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1743 return;
1744 }
1745
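/* For every band, temporarily release the 3-wire interface, retune RF18 to
 * a representative channel of that band (ch_num[]/band_num[] above), and
 * snapshot each TX gain entry by stepping the index in RF_MODE_TRXAGC and
 * reading the result (RF 0x5f) back into rf3f_bp[][][].
 */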
1746 for (band = 0; band < RF_BAND_MAX; band++) {
1747 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1748 rf18 = rtw_read_rf(rtwdev, path, RF_CFGCH, RFREG_MASK);
1749
1750 rtw_write32_mask(rtwdev,
1751 three_wire[path], BIT_3WIRE_EN, 0x0);
1752 rtw_write_rf(rtwdev, path,
1753 RF_CFGCH, MASKBYTE0, ch_num[band]);
1754 rtw_write_rf(rtwdev, path,
1755 RF_CFGCH, BIT_BAND, band_num[band]);
1756 rtw_write_rf(rtwdev, path,
1757 RF_BW_TRXBB, BIT_DBG_CCK_CCA, cck[band]);
1758 rtw_write_rf(rtwdev, path,
1759 RF_BW_TRXBB, BIT_TX_CCK_IND, cck[band]);
1760 gain = 0;
1761 for (rf0_idx = 1; rf0_idx < 32; rf0_idx += 3) {
1762 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC,
1763 MASKBYTE0, rf0_idx);
1764 v = rtw_read_rf(rtwdev, path,
1765 RF_TX_RESULT, RFREG_MASK);
1766 txgapk->rf3f_bp[band][gain][path] = v & BIT_DATA_L;
1767
1768 rtw_dbg(rtwdev, RTW_DBG_RFK,
1769 "[TXGAPK] 0x5f=0x%03X band=%d path=%d\n",
1770 txgapk->rf3f_bp[band][gain][path],
1771 band, path);
1772 gain++;
1773 }
1774 rtw_write_rf(rtwdev, path, RF_CFGCH, RFREG_MASK, rf18);
1775 rtw_write32_mask(rtwdev,
1776 three_wire[path], BIT_3WIRE_EN, 0x3);
1777 }
1778 }
1779 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1780 txgapk->read_txgain = 1;
1781 }
1782
1783 static void rtw8822c_txgapk(struct rtw_dev *rtwdev)
1784 {
1785 static const u32 bb_reg[2] = {REG_TX_PTCL_CTRL, REG_TX_FIFO};
1786 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1787 u32 bb_reg_backup[2];
1788 u8 path;
1789
1790 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1791
1792 rtw8822c_txgapk_save_all_tx_gain_table(rtwdev);
1793
1794 if (txgapk->read_txgain == 0) {
1795 rtw_dbg(rtwdev, RTW_DBG_RFK,
1796 "[TXGAPK] txgapk->read_txgain == 0 return!!!\n");
1797 return;
1798 }
1799
1800 if (rtwdev->efuse.power_track_type >= 4 &&
1801 rtwdev->efuse.power_track_type <= 7) {
1802 rtw_dbg(rtwdev, RTW_DBG_RFK,
1803 "[TXGAPK] Normal Mode in TSSI mode. return!!!\n");
1804 return;
1805 }
1806
1807 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1808 bb_reg_backup, ARRAY_SIZE(bb_reg));
1809 rtw8822c_txgapk_tx_pause(rtwdev);
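/* Per-path calibration sequence: note the current channel, run the BB and
 * AFE DPK setup, measure and compute the gap offsets, then undo the
 * RF/AFE/BB changes before moving on to the next path.
 */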
1810 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1811 txgapk->channel = rtw_read_rf(rtwdev, path,
1812 RF_CFGCH, RFREG_MASK) & MASKBYTE0;
1813 rtw8822c_txgapk_bb_dpk(rtwdev, path);
1814 rtw8822c_txgapk_afe_dpk(rtwdev, path);
1815 rtw8822c_txgapk_calculate_offset(rtwdev, path);
1816 rtw8822c_txgapk_rf_restore(rtwdev, path);
1817 rtw8822c_txgapk_afe_dpk_restore(rtwdev, path);
1818 rtw8822c_txgapk_bb_dpk_restore(rtwdev, path);
1819 }
1820 rtw8822c_txgapk_write_tx_gain(rtwdev);
1821 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1822 bb_reg_backup, ARRAY_SIZE(bb_reg));
1823 }
1824
1825 static void rtw8822c_do_gapk(struct rtw_dev *rtwdev)
1826 {
1827 struct rtw_dm_info *dm = &rtwdev->dm_info;
1828
1829 if (dm->dm_flags & BIT(RTW_DM_CAP_TXGAPK)) {
1830 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] feature disable!!!\n");
1831 return;
1832 }
1833 rtw8822c_rfk_handshake(rtwdev, true);
1834 rtw8822c_txgapk(rtwdev);
1835 rtw8822c_rfk_handshake(rtwdev, false);
1836 }
1837
1838 static void rtw8822c_rf_init(struct rtw_dev *rtwdev)
1839 {
1840 rtw8822c_rf_dac_cal(rtwdev);
1841 rtw8822c_rf_x2_check(rtwdev);
1842 rtw8822c_thermal_trim(rtwdev);
1843 rtw8822c_power_trim(rtwdev);
1844 rtw8822c_pa_bias(rtwdev);
1845 }
1846
1847 static void rtw8822c_pwrtrack_init(struct rtw_dev *rtwdev)
1848 {
1849 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1850 u8 path;
1851
1852 for (path = RF_PATH_A; path < RTW_RF_PATH_MAX; path++) {
1853 dm_info->delta_power_index[path] = 0;
1854 ewma_thermal_init(&dm_info->avg_thermal[path]);
1855 dm_info->thermal_avg[path] = 0xff;
1856 }
1857
1858 dm_info->pwr_trk_triggered = false;
1859 dm_info->thermal_meter_k = rtwdev->efuse.thermal_meter_k;
1860 dm_info->thermal_meter_lck = rtwdev->efuse.thermal_meter_k;
1861 }
1862
1863 static void rtw8822c_phy_set_param(struct rtw_dev *rtwdev)
1864 {
1865 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1866 struct rtw_hal *hal = &rtwdev->hal;
1867 u8 crystal_cap;
1868 u8 cck_gi_u_bnd_msb = 0;
1869 u8 cck_gi_u_bnd_lsb = 0;
1870 u8 cck_gi_l_bnd_msb = 0;
1871 u8 cck_gi_l_bnd_lsb = 0;
1872 bool is_tx2_path;
1873
1874 /* power on BB/RF domain */
1875 rtw_write8_set(rtwdev, REG_SYS_FUNC_EN,
1876 BIT_FEN_BB_GLB_RST | BIT_FEN_BB_RSTB);
1877 rtw_write8_set(rtwdev, REG_RF_CTRL,
1878 BIT_RF_EN | BIT_RF_RSTB | BIT_RF_SDM_RSTB);
1879 rtw_write32_set(rtwdev, REG_WLRF1, BIT_WLRF1_BBRF_EN);
1880
1881 /* disable low rate DPD */
1882 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1883
1884 /* pre init before header files config */
1885 rtw8822c_header_file_init(rtwdev, true);
1886
1887 rtw_phy_load_tables(rtwdev);
1888
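/* The 7-bit crystal cap from efuse is written twice, into bits [16:10] and
 * [23:17] of REG_ANAPAR_XTAL_0 (presumably one copy per crystal buffer leg).
 */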
1889 crystal_cap = rtwdev->efuse.crystal_cap & 0x7f;
1890 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, 0xfffc00,
1891 crystal_cap | (crystal_cap << 7));
1892
1893 /* post init after header files config */
1894 rtw8822c_header_file_init(rtwdev, false);
1895
1896 is_tx2_path = false;
1897 rtw8822c_config_trx_mode(rtwdev, hal->antenna_tx, hal->antenna_rx,
1898 is_tx2_path);
1899 rtw_phy_init(rtwdev);
1900
1901 cck_gi_u_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc000);
1902 cck_gi_u_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1aa8, 0xf0000);
1903 cck_gi_l_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc0);
1904 cck_gi_l_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1a70, 0x0f000000);
1905
1906 dm_info->cck_gi_u_bnd = ((cck_gi_u_bnd_msb << 4) | (cck_gi_u_bnd_lsb));
1907 dm_info->cck_gi_l_bnd = ((cck_gi_l_bnd_msb << 4) | (cck_gi_l_bnd_lsb));
1908
1909 rtw8822c_rf_init(rtwdev);
1910 rtw8822c_pwrtrack_init(rtwdev);
1911
1912 rtw_bf_phy_init(rtwdev);
1913 }
1914
1915 #define WLAN_TXQ_RPT_EN 0x1F
1916 #define WLAN_SLOT_TIME 0x09
1917 #define WLAN_PIFS_TIME 0x1C
1918 #define WLAN_SIFS_CCK_CONT_TX 0x0A
1919 #define WLAN_SIFS_OFDM_CONT_TX 0x0E
1920 #define WLAN_SIFS_CCK_TRX 0x0A
1921 #define WLAN_SIFS_OFDM_TRX 0x10
1922 #define WLAN_NAV_MAX 0xC8
1923 #define WLAN_RDG_NAV 0x05
1924 #define WLAN_TXOP_NAV 0x1B
1925 #define WLAN_CCK_RX_TSF 0x30
1926 #define WLAN_OFDM_RX_TSF 0x30
1927 #define WLAN_TBTT_PROHIBIT 0x04 /* unit : 32us */
1928 #define WLAN_TBTT_HOLD_TIME 0x064 /* unit : 32us */
1929 #define WLAN_DRV_EARLY_INT 0x04
1930 #define WLAN_BCN_CTRL_CLT0 0x10
1931 #define WLAN_BCN_DMA_TIME 0x02
1932 #define WLAN_BCN_MAX_ERR 0xFF
1933 #define WLAN_SIFS_CCK_DUR_TUNE 0x0A
1934 #define WLAN_SIFS_OFDM_DUR_TUNE 0x10
1935 #define WLAN_SIFS_CCK_CTX 0x0A
1936 #define WLAN_SIFS_CCK_IRX 0x0A
1937 #define WLAN_SIFS_OFDM_CTX 0x0E
1938 #define WLAN_SIFS_OFDM_IRX 0x0E
1939 #define WLAN_EIFS_DUR_TUNE 0x40
1940 #define WLAN_EDCA_VO_PARAM 0x002FA226
1941 #define WLAN_EDCA_VI_PARAM 0x005EA328
1942 #define WLAN_EDCA_BE_PARAM 0x005EA42B
1943 #define WLAN_EDCA_BK_PARAM 0x0000A44F
1944
1945 #define WLAN_RX_FILTER0 0xFFFFFFFF
1946 #define WLAN_RX_FILTER2 0xFFFF
1947 #define WLAN_RCR_CFG 0xE400220E
1948 #define WLAN_RXPKT_MAX_SZ 12288
1949 #define WLAN_RXPKT_MAX_SZ_512 (WLAN_RXPKT_MAX_SZ >> 9)
1950
1951 #define WLAN_AMPDU_MAX_TIME 0x70
1952 #define WLAN_RTS_LEN_TH 0xFF
1953 #define WLAN_RTS_TX_TIME_TH 0x08
1954 #define WLAN_MAX_AGG_PKT_LIMIT 0x3f
1955 #define WLAN_RTS_MAX_AGG_PKT_LIMIT 0x3f
1956 #define WLAN_PRE_TXCNT_TIME_TH 0x1E0
1957 #define FAST_EDCA_VO_TH 0x06
1958 #define FAST_EDCA_VI_TH 0x06
1959 #define FAST_EDCA_BE_TH 0x06
1960 #define FAST_EDCA_BK_TH 0x06
1961 #define WLAN_BAR_RETRY_LIMIT 0x01
1962 #define WLAN_BAR_ACK_TYPE 0x05
1963 #define WLAN_RA_TRY_RATE_AGG_LIMIT 0x08
1964 #define WLAN_RESP_TXRATE 0x84
1965 #define WLAN_ACK_TO 0x21
1966 #define WLAN_ACK_TO_CCK 0x6A
1967 #define WLAN_DATA_RATE_FB_CNT_1_4 0x01000000
1968 #define WLAN_DATA_RATE_FB_CNT_5_8 0x08070504
1969 #define WLAN_RTS_RATE_FB_CNT_5_8 0x08070504
1970 #define WLAN_DATA_RATE_FB_RATE0 0xFE01F010
1971 #define WLAN_DATA_RATE_FB_RATE0_H 0x40000000
1972 #define WLAN_RTS_RATE_FB_RATE1 0x003FF010
1973 #define WLAN_RTS_RATE_FB_RATE1_H 0x40000000
1974 #define WLAN_RTS_RATE_FB_RATE4 0x0600F010
1975 #define WLAN_RTS_RATE_FB_RATE4_H 0x400003E0
1976 #define WLAN_RTS_RATE_FB_RATE5 0x0600F015
1977 #define WLAN_RTS_RATE_FB_RATE5_H 0x000000E0
1978 #define WLAN_MULTI_ADDR 0xFFFFFFFF
1979
1980 #define WLAN_TX_FUNC_CFG1 0x30
1981 #define WLAN_TX_FUNC_CFG2 0x30
1982 #define WLAN_MAC_OPT_NORM_FUNC1 0x98
1983 #define WLAN_MAC_OPT_LB_FUNC1 0x80
1984 #define WLAN_MAC_OPT_FUNC2 0xb0810041
1985 #define WLAN_MAC_INT_MIG_CFG 0x33330000
1986
1987 #define WLAN_SIFS_CFG (WLAN_SIFS_CCK_CONT_TX | \
1988 (WLAN_SIFS_OFDM_CONT_TX << BIT_SHIFT_SIFS_OFDM_CTX) | \
1989 (WLAN_SIFS_CCK_TRX << BIT_SHIFT_SIFS_CCK_TRX) | \
1990 (WLAN_SIFS_OFDM_TRX << BIT_SHIFT_SIFS_OFDM_TRX))
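/* With the values above, and assuming the BIT_SHIFT_SIFS_* constants are the
 * usual byte-aligned shifts (8/16/24), WLAN_SIFS_CFG expands to 0x100A0E0A,
 * i.e. one SIFS byte per access type packed into REG_SIFS.
 */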
1991
1992 #define WLAN_SIFS_DUR_TUNE (WLAN_SIFS_CCK_DUR_TUNE | \
1993 (WLAN_SIFS_OFDM_DUR_TUNE << 8))
1994
1995 #define WLAN_TBTT_TIME (WLAN_TBTT_PROHIBIT |\
1996 (WLAN_TBTT_HOLD_TIME << BIT_SHIFT_TBTT_HOLD_TIME_AP))
1997
1998 #define WLAN_NAV_CFG (WLAN_RDG_NAV | (WLAN_TXOP_NAV << 16))
1999 #define WLAN_RX_TSF_CFG (WLAN_CCK_RX_TSF | (WLAN_OFDM_RX_TSF) << 8)
2000
2001 #define MAC_CLK_SPEED 80 /* 80M */
2002 #define EFUSE_PCB_INFO_OFFSET 0xCA
2003
2004 static int rtw8822c_mac_init(struct rtw_dev *rtwdev)
2005 {
2006 u8 value8;
2007 u16 value16;
2008 u32 value32;
2009 u16 pre_txcnt;
2010
2011 /* txq control */
2012 value8 = rtw_read8(rtwdev, REG_FWHW_TXQ_CTRL);
2013 value8 |= (BIT(7) & ~BIT(1) & ~BIT(2));
2014 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL, value8);
2015 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL + 1, WLAN_TXQ_RPT_EN);
2016 /* sifs control */
2017 rtw_write16(rtwdev, REG_SPEC_SIFS, WLAN_SIFS_DUR_TUNE);
2018 rtw_write32(rtwdev, REG_SIFS, WLAN_SIFS_CFG);
2019 rtw_write16(rtwdev, REG_RESP_SIFS_CCK,
2020 WLAN_SIFS_CCK_CTX | WLAN_SIFS_CCK_IRX << 8);
2021 rtw_write16(rtwdev, REG_RESP_SIFS_OFDM,
2022 WLAN_SIFS_OFDM_CTX | WLAN_SIFS_OFDM_IRX << 8);
2023 /* rate fallback control */
2024 rtw_write32(rtwdev, REG_DARFRC, WLAN_DATA_RATE_FB_CNT_1_4);
2025 rtw_write32(rtwdev, REG_DARFRCH, WLAN_DATA_RATE_FB_CNT_5_8);
2026 rtw_write32(rtwdev, REG_RARFRCH, WLAN_RTS_RATE_FB_CNT_5_8);
2027 rtw_write32(rtwdev, REG_ARFR0, WLAN_DATA_RATE_FB_RATE0);
2028 rtw_write32(rtwdev, REG_ARFRH0, WLAN_DATA_RATE_FB_RATE0_H);
2029 rtw_write32(rtwdev, REG_ARFR1_V1, WLAN_RTS_RATE_FB_RATE1);
2030 rtw_write32(rtwdev, REG_ARFRH1_V1, WLAN_RTS_RATE_FB_RATE1_H);
2031 rtw_write32(rtwdev, REG_ARFR4, WLAN_RTS_RATE_FB_RATE4);
2032 rtw_write32(rtwdev, REG_ARFRH4, WLAN_RTS_RATE_FB_RATE4_H);
2033 rtw_write32(rtwdev, REG_ARFR5, WLAN_RTS_RATE_FB_RATE5);
2034 rtw_write32(rtwdev, REG_ARFRH5, WLAN_RTS_RATE_FB_RATE5_H);
2035 /* protocol configuration */
2036 rtw_write8(rtwdev, REG_AMPDU_MAX_TIME_V1, WLAN_AMPDU_MAX_TIME);
2037 rtw_write8_set(rtwdev, REG_TX_HANG_CTRL, BIT_EN_EOF_V1);
2038 pre_txcnt = WLAN_PRE_TXCNT_TIME_TH | BIT_EN_PRECNT;
2039 rtw_write8(rtwdev, REG_PRECNT_CTRL, (u8)(pre_txcnt & 0xFF));
2040 rtw_write8(rtwdev, REG_PRECNT_CTRL + 1, (u8)(pre_txcnt >> 8));
2041 value32 = WLAN_RTS_LEN_TH | (WLAN_RTS_TX_TIME_TH << 8) |
2042 (WLAN_MAX_AGG_PKT_LIMIT << 16) |
2043 (WLAN_RTS_MAX_AGG_PKT_LIMIT << 24);
2044 rtw_write32(rtwdev, REG_PROT_MODE_CTRL, value32);
2045 rtw_write16(rtwdev, REG_BAR_MODE_CTRL + 2,
2046 WLAN_BAR_RETRY_LIMIT | WLAN_RA_TRY_RATE_AGG_LIMIT << 8);
2047 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING, FAST_EDCA_VO_TH);
2048 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING + 2, FAST_EDCA_VI_TH);
2049 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING, FAST_EDCA_BE_TH);
2050 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING + 2, FAST_EDCA_BK_TH);
2051 /* close BA parser */
2052 rtw_write8_clr(rtwdev, REG_LIFETIME_EN, BIT_BA_PARSER_EN);
2053 rtw_write32_clr(rtwdev, REG_RRSR, BITS_RRSR_RSC);
2054
2055 /* EDCA configuration */
2056 rtw_write32(rtwdev, REG_EDCA_VO_PARAM, WLAN_EDCA_VO_PARAM);
2057 rtw_write32(rtwdev, REG_EDCA_VI_PARAM, WLAN_EDCA_VI_PARAM);
2058 rtw_write32(rtwdev, REG_EDCA_BE_PARAM, WLAN_EDCA_BE_PARAM);
2059 rtw_write32(rtwdev, REG_EDCA_BK_PARAM, WLAN_EDCA_BK_PARAM);
2060 rtw_write8(rtwdev, REG_PIFS, WLAN_PIFS_TIME);
2061 rtw_write8_clr(rtwdev, REG_TX_PTCL_CTRL + 1, BIT_SIFS_BK_EN >> 8);
2062 rtw_write8_set(rtwdev, REG_RD_CTRL + 1,
2063 (BIT_DIS_TXOP_CFE | BIT_DIS_LSIG_CFE |
2064 BIT_DIS_STBC_CFE) >> 8);
2065
2066 /* MAC clock configuration */
2067 rtw_write32_clr(rtwdev, REG_AFE_CTRL1, BIT_MAC_CLK_SEL);
2068 rtw_write8(rtwdev, REG_USTIME_TSF, MAC_CLK_SPEED);
2069 rtw_write8(rtwdev, REG_USTIME_EDCA, MAC_CLK_SPEED);
2070
2071 rtw_write8_set(rtwdev, REG_MISC_CTRL,
2072 BIT_EN_FREE_CNT | BIT_DIS_SECOND_CCA);
2073 rtw_write8_clr(rtwdev, REG_TIMER0_SRC_SEL, BIT_TSFT_SEL_TIMER0);
2074 rtw_write16(rtwdev, REG_TXPAUSE, 0x0000);
2075 rtw_write8(rtwdev, REG_SLOT, WLAN_SLOT_TIME);
2076 rtw_write32(rtwdev, REG_RD_NAV_NXT, WLAN_NAV_CFG);
2077 rtw_write16(rtwdev, REG_RXTSF_OFFSET_CCK, WLAN_RX_TSF_CFG);
2078 /* Set beacon control - enable TSF and other related functions */
2079 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2080 /* Set send beacon related registers */
2081 rtw_write32(rtwdev, REG_TBTT_PROHIBIT, WLAN_TBTT_TIME);
2082 rtw_write8(rtwdev, REG_DRVERLYINT, WLAN_DRV_EARLY_INT);
2083 rtw_write8(rtwdev, REG_BCN_CTRL_CLINT0, WLAN_BCN_CTRL_CLT0);
2084 rtw_write8(rtwdev, REG_BCNDMATIM, WLAN_BCN_DMA_TIME);
2085 rtw_write8(rtwdev, REG_BCN_MAX_ERR, WLAN_BCN_MAX_ERR);
2086
2087 /* WMAC configuration */
2088 rtw_write32(rtwdev, REG_MAR, WLAN_MULTI_ADDR);
2089 rtw_write32(rtwdev, REG_MAR + 4, WLAN_MULTI_ADDR);
2090 rtw_write8(rtwdev, REG_BBPSF_CTRL + 2, WLAN_RESP_TXRATE);
2091 rtw_write8(rtwdev, REG_ACKTO, WLAN_ACK_TO);
2092 rtw_write8(rtwdev, REG_ACKTO_CCK, WLAN_ACK_TO_CCK);
2093 rtw_write16(rtwdev, REG_EIFS, WLAN_EIFS_DUR_TUNE);
2094 rtw_write8(rtwdev, REG_NAV_CTRL + 2, WLAN_NAV_MAX);
2095 rtw_write8(rtwdev, REG_WMAC_TRXPTCL_CTL_H + 2, WLAN_BAR_ACK_TYPE);
2096 rtw_write32(rtwdev, REG_RXFLTMAP0, WLAN_RX_FILTER0);
2097 rtw_write16(rtwdev, REG_RXFLTMAP2, WLAN_RX_FILTER2);
2098 rtw_write32(rtwdev, REG_RCR, WLAN_RCR_CFG);
2099 rtw_write8(rtwdev, REG_RX_PKT_LIMIT, WLAN_RXPKT_MAX_SZ_512);
2100 rtw_write8(rtwdev, REG_TCR + 2, WLAN_TX_FUNC_CFG2);
2101 rtw_write8(rtwdev, REG_TCR + 1, WLAN_TX_FUNC_CFG1);
2102 rtw_write32_set(rtwdev, REG_GENERAL_OPTION, BIT_DUMMY_FCS_READY_MASK_EN);
2103 rtw_write32(rtwdev, REG_WMAC_OPTION_FUNCTION + 8, WLAN_MAC_OPT_FUNC2);
2104 rtw_write8(rtwdev, REG_WMAC_OPTION_FUNCTION_1, WLAN_MAC_OPT_NORM_FUNC1);
2105
2106 /* init low power */
2107 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL + 2) & 0xF00F;
2108 value16 |= (BIT_RXGCK_VHT_FIFOTHR(1) | BIT_RXGCK_HT_FIFOTHR(1) |
2109 BIT_RXGCK_OFDM_FIFOTHR(1) | BIT_RXGCK_CCK_FIFOTHR(1)) >> 16;
2110 rtw_write16(rtwdev, REG_RXPSF_CTRL + 2, value16);
2111 value16 = 0;
2112 value16 = BIT_SET_RXPSF_PKTLENTHR(value16, 1);
2113 value16 |= BIT_RXPSF_CTRLEN | BIT_RXPSF_VHTCHKEN | BIT_RXPSF_HTCHKEN
2114 | BIT_RXPSF_OFDMCHKEN | BIT_RXPSF_CCKCHKEN
2115 | BIT_RXPSF_OFDMRST;
2116 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2117 rtw_write32(rtwdev, REG_RXPSF_TYPE_CTRL, 0xFFFFFFFF);
2118 /* rx ignore configuration */
2119 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL);
2120 value16 &= ~(BIT_RXPSF_MHCHKEN | BIT_RXPSF_CCKRST |
2121 BIT_RXPSF_CONT_ERRCHKEN);
2122 value16 = BIT_SET_RXPSF_ERRTHR(value16, 0x07);
2123 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2124 rtw_write8_set(rtwdev, REG_SND_PTCL_CTRL,
2125 BIT_DIS_CHK_VHTSIGB_CRC);
2126
2127 /* Interrupt migration configuration */
2128 rtw_write32(rtwdev, REG_INT_MIG, WLAN_MAC_INT_MIG_CFG);
2129
2130 return 0;
2131 }
2132
2133 #define FWCD_SIZE_REG_8822C 0x2000
2134 #define FWCD_SIZE_DMEM_8822C 0x10000
2135 #define FWCD_SIZE_IMEM_8822C 0x10000
2136 #define FWCD_SIZE_EMEM_8822C 0x20000
2137 #define FWCD_SIZE_ROM_8822C 0x10000
2138
2139 static const u32 __fwcd_segs_8822c[] = {
2140 FWCD_SIZE_REG_8822C,
2141 FWCD_SIZE_DMEM_8822C,
2142 FWCD_SIZE_IMEM_8822C,
2143 FWCD_SIZE_EMEM_8822C,
2144 FWCD_SIZE_ROM_8822C,
2145 };
2146
2147 static const struct rtw_fwcd_segs rtw8822c_fwcd_segs = {
2148 .segs = __fwcd_segs_8822c,
2149 .num = ARRAY_SIZE(__fwcd_segs_8822c),
2150 };
2151
2152 static int rtw8822c_dump_fw_crash(struct rtw_dev *rtwdev)
2153 {
2154 #define __dump_fw_8822c(_dev, _mem) \
2155 rtw_dump_fw(_dev, OCPBASE_ ## _mem ## _88XX, \
2156 FWCD_SIZE_ ## _mem ## _8822C, RTW_FWCD_ ## _mem)
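/* e.g. __dump_fw_8822c(rtwdev, DMEM) expands to
 * rtw_dump_fw(rtwdev, OCPBASE_DMEM_88XX, FWCD_SIZE_DMEM_8822C, RTW_FWCD_DMEM)
 */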
2157 int ret;
2158
2159 ret = rtw_dump_reg(rtwdev, 0x0, FWCD_SIZE_REG_8822C);
2160 if (ret)
2161 return ret;
2162 ret = __dump_fw_8822c(rtwdev, DMEM);
2163 if (ret)
2164 return ret;
2165 ret = __dump_fw_8822c(rtwdev, IMEM);
2166 if (ret)
2167 return ret;
2168 ret = __dump_fw_8822c(rtwdev, EMEM);
2169 if (ret)
2170 return ret;
2171 ret = __dump_fw_8822c(rtwdev, ROM);
2172 if (ret)
2173 return ret;
2174
2175 return 0;
2176
2177 #undef __dump_fw_8822c
2178 }
2179
2180 static void rtw8822c_rstb_3wire(struct rtw_dev *rtwdev, bool enable)
2181 {
2182 if (enable) {
2183 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x1);
2184 rtw_write32_mask(rtwdev, REG_ANAPAR_A, BIT_ANAPAR_UPDATE, 0x1);
2185 rtw_write32_mask(rtwdev, REG_ANAPAR_B, BIT_ANAPAR_UPDATE, 0x1);
2186 } else {
2187 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x0);
2188 }
2189 }
2190
2191 static void rtw8822c_set_channel_rf(struct rtw_dev *rtwdev, u8 channel, u8 bw)
2192 {
2193 #define RF18_BAND_MASK (BIT(16) | BIT(9) | BIT(8))
2194 #define RF18_BAND_2G (0)
2195 #define RF18_BAND_5G (BIT(16) | BIT(8))
2196 #define RF18_CHANNEL_MASK (MASKBYTE0)
2197 #define RF18_RFSI_MASK (BIT(18) | BIT(17))
2198 #define RF18_RFSI_GE_CH80 (BIT(17))
2199 #define RF18_RFSI_GT_CH140 (BIT(18))
2200 #define RF18_BW_MASK (BIT(13) | BIT(12))
2201 #define RF18_BW_20M (BIT(13) | BIT(12))
2202 #define RF18_BW_40M (BIT(13))
2203 #define RF18_BW_80M (BIT(12))
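/* Worked example: channel 149 at 80 MHz (5G band 4) yields, ignoring the
 * bits carried over from the current RF18 value,
 * RF18_BAND_5G | 149 | RF18_RFSI_GT_CH140 | RF18_BW_80M = 0x51195.
 */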
2204
2205 u32 rf_reg18 = 0;
2206 u32 rf_rxbb = 0;
2207
2208 rf_reg18 = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
2209
2210 rf_reg18 &= ~(RF18_BAND_MASK | RF18_CHANNEL_MASK | RF18_RFSI_MASK |
2211 RF18_BW_MASK);
2212
2213 rf_reg18 |= (IS_CH_2G_BAND(channel) ? RF18_BAND_2G : RF18_BAND_5G);
2214 rf_reg18 |= (channel & RF18_CHANNEL_MASK);
2215 if (IS_CH_5G_BAND_4(channel))
2216 rf_reg18 |= RF18_RFSI_GT_CH140;
2217 else if (IS_CH_5G_BAND_3(channel))
2218 rf_reg18 |= RF18_RFSI_GE_CH80;
2219
2220 switch (bw) {
2221 case RTW_CHANNEL_WIDTH_5:
2222 case RTW_CHANNEL_WIDTH_10:
2223 case RTW_CHANNEL_WIDTH_20:
2224 default:
2225 rf_reg18 |= RF18_BW_20M;
2226 rf_rxbb = 0x18;
2227 break;
2228 case RTW_CHANNEL_WIDTH_40:
2229 /* RF bandwidth */
2230 rf_reg18 |= RF18_BW_40M;
2231 rf_rxbb = 0x10;
2232 break;
2233 case RTW_CHANNEL_WIDTH_80:
2234 rf_reg18 |= RF18_BW_80M;
2235 rf_rxbb = 0x8;
2236 break;
2237 }
2238
2239 rtw8822c_rstb_3wire(rtwdev, false);
2240
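/* Program the selected RX BB bandwidth code into RF LUT entry 0x12 on both
 * paths: open the LUT for writing via RF_LUTWE2, pick the entry with
 * RF_LUTWA, write the value through RF_LUTWD0, then close the LUT again.
 */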
2241 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x01);
2242 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWA, 0x1f, 0x12);
2243 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWD0, 0xfffff, rf_rxbb);
2244 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x00);
2245
2246 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x01);
2247 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWA, 0x1f, 0x12);
2248 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWD0, 0xfffff, rf_rxbb);
2249 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x00);
2250
2251 rtw_write_rf(rtwdev, RF_PATH_A, RF_CFGCH, RFREG_MASK, rf_reg18);
2252 rtw_write_rf(rtwdev, RF_PATH_B, RF_CFGCH, RFREG_MASK, rf_reg18);
2253
2254 rtw8822c_rstb_3wire(rtwdev, true);
2255 }
2256
2257 static void rtw8822c_toggle_igi(struct rtw_dev *rtwdev)
2258 {
2259 u32 igi;
2260
2261 igi = rtw_read32_mask(rtwdev, REG_RXIGI, 0x7f);
2262 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi - 2);
2263 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi - 2);
2264 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi);
2265 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi);
2266 }
2267
2268 static void rtw8822c_set_channel_bb(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2269 u8 primary_ch_idx)
2270 {
2271 if (IS_CH_2G_BAND(channel)) {
2272 rtw_write32_clr(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2273 rtw_write32_set(rtwdev, REG_TXF4, BIT(20));
2274 rtw_write32_clr(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2275 rtw_write32_clr(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2276 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0xF);
2277
2278 switch (bw) {
2279 case RTW_CHANNEL_WIDTH_20:
2280 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2281 0x5);
2282 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2283 0x5);
2284 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2285 0x6);
2286 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2287 0x6);
2288 break;
2289 case RTW_CHANNEL_WIDTH_40:
2290 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2291 0x4);
2292 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2293 0x4);
2294 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2295 0x0);
2296 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2297 0x0);
2298 break;
2299 }
2300 if (channel == 13 || channel == 14)
2301 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x969);
2302 else if (channel == 11 || channel == 12)
2303 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x96a);
2304 else
2305 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x9aa);
2306 if (channel == 14) {
2307 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x3da0);
2308 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2309 0x4962c931);
2310 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x6aa3);
2311 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xaa7b);
2312 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xf3d7);
2313 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD, 0x0);
2314 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2315 0xff012455);
2316 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD, 0xffff);
2317 } else {
2318 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x5284);
2319 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2320 0x3e18fec8);
2321 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x0a88);
2322 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xacc4);
2323 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xc8b2);
2324 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD,
2325 0x00faf0de);
2326 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2327 0x00122344);
2328 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD,
2329 0x0fffffff);
2330 }
2331 if (channel == 13)
2332 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2333 else
2334 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x1);
2335 } else if (IS_CH_5G_BAND(channel)) {
2336 rtw_write32_set(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2337 rtw_write32_set(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2338 rtw_write32_set(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2339 rtw_write32_clr(rtwdev, REG_TXF4, BIT(20));
2340 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0x22);
2341 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2342 if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
2343 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2344 0x1);
2345 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2346 0x1);
2347 } else if (IS_CH_5G_BAND_3(channel)) {
2348 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2349 0x2);
2350 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2351 0x2);
2352 } else if (IS_CH_5G_BAND_4(channel)) {
2353 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2354 0x3);
2355 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2356 0x3);
2357 }
2358
2359 if (channel >= 36 && channel <= 51)
2360 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x494);
2361 else if (channel >= 52 && channel <= 55)
2362 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x493);
2363 else if (channel >= 56 && channel <= 111)
2364 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x453);
2365 else if (channel >= 112 && channel <= 119)
2366 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x452);
2367 else if (channel >= 120 && channel <= 172)
2368 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x412);
2369 else if (channel >= 173 && channel <= 177)
2370 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x411);
2371 }
2372
2373 switch (bw) {
2374 case RTW_CHANNEL_WIDTH_20:
2375 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x19B);
2376 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2377 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x0);
2378 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x7);
2379 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x6);
2380 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2381 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2382 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2383 break;
2384 case RTW_CHANNEL_WIDTH_40:
2385 rtw_write32_mask(rtwdev, REG_CCKSB, BIT(4),
2386 (primary_ch_idx == RTW_SC_20_UPPER ? 1 : 0));
2387 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x5);
2388 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2389 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2390 (primary_ch_idx | (primary_ch_idx << 4)));
2391 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x1);
2392 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2393 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2394 break;
2395 case RTW_CHANNEL_WIDTH_80:
2396 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0xa);
2397 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2398 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2399 (primary_ch_idx | (primary_ch_idx << 4)));
2400 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x6);
2401 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2402 break;
2403 case RTW_CHANNEL_WIDTH_5:
2404 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2405 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2406 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x1);
2407 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x4);
2408 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x4);
2409 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2410 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2411 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2412 break;
2413 case RTW_CHANNEL_WIDTH_10:
2414 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2415 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2416 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x2);
2417 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x6);
2418 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x5);
2419 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2420 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2421 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2422 break;
2423 }
2424 }
2425
2426 static void rtw8822c_set_channel(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2427 u8 primary_chan_idx)
2428 {
2429 rtw8822c_set_channel_bb(rtwdev, channel, bw, primary_chan_idx);
2430 rtw_set_channel_mac(rtwdev, channel, bw, primary_chan_idx);
2431 rtw8822c_set_channel_rf(rtwdev, channel, bw);
2432 rtw8822c_toggle_igi(rtwdev);
2433 }
2434
2435 static void rtw8822c_config_cck_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2436 {
2437 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2438 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x0);
2439 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x0);
2440 } else if (rx_path == BB_PATH_AB) {
2441 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x1);
2442 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x1);
2443 }
2444
2445 if (rx_path == BB_PATH_A)
2446 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x0);
2447 else if (rx_path == BB_PATH_B)
2448 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x5);
2449 else if (rx_path == BB_PATH_AB)
2450 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x1);
2451 }
2452
2453 static void rtw8822c_config_ofdm_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2454 {
2455 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2456 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x0);
2457 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x0);
2458 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x0);
2459 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x0);
2460 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x0);
2461 } else if (rx_path == BB_PATH_AB) {
2462 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x1);
2463 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x1);
2464 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x1);
2465 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x1);
2466 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x1);
2467 }
2468
2469 rtw_write32_mask(rtwdev, 0x824, 0x0f000000, rx_path);
2470 rtw_write32_mask(rtwdev, 0x824, 0x000f0000, rx_path);
2471 }
2472
2473 static void rtw8822c_config_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2474 {
2475 rtw8822c_config_cck_rx_path(rtwdev, rx_path);
2476 rtw8822c_config_ofdm_rx_path(rtwdev, rx_path);
2477 }
2478
2479 static void rtw8822c_config_cck_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2480 bool is_tx2_path)
2481 {
2482 if (tx_path == BB_PATH_A) {
2483 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2484 } else if (tx_path == BB_PATH_B) {
2485 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x4);
2486 } else {
2487 if (is_tx2_path)
2488 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0xc);
2489 else
2490 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2491 }
2492 rtw8822c_bb_reset(rtwdev);
2493 }
2494
2495 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2496 enum rtw_bb_path tx_path_sel_1ss)
2497 {
2498 if (tx_path == BB_PATH_A) {
2499 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
2500 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2501 } else if (tx_path == BB_PATH_B) {
2502 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
2503 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2504 } else {
2505 if (tx_path_sel_1ss == BB_PATH_AB) {
2506 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
2507 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
2508 } else if (tx_path_sel_1ss == BB_PATH_B) {
2509 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
2510 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2511 } else if (tx_path_sel_1ss == BB_PATH_A) {
2512 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
2513 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2514 }
2515 }
2516 rtw8822c_bb_reset(rtwdev);
2517 }
2518
2519 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2520 enum rtw_bb_path tx_path_sel_1ss,
2521 enum rtw_bb_path tx_path_cck,
2522 bool is_tx2_path)
2523 {
2524 rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
2525 rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
2526 rtw8822c_bb_reset(rtwdev);
2527 }
2528
2529 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
2530 u8 rx_path, bool is_tx2_path)
2531 {
2532 if ((tx_path | rx_path) & BB_PATH_A)
2533 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x33312);
2534 else
2535 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x11111);
2536 if ((tx_path | rx_path) & BB_PATH_B)
2537 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x33312);
2538 else
2539 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
2540
2541 rtw8822c_config_rx_path(rtwdev, rx_path);
2542 rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
2543 is_tx2_path);
2544
2545 rtw8822c_toggle_igi(rtwdev);
2546 }
2547
2548 static void query_phy_status_page0(struct rtw_dev *rtwdev, u8 *phy_status,
2549 struct rtw_rx_pkt_stat *pkt_stat)
2550 {
2551 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2552 u8 l_bnd, u_bnd;
2553 u8 gain_a, gain_b;
2554 s8 rx_power[RTW_RF_PATH_MAX];
2555 s8 min_rx_power = -120;
2556 u8 rssi;
2557 u8 channel;
2558 int path;
2559
2560 rx_power[RF_PATH_A] = GET_PHY_STAT_P0_PWDB_A(phy_status);
2561 rx_power[RF_PATH_B] = GET_PHY_STAT_P0_PWDB_B(phy_status);
2562 l_bnd = dm_info->cck_gi_l_bnd;
2563 u_bnd = dm_info->cck_gi_u_bnd;
2564 gain_a = GET_PHY_STAT_P0_GAIN_A(phy_status);
2565 gain_b = GET_PHY_STAT_P0_GAIN_B(phy_status);
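/* Compensate the CCK PWDB by 2 per gain step outside the calibrated bounds;
 * e.g. (illustrative values) gain_a = 2 with l_bnd = 4 adds (4 - 2) << 1 = 4
 * to the path A power before the -110 offset below.
 */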
2566 if (gain_a < l_bnd)
2567 rx_power[RF_PATH_A] += (l_bnd - gain_a) << 1;
2568 else if (gain_a > u_bnd)
2569 rx_power[RF_PATH_A] -= (gain_a - u_bnd) << 1;
2570 if (gain_b < l_bnd)
2571 rx_power[RF_PATH_B] += (l_bnd - gain_b) << 1;
2572 else if (gain_b > u_bnd)
2573 rx_power[RF_PATH_B] -= (gain_b - u_bnd) << 1;
2574
2575 rx_power[RF_PATH_A] -= 110;
2576 rx_power[RF_PATH_B] -= 110;
2577
2578 channel = GET_PHY_STAT_P0_CHANNEL(phy_status);
2579 if (channel != 0)
2580 rtw_set_rx_freq_band(pkt_stat, channel);
2581 else
2582 pkt_stat->channel_invalid = true;
2583
2584 pkt_stat->rx_power[RF_PATH_A] = rx_power[RF_PATH_A];
2585 pkt_stat->rx_power[RF_PATH_B] = rx_power[RF_PATH_B];
2586
2587 for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2588 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2589 dm_info->rssi[path] = rssi;
2590 }
2591
2592 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 1);
2593 pkt_stat->bw = RTW_CHANNEL_WIDTH_20;
2594 pkt_stat->signal_power = max(pkt_stat->rx_power[RF_PATH_A],
2595 min_rx_power);
2596 }
2597
2598 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
2599 struct rtw_rx_pkt_stat *pkt_stat)
2600 {
2601 struct rtw_path_div *p_div = &rtwdev->dm_path_div;
2602 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2603 u8 rxsc, bw;
2604 s8 min_rx_power = -120;
2605 s8 rx_evm;
2606 u8 evm_dbm = 0;
2607 u8 rssi;
2608 int path;
2609 u8 channel;
2610
2611 if (pkt_stat->rate > DESC_RATE11M && pkt_stat->rate < DESC_RATEMCS0)
2612 rxsc = GET_PHY_STAT_P1_L_RXSC(phy_status);
2613 else
2614 rxsc = GET_PHY_STAT_P1_HT_RXSC(phy_status);
2615
2616 if (rxsc == 0)
2617 bw = rtwdev->hal.current_band_width;
2618 else if (rxsc >= 1 && rxsc <= 8)
2619 bw = RTW_CHANNEL_WIDTH_20;
2620 else if (rxsc >= 9 && rxsc <= 12)
2621 bw = RTW_CHANNEL_WIDTH_40;
2622 else
2623 bw = RTW_CHANNEL_WIDTH_80;
2624
2625 channel = GET_PHY_STAT_P1_CHANNEL(phy_status);
2626 rtw_set_rx_freq_band(pkt_stat, channel);
2627
2628 pkt_stat->rx_power[RF_PATH_A] = GET_PHY_STAT_P1_PWDB_A(phy_status) - 110;
2629 pkt_stat->rx_power[RF_PATH_B] = GET_PHY_STAT_P1_PWDB_B(phy_status) - 110;
2630 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 2);
2631 pkt_stat->bw = bw;
2632 pkt_stat->signal_power = max3(pkt_stat->rx_power[RF_PATH_A],
2633 pkt_stat->rx_power[RF_PATH_B],
2634 min_rx_power);
2635
2636 dm_info->curr_rx_rate = pkt_stat->rate;
2637
2638 pkt_stat->rx_evm[RF_PATH_A] = GET_PHY_STAT_P1_RXEVM_A(phy_status);
2639 pkt_stat->rx_evm[RF_PATH_B] = GET_PHY_STAT_P1_RXEVM_B(phy_status);
2640
2641 pkt_stat->rx_snr[RF_PATH_A] = GET_PHY_STAT_P1_RXSNR_A(phy_status);
2642 pkt_stat->rx_snr[RF_PATH_B] = GET_PHY_STAT_P1_RXSNR_B(phy_status);
2643
2644 pkt_stat->cfo_tail[RF_PATH_A] = GET_PHY_STAT_P1_CFO_TAIL_A(phy_status);
2645 pkt_stat->cfo_tail[RF_PATH_B] = GET_PHY_STAT_P1_CFO_TAIL_B(phy_status);
2646
2647 for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2648 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2649 dm_info->rssi[path] = rssi;
2650 if (path == RF_PATH_A) {
2651 p_div->path_a_sum += rssi;
2652 p_div->path_a_cnt++;
2653 } else if (path == RF_PATH_B) {
2654 p_div->path_b_sum += rssi;
2655 p_div->path_b_cnt++;
2656 }
2657 dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
2658 dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
2659
2660 rx_evm = pkt_stat->rx_evm[path];
2661
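/* Negative rx_evm values are converted to a positive EVM figure by halving
 * their magnitude, e.g. rx_evm = -52 gives evm_dbm = 26; S8_MIN is treated
 * as invalid and reported as 0.
 */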
2662 if (rx_evm < 0) {
2663 if (rx_evm == S8_MIN)
2664 evm_dbm = 0;
2665 else
2666 evm_dbm = ((u8)-rx_evm >> 1);
2667 }
2668 dm_info->rx_evm_dbm[path] = evm_dbm;
2669 }
2670 rtw_phy_parsing_cfo(rtwdev, pkt_stat);
2671 }
2672
2673 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
2674 struct rtw_rx_pkt_stat *pkt_stat)
2675 {
2676 u8 page;
2677
2678 page = *phy_status & 0xf;
2679
2680 switch (page) {
2681 case 0:
2682 query_phy_status_page0(rtwdev, phy_status, pkt_stat);
2683 break;
2684 case 1:
2685 query_phy_status_page1(rtwdev, phy_status, pkt_stat);
2686 break;
2687 default:
2688 rtw_warn(rtwdev, "unused phy status page (%d)\n", page);
2689 return;
2690 }
2691 }
2692
2693 static void
2694 rtw8822c_set_write_tx_power_ref(struct rtw_dev *rtwdev, u8 *tx_pwr_ref_cck,
2695 u8 *tx_pwr_ref_ofdm)
2696 {
2697 struct rtw_hal *hal = &rtwdev->hal;
2698 u32 txref_cck[2] = {0x18a0, 0x41a0};
2699 u32 txref_ofdm[2] = {0x18e8, 0x41e8};
2700 u8 path;
2701
2702 for (path = 0; path < hal->rf_path_num; path++) {
2703 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2704 rtw_write32_mask(rtwdev, txref_cck[path], 0x7f0000,
2705 tx_pwr_ref_cck[path]);
2706 }
2707 for (path = 0; path < hal->rf_path_num; path++) {
2708 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2709 rtw_write32_mask(rtwdev, txref_ofdm[path], 0x1fc00,
2710 tx_pwr_ref_ofdm[path]);
2711 }
2712 }
2713
2714 static void rtw8822c_set_tx_power_diff(struct rtw_dev *rtwdev, u8 rate,
2715 s8 *diff_idx)
2716 {
2717 u32 offset_txagc = 0x3a00;
2718 u8 rate_idx = rate & 0xfc;
2719 u8 pwr_idx[4];
2720 u32 phy_pwr_idx;
2721 int i;
2722
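/* Pack four signed 7-bit diffs into one AGC word at 0x3a00 + rate-group
 * offset; e.g. diffs {-2, 0, 1, 3} become 0x7e/0x00/0x01/0x03 and are
 * written out as 0x0301007e.
 */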
2723 for (i = 0; i < 4; i++)
2724 pwr_idx[i] = diff_idx[i] & 0x7f;
2725
2726 phy_pwr_idx = pwr_idx[0] |
2727 (pwr_idx[1] << 8) |
2728 (pwr_idx[2] << 16) |
2729 (pwr_idx[3] << 24);
2730
2731 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0x0);
2732 rtw_write32_mask(rtwdev, offset_txagc + rate_idx, MASKDWORD,
2733 phy_pwr_idx);
2734 }
2735
2736 static void rtw8822c_set_tx_power_index(struct rtw_dev *rtwdev)
2737 {
2738 struct rtw_hal *hal = &rtwdev->hal;
2739 u8 rs, rate, j;
2740 u8 pwr_ref_cck[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATE11M],
2741 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATE11M]};
2742 u8 pwr_ref_ofdm[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATEMCS7],
2743 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATEMCS7]};
2744 s8 diff_a, diff_b;
2745 u8 pwr_a, pwr_b;
2746 s8 diff_idx[4];
2747
2748 rtw8822c_set_write_tx_power_ref(rtwdev, pwr_ref_cck, pwr_ref_ofdm);
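/* Rates are handled in groups of four: collect each rate's diff against the
 * reference (the smaller of path A/B), and flush the group with a single
 * register write once rate % 4 == 3.
 */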
2749 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++) {
2750 for (j = 0; j < rtw_rate_size[rs]; j++) {
2751 rate = rtw_rate_section[rs][j];
2752 pwr_a = hal->tx_pwr_tbl[RF_PATH_A][rate];
2753 pwr_b = hal->tx_pwr_tbl[RF_PATH_B][rate];
2754 if (rs == 0) {
2755 diff_a = (s8)pwr_a - (s8)pwr_ref_cck[0];
2756 diff_b = (s8)pwr_b - (s8)pwr_ref_cck[1];
2757 } else {
2758 diff_a = (s8)pwr_a - (s8)pwr_ref_ofdm[0];
2759 diff_b = (s8)pwr_b - (s8)pwr_ref_ofdm[1];
2760 }
2761 diff_idx[rate % 4] = min(diff_a, diff_b);
2762 if (rate % 4 == 3)
2763 rtw8822c_set_tx_power_diff(rtwdev, rate - 3,
2764 diff_idx);
2765 }
2766 }
2767 }
2768
2769 static int rtw8822c_set_antenna(struct rtw_dev *rtwdev,
2770 u32 antenna_tx,
2771 u32 antenna_rx)
2772 {
2773 struct rtw_hal *hal = &rtwdev->hal;
2774
2775 switch (antenna_tx) {
2776 case BB_PATH_A:
2777 case BB_PATH_B:
2778 case BB_PATH_AB:
2779 break;
2780 default:
2781 rtw_warn(rtwdev, "unsupported tx path 0x%x\n", antenna_tx);
2782 return -EINVAL;
2783 }
2784
2785 /* RX on path B alone is not supported */
2786 switch (antenna_rx) {
2787 case BB_PATH_A:
2788 case BB_PATH_AB:
2789 break;
2790 default:
2791 rtw_warn(rtwdev, "unsupported rx path 0x%x\n", antenna_rx);
2792 return -EINVAL;
2793 }
2794
2795 hal->antenna_tx = antenna_tx;
2796 hal->antenna_rx = antenna_rx;
2797
2798 rtw8822c_config_trx_mode(rtwdev, antenna_tx, antenna_rx, false);
2799
2800 return 0;
2801 }
2802
2803 static void rtw8822c_cfg_ldo25(struct rtw_dev *rtwdev, bool enable)
2804 {
2805 u8 ldo_pwr;
2806
2807 ldo_pwr = rtw_read8(rtwdev, REG_ANAPARLDO_POW_MAC);
2808 ldo_pwr = enable ? ldo_pwr | BIT_LDOE25_PON : ldo_pwr & ~BIT_LDOE25_PON;
2809 rtw_write8(rtwdev, REG_ANAPARLDO_POW_MAC, ldo_pwr);
2810 }
2811
2812 static void rtw8822c_false_alarm_statistics(struct rtw_dev *rtwdev)
2813 {
2814 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2815 u32 cck_enable;
2816 u32 cck_fa_cnt;
2817 u32 crc32_cnt;
2818 u32 cca32_cnt;
2819 u32 ofdm_fa_cnt;
2820 u32 ofdm_fa_cnt1, ofdm_fa_cnt2, ofdm_fa_cnt3, ofdm_fa_cnt4, ofdm_fa_cnt5;
2821 u16 parity_fail, rate_illegal, crc8_fail, mcs_fail, sb_search_fail,
2822 fast_fsync, crc8_fail_vhta, mcs_fail_vht;
2823
2824 cck_enable = rtw_read32(rtwdev, REG_ENCCK) & BIT_CCK_BLK_EN;
2825 cck_fa_cnt = rtw_read16(rtwdev, REG_CCK_FACNT);
2826
2827 ofdm_fa_cnt1 = rtw_read32(rtwdev, REG_OFDM_FACNT1);
2828 ofdm_fa_cnt2 = rtw_read32(rtwdev, REG_OFDM_FACNT2);
2829 ofdm_fa_cnt3 = rtw_read32(rtwdev, REG_OFDM_FACNT3);
2830 ofdm_fa_cnt4 = rtw_read32(rtwdev, REG_OFDM_FACNT4);
2831 ofdm_fa_cnt5 = rtw_read32(rtwdev, REG_OFDM_FACNT5);
2832
2833 parity_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt1);
2834 rate_illegal = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt2);
2835 crc8_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt2);
2836 crc8_fail_vhta = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt3);
2837 mcs_fail = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt4);
2838 mcs_fail_vht = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt4);
2839 fast_fsync = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt5);
2840 sb_search_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt5);
2841
2842 ofdm_fa_cnt = parity_fail + rate_illegal + crc8_fail + crc8_fail_vhta +
2843 mcs_fail + mcs_fail_vht + fast_fsync + sb_search_fail;
2844
2845 dm_info->cck_fa_cnt = cck_fa_cnt;
2846 dm_info->ofdm_fa_cnt = ofdm_fa_cnt;
2847 dm_info->total_fa_cnt = ofdm_fa_cnt;
2848 dm_info->total_fa_cnt += cck_enable ? cck_fa_cnt : 0;
2849
2850 crc32_cnt = rtw_read32(rtwdev, 0x2c04);
2851 dm_info->cck_ok_cnt = crc32_cnt & 0xffff;
2852 dm_info->cck_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2853 crc32_cnt = rtw_read32(rtwdev, 0x2c14);
2854 dm_info->ofdm_ok_cnt = crc32_cnt & 0xffff;
2855 dm_info->ofdm_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2856 crc32_cnt = rtw_read32(rtwdev, 0x2c10);
2857 dm_info->ht_ok_cnt = crc32_cnt & 0xffff;
2858 dm_info->ht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2859 crc32_cnt = rtw_read32(rtwdev, 0x2c0c);
2860 dm_info->vht_ok_cnt = crc32_cnt & 0xffff;
2861 dm_info->vht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2862
2863 cca32_cnt = rtw_read32(rtwdev, 0x2c08);
2864 dm_info->ofdm_cca_cnt = ((cca32_cnt & 0xffff0000) >> 16);
2865 dm_info->cck_cca_cnt = cca32_cnt & 0xffff;
2866 dm_info->total_cca_cnt = dm_info->ofdm_cca_cnt;
2867 if (cck_enable)
2868 dm_info->total_cca_cnt += dm_info->cck_cca_cnt;
2869
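/* Toggle the CCK and OFDM false-alarm counter reset bits (0 then 2) so the
 * counters start over for the next polling period.
 */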
2870 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 0);
2871 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 2);
2872 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 0);
2873 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 2);
2874
2875 /* disable rx clk gating to reset counters */
2876 rtw_write32_clr(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2877 rtw_write32_set(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2878 rtw_write32_clr(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2879 rtw_write32_set(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2880 }
2881
2882 static void rtw8822c_do_lck(struct rtw_dev *rtwdev)
2883 {
2884 u32 val;
2885
2886 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2887 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0FA);
2888 fsleep(1);
2889 rtw_write_rf(rtwdev, RF_PATH_A, RF_AAC_CTRL, RFREG_MASK, 0x80000);
2890 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_AAC, RFREG_MASK, 0x80001);
2891 read_poll_timeout(rtw_read_rf, val, val != 0x1, 1000, 100000,
2892 true, rtwdev, RF_PATH_A, RF_AAC_CTRL, 0x1000);
2893 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0F8);
2894 rtw_write_rf(rtwdev, RF_PATH_B, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2895
2896 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2897 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x4f000);
2898 fsleep(1);
2899 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2900 }
2901
2902 static void rtw8822c_do_iqk(struct rtw_dev *rtwdev)
2903 {
2904 struct rtw_iqk_para para = {0};
2905 u8 iqk_chk;
2906 int ret;
2907
2908 para.clear = 1;
2909 rtw_fw_do_iqk(rtwdev, &para);
2910
2911 ret = read_poll_timeout(rtw_read8, iqk_chk, iqk_chk == IQK_DONE_8822C,
2912 20000, 300000, false, rtwdev, REG_RPT_CIP);
2913 if (ret)
2914 rtw_warn(rtwdev, "failed to poll iqk status bit\n");
2915
2916 rtw_write8(rtwdev, REG_IQKSTAT, 0x0);
2917 }
2918
2919 /* for coex */
2920 static void rtw8822c_coex_cfg_init(struct rtw_dev *rtwdev)
2921 {
2922 /* enable TBTT interrupt */
2923 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2924
2925 /* BT report packet sample rate */
2926 /* 0x790[5:0]=0x5 */
2927 rtw_write8_mask(rtwdev, REG_BT_TDMA_TIME, BIT_MASK_SAMPLE_RATE, 0x5);
2928
2929 /* enable BT counter statistics */
2930 rtw_write8(rtwdev, REG_BT_STAT_CTRL, 0x1);
2931
2932 /* enable PTA (3-wire function from BT side) */
2933 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_BT_PTA_EN);
2934 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_PO_BT_PTA_PINS);
2935
2936 /* enable PTA (tx/rx signal from WiFi side) */
2937 rtw_write8_set(rtwdev, REG_QUEUE_CTRL, BIT_PTA_WL_TX_EN);
2938 /* WL tx signal to PTA is not gated by EDCCA */
2939 rtw_write8_clr(rtwdev, REG_QUEUE_CTRL, BIT_PTA_EDCCA_EN);
2940 /* GNT_BT=1 while select both */
2941 rtw_write16_set(rtwdev, REG_BT_COEX_V2, BIT_GNT_BT_POLARITY);
2942 /* BT_CCA = ~GNT_WL_BB, not OR'ed with GNT_BT_BB or LTE_Rx */
2943 rtw_write8_clr(rtwdev, REG_DUMMY_PAGE4_V1, BIT_BTCCA_CTRL);
2944
2945 /* to avoid RF parameter error */
2946 rtw_write_rf(rtwdev, RF_PATH_B, RF_MODOPT, 0xfffff, 0x40000);
2947 }
2948
2949 static void rtw8822c_coex_cfg_gnt_fix(struct rtw_dev *rtwdev)
2950 {
2951 struct rtw_coex *coex = &rtwdev->coex;
2952 struct rtw_coex_stat *coex_stat = &coex->stat;
2953 struct rtw_efuse *efuse = &rtwdev->efuse;
2954 u32 rf_0x1;
2955
2956 if (coex_stat->gnt_workaround_state == coex_stat->wl_coex_mode)
2957 return;
2958
2959 coex_stat->gnt_workaround_state = coex_stat->wl_coex_mode;
2960
2961 if ((coex_stat->kt_ver == 0 && coex->under_5g) || coex->freerun)
2962 rf_0x1 = 0x40021;
2963 else
2964 rf_0x1 = 0x40000;
2965
2966 /* BT at S1 for Shared-Ant */
2967 if (efuse->share_ant)
2968 rf_0x1 |= BIT(13);
2969
2970 rtw_write_rf(rtwdev, RF_PATH_B, 0x1, 0xfffff, rf_0x1);
2971
2972 /* WL-S0 2G RF TRX cannot be masked by GNT_BT
2973 * enable "WLS0 BB chage RF mode if GNT_BT = 1" for shared-antenna type
2974 * disable:0x1860[3] = 1, enable:0x1860[3] = 0
2975 *
2976 * enable "DAC off if GNT_WL = 0" for non-shared-antenna
2977 * disable 0x1c30[22] = 0,
2978 * enable: 0x1c30[22] = 1, 0x1c38[12] = 0, 0x1c38[28] = 1
2979 */
2980 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
2981 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
2982 BIT_ANAPAR_BTPS >> 16, 0);
2983 } else {
2984 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
2985 BIT_ANAPAR_BTPS >> 16, 1);
2986 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 1,
2987 BIT_DAC_OFF_ENABLE, 0);
2988 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 3,
2989 BIT_DAC_OFF_ENABLE, 1);
2990 }
2991
2992 /* disable WL-S1 BB change RF mode if GNT_BT
2993 * since RF TRx mask can do it
2994 */
2995 rtw_write8_mask(rtwdev, REG_IGN_GNTBT4,
2996 BIT_PI_IGNORE_GNT_BT, 1);
2997
2998 /* disable WL-S0 BB change RF mode if wifi is at 5G,
2999 * or antenna path is separated
3000 */
3001 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3002 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3003 BIT_PI_IGNORE_GNT_BT, 1);
3004 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3005 BIT_NOMASK_TXBT_ENABLE, 1);
3006 } else if (coex_stat->wl_coex_mode == COEX_WLINK_5G ||
3007 coex->under_5g || !efuse->share_ant) {
3008 if (coex_stat->kt_ver >= 3) {
3009 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3010 BIT_PI_IGNORE_GNT_BT, 0);
3011 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3012 BIT_NOMASK_TXBT_ENABLE, 1);
3013 } else {
3014 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3015 BIT_PI_IGNORE_GNT_BT, 1);
3016 }
3017 } else {
3018 /* shared-antenna */
3019 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3020 BIT_PI_IGNORE_GNT_BT, 0);
3021 if (coex_stat->kt_ver >= 3) {
3022 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3023 BIT_NOMASK_TXBT_ENABLE, 0);
3024 }
3025 }
3026 }
3027
3028 static void rtw8822c_coex_cfg_gnt_debug(struct rtw_dev *rtwdev)
3029 {
3030 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 2, BIT_BTGP_SPI_EN >> 16, 0);
3031 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 3, BIT_BTGP_JTAG_EN >> 24, 0);
3032 rtw_write8_mask(rtwdev, REG_GPIO_MUXCFG + 2, BIT_FSPI_EN >> 16, 0);
3033 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 1, BIT_LED1DIS >> 8, 0);
3034 rtw_write8_mask(rtwdev, REG_SYS_SDIO_CTRL + 3, BIT_DBG_GNT_WL_BT >> 24, 0);
3035 }
3036
3037 static void rtw8822c_coex_cfg_rfe_type(struct rtw_dev *rtwdev)
3038 {
3039 struct rtw_coex *coex = &rtwdev->coex;
3040 struct rtw_coex_rfe *coex_rfe = &coex->rfe;
3041 struct rtw_efuse *efuse = &rtwdev->efuse;
3042
3043 coex_rfe->rfe_module_type = rtwdev->efuse.rfe_option;
3044 coex_rfe->ant_switch_polarity = 0;
3045 coex_rfe->ant_switch_exist = false;
3046 coex_rfe->ant_switch_with_bt = false;
3047 coex_rfe->ant_switch_diversity = false;
3048
3049 if (efuse->share_ant)
3050 coex_rfe->wlg_at_btg = true;
3051 else
3052 coex_rfe->wlg_at_btg = false;
3053
3054 /* disable LTE coex in wifi side */
3055 rtw_coex_write_indirect_reg(rtwdev, LTE_COEX_CTRL, BIT_LTE_COEX_EN, 0x0);
3056 rtw_coex_write_indirect_reg(rtwdev, LTE_WL_TRX_CTRL, MASKLWORD, 0xffff);
3057 rtw_coex_write_indirect_reg(rtwdev, LTE_BT_TRX_CTRL, MASKLWORD, 0xffff);
3058 }
3059
3060 static void rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev *rtwdev, u8 wl_pwr)
3061 {
3062 struct rtw_coex *coex = &rtwdev->coex;
3063 struct rtw_coex_dm *coex_dm = &coex->dm;
3064
3065 if (wl_pwr == coex_dm->cur_wl_pwr_lvl)
3066 return;
3067
3068 coex_dm->cur_wl_pwr_lvl = wl_pwr;
3069 }
3070
3071 static void rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev *rtwdev, bool low_gain)
3072 {
3073 struct rtw_coex *coex = &rtwdev->coex;
3074 struct rtw_coex_dm *coex_dm = &coex->dm;
3075
3076 if (low_gain == coex_dm->cur_wl_rx_low_gain_en)
3077 return;
3078
3079 coex_dm->cur_wl_rx_low_gain_en = low_gain;
3080
3081 if (coex_dm->cur_wl_rx_low_gain_en) {
3082 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table On!\n");
3083
3084 /* set Rx filter corner RCK offset */
3085 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x22);
3086 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x36);
3087 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x22);
3088 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x36);
3089
3090 } else {
3091 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table Off!\n");
3092
3093 /* set Rx filter corner RCK offset */
3094 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x20);
3095 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x0);
3096 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x20);
3097 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x0);
3098 }
3099 }
3100
3101 static void rtw8822c_bf_enable_bfee_su(struct rtw_dev *rtwdev,
3102 struct rtw_vif *vif,
3103 struct rtw_bfee *bfee)
3104 {
3105 u8 csi_rsc = 0;
3106 u32 tmp6dc;
3107
3108 rtw_bf_enable_bfee_su(rtwdev, vif, bfee);
3109
3110 tmp6dc = rtw_read32(rtwdev, REG_BBPSF_CTRL) |
3111 BIT_WMAC_USE_NDPARATE |
3112 (csi_rsc << 13);
3113 if (vif->net_type == RTW_NET_AP_MODE)
3114 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc | BIT(12));
3115 else
3116 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc & ~BIT(12));
3117
3118 rtw_write32(rtwdev, REG_CSI_RRSR, 0x550);
3119 }
3120
3121 static void rtw8822c_bf_config_bfee_su(struct rtw_dev *rtwdev,
3122 struct rtw_vif *vif,
3123 struct rtw_bfee *bfee, bool enable)
3124 {
3125 if (enable)
3126 rtw8822c_bf_enable_bfee_su(rtwdev, vif, bfee);
3127 else
3128 rtw_bf_remove_bfee_su(rtwdev, bfee);
3129 }
3130
3131 static void rtw8822c_bf_config_bfee_mu(struct rtw_dev *rtwdev,
3132 struct rtw_vif *vif,
3133 struct rtw_bfee *bfee, bool enable)
3134 {
3135 if (enable)
3136 rtw_bf_enable_bfee_mu(rtwdev, vif, bfee);
3137 else
3138 rtw_bf_remove_bfee_mu(rtwdev, bfee);
3139 }
3140
3141 static void rtw8822c_bf_config_bfee(struct rtw_dev *rtwdev, struct rtw_vif *vif,
3142 struct rtw_bfee *bfee, bool enable)
3143 {
3144 if (bfee->role == RTW_BFEE_SU)
3145 rtw8822c_bf_config_bfee_su(rtwdev, vif, bfee, enable);
3146 else if (bfee->role == RTW_BFEE_MU)
3147 rtw8822c_bf_config_bfee_mu(rtwdev, vif, bfee, enable);
3148 else
3149 rtw_warn(rtwdev, "wrong bfee role\n");
3150 }
3151
3152 struct dpk_cfg_pair {
3153 u32 addr;
3154 u32 bitmask;
3155 u32 data;
3156 };
3157
3158 void rtw8822c_parse_tbl_dpk(struct rtw_dev *rtwdev,
3159 const struct rtw_table *tbl)
3160 {
3161 const struct dpk_cfg_pair *p = tbl->data;
3162 const struct dpk_cfg_pair *end = p + tbl->size / 3;
3163
3164 BUILD_BUG_ON(sizeof(struct dpk_cfg_pair) != sizeof(u32) * 3);
3165
3166 for (; p < end; p++)
3167 rtw_write32_mask(rtwdev, p->addr, p->bitmask, p->data);
3168 }
3169
3170 static void rtw8822c_dpk_set_gnt_wl(struct rtw_dev *rtwdev, bool is_before_k)
3171 {
3172 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3173
3174 if (is_before_k) {
3175 dpk_info->gnt_control = rtw_read32(rtwdev, 0x70);
3176 dpk_info->gnt_value = rtw_coex_read_indirect_reg(rtwdev, 0x38);
3177 rtw_write32_mask(rtwdev, 0x70, BIT(26), 0x1);
3178 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKBYTE1, 0x77);
3179 } else {
3180 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKDWORD,
3181 dpk_info->gnt_value);
3182 rtw_write32(rtwdev, 0x70, dpk_info->gnt_control);
3183 }
3184 }
3185
3186 static void
3187 rtw8822c_dpk_restore_registers(struct rtw_dev *rtwdev, u32 reg_num,
3188 struct rtw_backup_info *bckp)
3189 {
3190 rtw_restore_reg(rtwdev, bckp, reg_num);
3191 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3192 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0x4);
3193 }
3194
3195 static void
3196 rtw8822c_dpk_backup_registers(struct rtw_dev *rtwdev, u32 *reg,
3197 u32 reg_num, struct rtw_backup_info *bckp)
3198 {
3199 u32 i;
3200
3201 for (i = 0; i < reg_num; i++) {
3202 bckp[i].len = 4;
3203 bckp[i].reg = reg[i];
3204 bckp[i].val = rtw_read32(rtwdev, reg[i]);
3205 }
3206 }
3207
3208 static void rtw8822c_dpk_backup_rf_registers(struct rtw_dev *rtwdev,
3209 u32 *rf_reg,
3210 u32 rf_reg_bak[][2])
3211 {
3212 u32 i;
3213
3214 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3215 rf_reg_bak[i][RF_PATH_A] = rtw_read_rf(rtwdev, RF_PATH_A,
3216 rf_reg[i], RFREG_MASK);
3217 rf_reg_bak[i][RF_PATH_B] = rtw_read_rf(rtwdev, RF_PATH_B,
3218 rf_reg[i], RFREG_MASK);
3219 }
3220 }
3221
3222 static void rtw8822c_dpk_reload_rf_registers(struct rtw_dev *rtwdev,
3223 u32 *rf_reg,
3224 u32 rf_reg_bak[][2])
3225 {
3226 u32 i;
3227
3228 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3229 rtw_write_rf(rtwdev, RF_PATH_A, rf_reg[i], RFREG_MASK,
3230 rf_reg_bak[i][RF_PATH_A]);
3231 rtw_write_rf(rtwdev, RF_PATH_B, rf_reg[i], RFREG_MASK,
3232 rf_reg_bak[i][RF_PATH_B]);
3233 }
3234 }
3235
3236 static void rtw8822c_dpk_information(struct rtw_dev *rtwdev)
3237 {
3238 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3239 u32 reg;
3240 u8 band_shift;
3241
3242 reg = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
3243
3244 band_shift = FIELD_GET(BIT(16), reg);
3245 dpk_info->dpk_band = 1 << band_shift;
3246 dpk_info->dpk_ch = FIELD_GET(0xff, reg);
3247 dpk_info->dpk_bw = FIELD_GET(0x3000, reg);
3248 }
3249
3250 static void rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev *rtwdev, u8 path)
3251 {
3252 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3253 udelay(5);
3254 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84801);
3255 usleep_range(600, 610);
3256 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3257 }
3258
3259 static u8 rtw8822c_dpk_dc_corr_check(struct rtw_dev *rtwdev, u8 path)
3260 {
3261 u16 dc_i, dc_q;
3262 u8 corr_idx;
3263
3264 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000900f0);
3265 dc_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3266 dc_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(11, 0));
3267
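	/* the reported DC I/Q values are 12-bit two's complement; convert
	 * negative readings to their magnitude before the range check
	 */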
3268 if (dc_i & BIT(11))
3269 dc_i = 0x1000 - dc_i;
3270 if (dc_q & BIT(11))
3271 dc_q = 0x1000 - dc_q;
3272
3273 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3274 corr_idx = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(7, 0));
3275 rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(15, 8));
3276
3277 if (dc_i > 200 || dc_q > 200 || corr_idx < 40 || corr_idx > 65)
3278 return 1;
3279 else
3280 return 0;
3281
3282 }
3283
3284 static void rtw8822c_dpk_tx_pause(struct rtw_dev *rtwdev)
3285 {
3286 u8 reg_a, reg_b;
3287 u16 count = 0;
3288
3289 rtw_write8(rtwdev, 0x522, 0xff);
3290 rtw_write32_mask(rtwdev, 0x1e70, 0xf, 0x2);
3291
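	/* wait for both RF paths to leave TX mode (RF 0x00[19:16] == 2),
	 * bounded at 2500 polls so a stuck path cannot hang the DPK flow
	 */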
3292 do {
3293 reg_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A, 0x00, 0xf0000);
3294 reg_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B, 0x00, 0xf0000);
3295 udelay(2);
3296 count++;
3297 } while ((reg_a == 2 || reg_b == 2) && count < 2500);
3298 }
3299
3300 static void rtw8822c_dpk_mac_bb_setting(struct rtw_dev *rtwdev)
3301 {
3302 rtw8822c_dpk_tx_pause(rtwdev);
3303 rtw_load_table(rtwdev, &rtw8822c_dpk_mac_bb_tbl);
3304 }
3305
3306 static void rtw8822c_dpk_afe_setting(struct rtw_dev *rtwdev, bool is_do_dpk)
3307 {
3308 if (is_do_dpk)
3309 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_is_dpk_tbl);
3310 else
3311 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_no_dpk_tbl);
3312 }
3313
3314 static void rtw8822c_dpk_pre_setting(struct rtw_dev *rtwdev)
3315 {
3316 u8 path;
3317
3318 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3319 rtw_write_rf(rtwdev, path, RF_RXAGC_OFFSET, RFREG_MASK, 0x0);
3320 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3321 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G)
3322 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
3323 else
3324 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
3325 rtw_write32_mask(rtwdev, REG_DPD_LUT0, BIT_GLOSS_DB, 0x4);
3326 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x3);
3327 }
3328 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3329 rtw_write32(rtwdev, REG_DPD_CTL11, 0x3b23170b);
3330 rtw_write32(rtwdev, REG_DPD_CTL12, 0x775f5347);
3331 }
3332
3333 static u32 rtw8822c_dpk_rf_setting(struct rtw_dev *rtwdev, u8 path)
3334 {
3335 u32 ori_txbb;
3336
3337 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50017);
3338 ori_txbb = rtw_read_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK);
3339
3340 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
3341 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_PWR_TRIM, 0x1);
3342 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_BB_GAIN, 0x0);
3343 rtw_write_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK, ori_txbb);
3344
3345 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G) {
3346 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x1);
3347 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x0);
3348 } else {
3349 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x0);
3350 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x6);
3351 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
3352 rtw_write_rf(rtwdev, path, RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0);
3353 }
3354
3355 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3356 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
3357 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
3358
3359 if (rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80)
3360 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x2);
3361 else
3362 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
3363
3364 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT(1), 0x1);
3365
3366 usleep_range(100, 110);
3367
3368 return ori_txbb & 0x1f;
3369 }
3370
3371 static u16 rtw8822c_dpk_get_cmd(struct rtw_dev *rtwdev, u8 action, u8 path)
3372 {
3373 u16 cmd;
3374 u8 bw = rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80 ? 2 : 0;
3375
3376 switch (action) {
3377 case RTW_DPK_GAIN_LOSS:
3378 cmd = 0x14 + path;
3379 break;
3380 case RTW_DPK_DO_DPK:
3381 cmd = 0x16 + path + bw;
3382 break;
3383 case RTW_DPK_DPK_ON:
3384 cmd = 0x1a + path;
3385 break;
3386 case RTW_DPK_DAGC:
3387 cmd = 0x1c + path + bw;
3388 break;
3389 default:
3390 return 0;
3391 }
3392
3393 return (cmd << 8) | 0x48;
3394 }
3395
3396 static u8 rtw8822c_dpk_one_shot(struct rtw_dev *rtwdev, u8 path, u8 action)
3397 {
3398 u16 dpk_cmd;
3399 u8 result = 0;
3400
3401 rtw8822c_dpk_set_gnt_wl(rtwdev, true);
3402
3403 if (action == RTW_DPK_CAL_PWR) {
3404 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x1);
3405 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x0);
3406 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3407 msleep(10);
3408 if (!check_hw_ready(rtwdev, REG_STAT_RPT, BIT(31), 0x1)) {
3409 result = 1;
3410 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3411 }
3412 } else {
3413 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3414 0x8 | (path << 1));
3415 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3416
3417 dpk_cmd = rtw8822c_dpk_get_cmd(rtwdev, action, path);
3418 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd);
3419 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd + 1);
3420 msleep(10);
3421 if (!check_hw_ready(rtwdev, 0x2d9c, 0xff, 0x55)) {
3422 result = 1;
3423 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3424 }
3425 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3426 0x8 | (path << 1));
3427 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3428 }
3429
3430 rtw8822c_dpk_set_gnt_wl(rtwdev, false);
3431
3432 rtw_write8(rtwdev, 0x1b10, 0x0);
3433
3434 return result;
3435 }
3436
3437 static u16 rtw8822c_dpk_dgain_read(struct rtw_dev *rtwdev, u8 path)
3438 {
3439 u16 dgain;
3440
3441 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3442 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, 0x00ff0000, 0x0);
3443
3444 dgain = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3445
3446 return dgain;
3447 }
3448
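/* Toggle the thermal meter trigger bit and let the sensor settle before
 * reading back the 6-bit thermal value.
 */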
3449 static u8 rtw8822c_dpk_thermal_read(struct rtw_dev *rtwdev, u8 path)
3450 {
3451 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3452 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x0);
3453 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3454 udelay(15);
3455
3456 return (u8)rtw_read_rf(rtwdev, path, RF_T_METER, 0x0007e);
3457 }
3458
3459 static u32 rtw8822c_dpk_pas_read(struct rtw_dev *rtwdev, u8 path)
3460 {
3461 u32 i_val, q_val;
3462
3463 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3464 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3465 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060001);
3466 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3467 rtw_write32(rtwdev, 0x1b4c, 0x00080000);
3468
3469 q_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD);
3470 i_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD);
3471
3472 if (i_val & BIT(15))
3473 i_val = 0x10000 - i_val;
3474 if (q_val & BIT(15))
3475 q_val = 0x10000 - q_val;
3476
3477 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3478
3479 return i_val * i_val + q_val * q_val;
3480 }
3481
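/* Approximate 100 * log2(val) using a small fraction lookup table,
 * e.g. rtw8822c_psd_log2base(1024) == 1000.
 */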
3482 static u32 rtw8822c_psd_log2base(u32 val)
3483 {
3484 u32 tmp, val_integerd_b, tindex;
3485 u32 result, val_fractiond_b;
3486 u32 table_fraction[21] = {0, 432, 332, 274, 232, 200, 174,
3487 151, 132, 115, 100, 86, 74, 62, 51,
3488 42, 32, 23, 15, 7, 0};
3489
3490 if (val == 0)
3491 return 0;
3492
3493 val_integerd_b = __fls(val) + 1;
3494
3495 tmp = (val * 100) / (1 << val_integerd_b);
3496 tindex = tmp / 5;
3497
3498 if (tindex >= ARRAY_SIZE(table_fraction))
3499 tindex = ARRAY_SIZE(table_fraction) - 1;
3500
3501 val_fractiond_b = table_fraction[tindex];
3502
3503 result = val_integerd_b * 100 - val_fractiond_b;
3504
3505 return result;
3506 }
3507
3508 static u8 rtw8822c_dpk_gainloss_result(struct rtw_dev *rtwdev, u8 path)
3509 {
3510 u8 result;
3511
3512 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3513 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x1);
3514 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060000);
3515
3516 result = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, 0x000000f0);
3517
3518 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3519
3520 return result;
3521 }
3522
3523 static u8 rtw8822c_dpk_agc_gain_chk(struct rtw_dev *rtwdev, u8 path,
3524 u8 limited_pga)
3525 {
3526 u8 result = 0;
3527 u16 dgain;
3528
3529 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3530 dgain = rtw8822c_dpk_dgain_read(rtwdev, path);
3531
3532 if (dgain > 1535 && !limited_pga)
3533 return RTW_DPK_GAIN_LESS;
3534 else if (dgain < 768 && !limited_pga)
3535 return RTW_DPK_GAIN_LARGE;
3536 else
3537 return result;
3538 }
3539
3540 static u8 rtw8822c_dpk_agc_loss_chk(struct rtw_dev *rtwdev, u8 path)
3541 {
3542 u32 loss, loss_db;
3543
3544 loss = rtw8822c_dpk_pas_read(rtwdev, path);
3545 if (loss < 0x4000000)
3546 return RTW_DPK_GL_LESS;
3547 loss_db = 3 * rtw8822c_psd_log2base(loss >> 13) - 3870;
3548
3549 if (loss_db > 1000)
3550 return RTW_DPK_GL_LARGE;
3551 else if (loss_db < 250)
3552 return RTW_DPK_GL_LESS;
3553 else
3554 return RTW_DPK_AGC_OUT;
3555 }
3556
3557 struct rtw8822c_dpk_data {
3558 u8 txbb;
3559 u8 pga;
3560 u8 limited_pga;
3561 u8 agc_cnt;
3562 bool loss_only;
3563 bool gain_only;
3564 u8 path;
3565 };
3566
3567 static u8 rtw8822c_gain_check_state(struct rtw_dev *rtwdev,
3568 struct rtw8822c_dpk_data *data)
3569 {
3570 u8 state;
3571
3572 data->txbb = (u8)rtw_read_rf(rtwdev, data->path, RF_TX_GAIN,
3573 BIT_GAIN_TXBB);
3574 data->pga = (u8)rtw_read_rf(rtwdev, data->path, RF_MODE_TRXAGC,
3575 BIT_RXAGC);
3576
3577 if (data->loss_only) {
3578 state = RTW_DPK_LOSS_CHECK;
3579 goto check_end;
3580 }
3581
3582 state = rtw8822c_dpk_agc_gain_chk(rtwdev, data->path,
3583 data->limited_pga);
3584 if (state == RTW_DPK_GAIN_CHECK && data->gain_only)
3585 state = RTW_DPK_AGC_OUT;
3586 else if (state == RTW_DPK_GAIN_CHECK)
3587 state = RTW_DPK_LOSS_CHECK;
3588
3589 check_end:
3590 data->agc_cnt++;
3591 if (data->agc_cnt >= 6)
3592 state = RTW_DPK_AGC_OUT;
3593
3594 return state;
3595 }
3596
3597 static u8 rtw8822c_gain_large_state(struct rtw_dev *rtwdev,
3598 struct rtw8822c_dpk_data *data)
3599 {
3600 u8 pga = data->pga;
3601
3602 if (pga > 0xe)
3603 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3604 else if (pga > 0xb && pga < 0xf)
3605 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0);
3606 else if (pga < 0xc)
3607 data->limited_pga = 1;
3608
3609 return RTW_DPK_GAIN_CHECK;
3610 }
3611
3612 static u8 rtw8822c_gain_less_state(struct rtw_dev *rtwdev,
3613 struct rtw8822c_dpk_data *data)
3614 {
3615 u8 pga = data->pga;
3616
3617 if (pga < 0xc)
3618 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3619 else if (pga > 0xb && pga < 0xf)
3620 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3621 else if (pga > 0xe)
3622 data->limited_pga = 1;
3623
3624 return RTW_DPK_GAIN_CHECK;
3625 }
3626
3627 static u8 rtw8822c_gl_state(struct rtw_dev *rtwdev,
3628 struct rtw8822c_dpk_data *data, u8 is_large)
3629 {
3630 u8 txbb_bound[] = {0x1f, 0};
3631
3632 if (data->txbb == txbb_bound[is_large])
3633 return RTW_DPK_AGC_OUT;
3634
3635 if (is_large == 1)
3636 data->txbb -= 2;
3637 else
3638 data->txbb += 3;
3639
3640 rtw_write_rf(rtwdev, data->path, RF_TX_GAIN, BIT_GAIN_TXBB, data->txbb);
3641 data->limited_pga = 0;
3642
3643 return RTW_DPK_GAIN_CHECK;
3644 }
3645
3646 static u8 rtw8822c_gl_large_state(struct rtw_dev *rtwdev,
3647 struct rtw8822c_dpk_data *data)
3648 {
3649 return rtw8822c_gl_state(rtwdev, data, 1);
3650 }
3651
3652 static u8 rtw8822c_gl_less_state(struct rtw_dev *rtwdev,
3653 struct rtw8822c_dpk_data *data)
3654 {
3655 return rtw8822c_gl_state(rtwdev, data, 0);
3656 }
3657
3658 static u8 rtw8822c_loss_check_state(struct rtw_dev *rtwdev,
3659 struct rtw8822c_dpk_data *data)
3660 {
3661 u8 path = data->path;
3662 u8 state;
3663
3664 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_GAIN_LOSS);
3665 state = rtw8822c_dpk_agc_loss_chk(rtwdev, path);
3666
3667 return state;
3668 }
3669
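/* DPK AGC state machine: each handler returns the next RTW_DPK_* state,
 * which indexes this table until RTW_DPK_AGC_OUT ends the loop in
 * rtw8822c_dpk_pas_agc().
 */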
3670 static u8 (*dpk_state[])(struct rtw_dev *rtwdev,
3671 struct rtw8822c_dpk_data *data) = {
3672 rtw8822c_gain_check_state, rtw8822c_gain_large_state,
3673 rtw8822c_gain_less_state, rtw8822c_gl_large_state,
3674 rtw8822c_gl_less_state, rtw8822c_loss_check_state };
3675
3676 static u8 rtw8822c_dpk_pas_agc(struct rtw_dev *rtwdev, u8 path,
3677 bool gain_only, bool loss_only)
3678 {
3679 struct rtw8822c_dpk_data data = {0};
3680 u8 (*func)(struct rtw_dev *rtwdev, struct rtw8822c_dpk_data *data);
3681 u8 state = RTW_DPK_GAIN_CHECK;
3682
3683 data.loss_only = loss_only;
3684 data.gain_only = gain_only;
3685 data.path = path;
3686
3687 for (;;) {
3688 func = dpk_state[state];
3689 state = func(rtwdev, &data);
3690 if (state == RTW_DPK_AGC_OUT)
3691 break;
3692 }
3693
3694 return data.txbb;
3695 }
3696
3697 static bool rtw8822c_dpk_coef_iq_check(struct rtw_dev *rtwdev,
3698 u16 coef_i, u16 coef_q)
3699 {
3700 if (coef_i == 0x1000 || coef_i == 0x0fff ||
3701 coef_q == 0x1000 || coef_q == 0x0fff)
3702 return true;
3703
3704 return false;
3705 }
3706
3707 static u32 rtw8822c_dpk_coef_transfer(struct rtw_dev *rtwdev)
3708 {
3709 u32 reg = 0;
3710 u16 coef_i = 0, coef_q = 0;
3711
3712 reg = rtw_read32(rtwdev, REG_STAT_RPT);
3713
3714 coef_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD) & 0x1fff;
3715 coef_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD) & 0x1fff;
3716
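	/* translate the reported Q coefficient (13-bit two's complement),
	 * then pack I into bits 28:16 and Q into bits 12:0
	 */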
3717 coef_q = ((0x2000 - coef_q) & 0x1fff) - 1;
3718
3719 reg = (coef_i << 16) | coef_q;
3720
3721 return reg;
3722 }
3723
3724 static const u32 rtw8822c_dpk_get_coef_tbl[] = {
3725 0x000400f0, 0x040400f0, 0x080400f0, 0x010400f0, 0x050400f0,
3726 0x090400f0, 0x020400f0, 0x060400f0, 0x0a0400f0, 0x030400f0,
3727 0x070400f0, 0x0b0400f0, 0x0c0400f0, 0x100400f0, 0x0d0400f0,
3728 0x110400f0, 0x0e0400f0, 0x120400f0, 0x0f0400f0, 0x130400f0,
3729 };
3730
3731 static void rtw8822c_dpk_coef_tbl_apply(struct rtw_dev *rtwdev, u8 path)
3732 {
3733 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3734 int i;
3735
3736 for (i = 0; i < 20; i++) {
3737 rtw_write32(rtwdev, REG_RXSRAM_CTL,
3738 rtw8822c_dpk_get_coef_tbl[i]);
3739 dpk_info->coef[path][i] = rtw8822c_dpk_coef_transfer(rtwdev);
3740 }
3741 }
3742
3743 static void rtw8822c_dpk_get_coef(struct rtw_dev *rtwdev, u8 path)
3744 {
3745 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3746
3747 if (path == RF_PATH_A) {
3748 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x0);
3749 rtw_write32(rtwdev, REG_DPD_CTL0_S0, 0x30000080);
3750 } else if (path == RF_PATH_B) {
3751 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x1);
3752 rtw_write32(rtwdev, REG_DPD_CTL0_S1, 0x30000080);
3753 }
3754
3755 rtw8822c_dpk_coef_tbl_apply(rtwdev, path);
3756 }
3757
3758 static u8 rtw8822c_dpk_coef_read(struct rtw_dev *rtwdev, u8 path)
3759 {
3760 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3761 u8 addr, result = 1;
3762 u16 coef_i, coef_q;
3763
3764 for (addr = 0; addr < 20; addr++) {
3765 coef_i = FIELD_GET(0x1fff0000, dpk_info->coef[path][addr]);
3766 coef_q = FIELD_GET(0x1fff, dpk_info->coef[path][addr]);
3767
3768 if (rtw8822c_dpk_coef_iq_check(rtwdev, coef_i, coef_q)) {
3769 result = 0;
3770 break;
3771 }
3772 }
3773 return result;
3774 }
3775
3776 static void rtw8822c_dpk_coef_write(struct rtw_dev *rtwdev, u8 path, u8 result)
3777 {
3778 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3779 u16 reg[DPK_RF_PATH_NUM] = {0x1b0c, 0x1b64};
3780 u32 coef;
3781 u8 addr;
3782
3783 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3784 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3785
3786 for (addr = 0; addr < 20; addr++) {
3787 if (result == 0) {
3788 if (addr == 3)
3789 coef = 0x04001fff;
3790 else
3791 coef = 0x00001fff;
3792 } else {
3793 coef = dpk_info->coef[path][addr];
3794 }
3795 rtw_write32(rtwdev, reg[path] + addr * 4, coef);
3796 }
3797 }
3798
3799 static void rtw8822c_dpk_fill_result(struct rtw_dev *rtwdev, u32 dpk_txagc,
3800 u8 path, u8 result)
3801 {
3802 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3803
3804 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3805
3806 if (result)
3807 rtw_write8(rtwdev, REG_DPD_AGC, (u8)(dpk_txagc - 6));
3808 else
3809 rtw_write8(rtwdev, REG_DPD_AGC, 0x00);
3810
3811 dpk_info->result[path] = result;
3812 dpk_info->dpk_txagc[path] = rtw_read8(rtwdev, REG_DPD_AGC);
3813
3814 rtw8822c_dpk_coef_write(rtwdev, path, result);
3815 }
3816
3817 static u32 rtw8822c_dpk_gainloss(struct rtw_dev *rtwdev, u8 path)
3818 {
3819 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3820 u8 tx_agc, tx_bb, ori_txbb, ori_txagc, tx_agc_search, t1, t2;
3821
3822 ori_txbb = rtw8822c_dpk_rf_setting(rtwdev, path);
3823 ori_txagc = (u8)rtw_read_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_TXAGC);
3824
3825 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3826 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3827 rtw8822c_dpk_dgain_read(rtwdev, path);
3828
3829 if (rtw8822c_dpk_dc_corr_check(rtwdev, path)) {
3830 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3831 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3832 rtw8822c_dpk_dc_corr_check(rtwdev, path);
3833 }
3834
3835 t1 = rtw8822c_dpk_thermal_read(rtwdev, path);
3836 tx_bb = rtw8822c_dpk_pas_agc(rtwdev, path, false, true);
3837 tx_agc_search = rtw8822c_dpk_gainloss_result(rtwdev, path);
3838
3839 if (tx_bb < tx_agc_search)
3840 tx_bb = 0;
3841 else
3842 tx_bb = tx_bb - tx_agc_search;
3843
3844 rtw_write_rf(rtwdev, path, RF_TX_GAIN, BIT_GAIN_TXBB, tx_bb);
3845
3846 tx_agc = ori_txagc - (ori_txbb - tx_bb);
3847
3848 t2 = rtw8822c_dpk_thermal_read(rtwdev, path);
3849
3850 dpk_info->thermal_dpk_delta[path] = abs(t2 - t1);
3851
3852 return tx_agc;
3853 }
3854
3855 static u8 rtw8822c_dpk_by_path(struct rtw_dev *rtwdev, u32 tx_agc, u8 path)
3856 {
3857 u8 result;
3858
3859 result = rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DO_DPK);
3860
3861 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3862
3863 result = result | (u8)rtw_read32_mask(rtwdev, REG_DPD_CTL1_S0, BIT(26));
3864
3865 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x33e14);
3866
3867 rtw8822c_dpk_get_coef(rtwdev, path);
3868
3869 return result;
3870 }
3871
3872 static void rtw8822c_dpk_cal_gs(struct rtw_dev *rtwdev, u8 path)
3873 {
3874 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3875 u32 tmp_gs = 0;
3876
3877 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3878 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_BYPASS_DPD, 0x0);
3879 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3880 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3881 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x1);
3882 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3883 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0xf);
3884
3885 if (path == RF_PATH_A) {
3886 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
3887 0x1066680);
3888 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN, 0x1);
3889 } else {
3890 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
3891 0x1066680);
3892 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN, 0x1);
3893 }
3894
3895 if (dpk_info->dpk_bw == DPK_CHANNEL_WIDTH_80) {
3896 rtw_write32(rtwdev, REG_DPD_CTL16, 0x80001310);
3897 rtw_write32(rtwdev, REG_DPD_CTL16, 0x00001310);
3898 rtw_write32(rtwdev, REG_DPD_CTL16, 0x810000db);
3899 rtw_write32(rtwdev, REG_DPD_CTL16, 0x010000db);
3900 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3901 rtw_write32(rtwdev, REG_DPD_CTL15,
3902 0x05020000 | (BIT(path) << 28));
3903 } else {
3904 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8200190c);
3905 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0200190c);
3906 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8301ee14);
3907 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0301ee14);
3908 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3909 rtw_write32(rtwdev, REG_DPD_CTL15,
3910 0x05020008 | (BIT(path) << 28));
3911 }
3912
3913 rtw_write32_mask(rtwdev, REG_DPD_CTL0, MASKBYTE3, 0x8 | path);
3914
3915 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_CAL_PWR);
3916
3917 rtw_write32_mask(rtwdev, REG_DPD_CTL15, MASKBYTE3, 0x0);
3918 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3919 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3920 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x0);
3921 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3922
3923 if (path == RF_PATH_A)
3924 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, 0x5b);
3925 else
3926 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, 0x5b);
3927
3928 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3929
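	/* scale the reported digital gain by 910/1024 and divide by 10
	 * (rounded) to obtain the gain-scaling value written to BIT_GS_PWSF
	 */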
3930 tmp_gs = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, BIT_RPT_DGAIN);
3931 tmp_gs = (tmp_gs * 910) >> 10;
3932 tmp_gs = DIV_ROUND_CLOSEST(tmp_gs, 10);
3933
3934 if (path == RF_PATH_A)
3935 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, tmp_gs);
3936 else
3937 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, tmp_gs);
3938
3939 dpk_info->dpk_gs[path] = tmp_gs;
3940 }
3941
3942 static void rtw8822c_dpk_cal_coef1(struct rtw_dev *rtwdev)
3943 {
3944 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3945 u32 offset[DPK_RF_PATH_NUM] = {0, 0x58};
3946 u32 i_scaling;
3947 u8 path;
3948
3949 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3950 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3951 rtw_write32(rtwdev, REG_NCTL0, 0x00001148);
3952 rtw_write32(rtwdev, REG_NCTL0, 0x00001149);
3953
3954 check_hw_ready(rtwdev, 0x2d9c, MASKBYTE0, 0x55);
3955
3956 rtw_write8(rtwdev, 0x1b10, 0x0);
3957 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3958
3959 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3960 i_scaling = 0x16c00 / dpk_info->dpk_gs[path];
3961
3962 rtw_write32_mask(rtwdev, 0x1b18 + offset[path], MASKHWORD,
3963 i_scaling);
3964 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3965 GENMASK(31, 28), 0x9);
3966 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3967 GENMASK(31, 28), 0x1);
3968 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
3969 GENMASK(31, 28), 0x0);
3970 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0 + offset[path],
3971 BIT(14), 0x0);
3972 }
3973 }
3974
3975 static void rtw8822c_dpk_on(struct rtw_dev *rtwdev, u8 path)
3976 {
3977 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3978
3979 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
3980
3981 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3982 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3983
3984 if (test_bit(path, dpk_info->dpk_path_ok))
3985 rtw8822c_dpk_cal_gs(rtwdev, path);
3986 }
3987
3988 static bool rtw8822c_dpk_check_pass(struct rtw_dev *rtwdev, bool is_fail,
3989 u32 dpk_txagc, u8 path)
3990 {
3991 bool result;
3992
3993 if (!is_fail) {
3994 if (rtw8822c_dpk_coef_read(rtwdev, path))
3995 result = true;
3996 else
3997 result = false;
3998 } else {
3999 result = false;
4000 }
4001
4002 rtw8822c_dpk_fill_result(rtwdev, dpk_txagc, path, result);
4003
4004 return result;
4005 }
4006
4007 static void rtw8822c_dpk_result_reset(struct rtw_dev *rtwdev)
4008 {
4009 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4010 u8 path;
4011
4012 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4013 clear_bit(path, dpk_info->dpk_path_ok);
4014 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4015 0x8 | (path << 1));
4016 rtw_write32_mask(rtwdev, 0x1b58, 0x0000007f, 0x0);
4017
4018 dpk_info->dpk_txagc[path] = 0;
4019 dpk_info->result[path] = 0;
4020 dpk_info->dpk_gs[path] = 0x5b;
4021 dpk_info->pre_pwsf[path] = 0;
4022 dpk_info->thermal_dpk[path] = rtw8822c_dpk_thermal_read(rtwdev,
4023 path);
4024 }
4025 }
4026
4027 static void rtw8822c_dpk_calibrate(struct rtw_dev *rtwdev, u8 path)
4028 {
4029 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4030 u32 dpk_txagc;
4031 u8 dpk_fail;
4032
4033 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk start\n", path);
4034
4035 dpk_txagc = rtw8822c_dpk_gainloss(rtwdev, path);
4036
4037 dpk_fail = rtw8822c_dpk_by_path(rtwdev, dpk_txagc, path);
4038
4039 if (!rtw8822c_dpk_check_pass(rtwdev, dpk_fail, dpk_txagc, path))
4040 rtw_err(rtwdev, "failed to do dpk calibration\n");
4041
4042 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk finish\n", path);
4043
4044 if (dpk_info->result[path])
4045 set_bit(path, dpk_info->dpk_path_ok);
4046 }
4047
4048 static void rtw8822c_dpk_path_select(struct rtw_dev *rtwdev)
4049 {
4050 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_A);
4051 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_B);
4052 rtw8822c_dpk_on(rtwdev, RF_PATH_A);
4053 rtw8822c_dpk_on(rtwdev, RF_PATH_B);
4054 rtw8822c_dpk_cal_coef1(rtwdev);
4055 }
4056
4057 static void rtw8822c_dpk_enable_disable(struct rtw_dev *rtwdev)
4058 {
4059 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4060 u32 mask = BIT(15) | BIT(14);
4061
4062 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4063
4064 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN,
4065 dpk_info->is_dpk_pwr_on);
4066 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN,
4067 dpk_info->is_dpk_pwr_on);
4068
4069 if (test_bit(RF_PATH_A, dpk_info->dpk_path_ok)) {
4070 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, mask, 0x0);
4071 rtw_write8(rtwdev, REG_DPD_CTL0_S0, dpk_info->dpk_gs[RF_PATH_A]);
4072 }
4073 if (test_bit(RF_PATH_B, dpk_info->dpk_path_ok)) {
4074 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, mask, 0x0);
4075 rtw_write8(rtwdev, REG_DPD_CTL0_S1, dpk_info->dpk_gs[RF_PATH_B]);
4076 }
4077 }
4078
4079 static void rtw8822c_dpk_reload_data(struct rtw_dev *rtwdev)
4080 {
4081 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4082 u8 path;
4083
4084 if (!test_bit(RF_PATH_A, dpk_info->dpk_path_ok) &&
4085 !test_bit(RF_PATH_B, dpk_info->dpk_path_ok) &&
4086 dpk_info->dpk_ch == 0)
4087 return;
4088
4089 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4090 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4091 0x8 | (path << 1));
4092 if (dpk_info->dpk_band == RTW_BAND_2G)
4093 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
4094 else
4095 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
4096
4097 rtw_write8(rtwdev, REG_DPD_AGC, dpk_info->dpk_txagc[path]);
4098
4099 rtw8822c_dpk_coef_write(rtwdev, path,
4100 test_bit(path, dpk_info->dpk_path_ok));
4101
4102 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4103
4104 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4105
4106 if (path == RF_PATH_A)
4107 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
4108 dpk_info->dpk_gs[path]);
4109 else
4110 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
4111 dpk_info->dpk_gs[path]);
4112 }
4113 rtw8822c_dpk_cal_coef1(rtwdev);
4114 }
4115
4116 static bool rtw8822c_dpk_reload(struct rtw_dev *rtwdev)
4117 {
4118 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4119 u8 channel;
4120
4121 dpk_info->is_reload = false;
4122
4123 channel = (u8)(rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK) & 0xff);
4124
4125 if (channel == dpk_info->dpk_ch) {
4126 rtw_dbg(rtwdev, RTW_DBG_RFK,
4127 "[DPK] DPK reload for CH%d!!\n", dpk_info->dpk_ch);
4128 rtw8822c_dpk_reload_data(rtwdev);
4129 dpk_info->is_reload = true;
4130 }
4131
4132 return dpk_info->is_reload;
4133 }
4134
4135 static void rtw8822c_do_dpk(struct rtw_dev *rtwdev)
4136 {
4137 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4138 struct rtw_backup_info bckp[DPK_BB_REG_NUM];
4139 u32 rf_reg_backup[DPK_RF_REG_NUM][DPK_RF_PATH_NUM];
4140 u32 bb_reg[DPK_BB_REG_NUM] = {
4141 0x520, 0x820, 0x824, 0x1c3c, 0x1d58, 0x1864,
4142 0x4164, 0x180c, 0x410c, 0x186c, 0x416c,
4143 0x1a14, 0x1e70, 0x80c, 0x1d70, 0x1e7c, 0x18a4, 0x41a4};
4144 u32 rf_reg[DPK_RF_REG_NUM] = {
4145 0x0, 0x1a, 0x55, 0x63, 0x87, 0x8f, 0xde};
4146 u8 path;
4147
4148 if (!dpk_info->is_dpk_pwr_on) {
4149 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] Skip DPK due to DPD PWR off\n");
4150 return;
4151 } else if (rtw8822c_dpk_reload(rtwdev)) {
4152 return;
4153 }
4154
4155 for (path = RF_PATH_A; path < DPK_RF_PATH_NUM; path++)
4156 ewma_thermal_init(&dpk_info->avg_thermal[path]);
4157
4158 rtw8822c_dpk_information(rtwdev);
4159
4160 rtw8822c_dpk_backup_registers(rtwdev, bb_reg, DPK_BB_REG_NUM, bckp);
4161 rtw8822c_dpk_backup_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4162
4163 rtw8822c_dpk_mac_bb_setting(rtwdev);
4164 rtw8822c_dpk_afe_setting(rtwdev, true);
4165 rtw8822c_dpk_pre_setting(rtwdev);
4166 rtw8822c_dpk_result_reset(rtwdev);
4167 rtw8822c_dpk_path_select(rtwdev);
4168 rtw8822c_dpk_afe_setting(rtwdev, false);
4169 rtw8822c_dpk_enable_disable(rtwdev);
4170
4171 rtw8822c_dpk_reload_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4172 for (path = 0; path < rtwdev->hal.rf_path_num; path++)
4173 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
4174 rtw8822c_dpk_restore_registers(rtwdev, DPK_BB_REG_NUM, bckp);
4175 }
4176
4177 static void rtw8822c_phy_calibration(struct rtw_dev *rtwdev)
4178 {
4179 rtw8822c_rfk_power_save(rtwdev, false);
4180 rtw8822c_do_gapk(rtwdev);
4181 rtw8822c_do_iqk(rtwdev);
4182 rtw8822c_do_dpk(rtwdev);
4183 rtw8822c_rfk_power_save(rtwdev, true);
4184 }
4185
4186 static void rtw8822c_dpk_track(struct rtw_dev *rtwdev)
4187 {
4188 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4189 u8 path;
4190 u8 thermal_value[DPK_RF_PATH_NUM] = {0};
4191 s8 offset[DPK_RF_PATH_NUM], delta_dpk[DPK_RF_PATH_NUM];
4192
4193 if (dpk_info->thermal_dpk[0] == 0 && dpk_info->thermal_dpk[1] == 0)
4194 return;
4195
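	/* compare the moving-average thermal reading against the value
	 * captured when DPK ran; if the drift-derived offset changed,
	 * update the per-path 7-bit pwsf compensation
	 */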
4196 for (path = 0; path < DPK_RF_PATH_NUM; path++) {
4197 thermal_value[path] = rtw8822c_dpk_thermal_read(rtwdev, path);
4198 ewma_thermal_add(&dpk_info->avg_thermal[path],
4199 thermal_value[path]);
4200 thermal_value[path] =
4201 ewma_thermal_read(&dpk_info->avg_thermal[path]);
4202 delta_dpk[path] = dpk_info->thermal_dpk[path] -
4203 thermal_value[path];
4204 offset[path] = delta_dpk[path] -
4205 dpk_info->thermal_dpk_delta[path];
4206 offset[path] &= 0x7f;
4207
4208 if (offset[path] != dpk_info->pre_pwsf[path]) {
4209 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4210 0x8 | (path << 1));
4211 rtw_write32_mask(rtwdev, 0x1b58, GENMASK(6, 0),
4212 offset[path]);
4213 dpk_info->pre_pwsf[path] = offset[path];
4214 }
4215 }
4216 }
4217
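/* Replicate the 7-bit crystal cap code into bits 13:7 and 6:0 so a single
 * masked write programs both crystal cap fields.
 */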
4218 #define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
4219 static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
4220 {
4221 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4222 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4223 u32 val = 0;
4224
4225 val = XCAP_EXTEND(crystal_cap);
4226 cfo->crystal_cap = crystal_cap;
4227 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
4228 }
4229
4230 static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
4231 {
4232 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4233 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4234
4235 if (cfo->crystal_cap == crystal_cap)
4236 return;
4237
4238 rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
4239 }
4240
4241 static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
4242 {
4243 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4244 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4245
4246 cfo->is_adjust = true;
4247
4248 if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
4249 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
4250 else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
4251 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
4252 }
4253
4254 static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
4255 {
4256 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4257 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4258
4259 cfo->crystal_cap = rtwdev->efuse.crystal_cap;
4260 cfo->is_adjust = true;
4261 }
4262
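/* Scale a raw CFO report to kHz: x * 2.5, computed as (x << 1) + (x >> 1). */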
4263 #define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
4264 static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
4265 {
4266 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4267 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4268 s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
4269 u8 i;
4270
4271 for (i = 0; i < path_num; i++) {
4272 cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
4273
4274 if (cfo->cfo_cnt[i])
4275 cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
4276 else
4277 cfo_avg = 0;
4278
4279 cfo_path_sum += cfo_avg;
4280 }
4281
4282 for (i = 0; i < path_num; i++) {
4283 cfo->cfo_tail[i] = 0;
4284 cfo->cfo_cnt[i] = 0;
4285 }
4286
4287 return cfo_path_sum / path_num;
4288 }
4289
4290 static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
4291 {
4292 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4293 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4294
4295 if (!cfo->is_adjust) {
4296 if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
4297 cfo->is_adjust = true;
4298 } else {
4299 if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
4300 cfo->is_adjust = false;
4301 }
4302
4303 if (!rtw_coex_disabled(rtwdev)) {
4304 cfo->is_adjust = false;
4305 rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
4306 }
4307 }
4308
4309 static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
4310 {
4311 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4312 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4313 u8 path_num = rtwdev->hal.rf_path_num;
4314 s8 crystal_cap = cfo->crystal_cap;
4315 s32 cfo_avg = 0;
4316
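	/* CFO tracking is only meaningful when exactly one station is
	 * associated; otherwise step the crystal cap back towards the
	 * efuse default
	 */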
4317 if (rtwdev->sta_cnt != 1) {
4318 rtw8822c_cfo_tracking_reset(rtwdev);
4319 return;
4320 }
4321
4322 if (cfo->packet_count == cfo->packet_count_pre)
4323 return;
4324
4325 cfo->packet_count_pre = cfo->packet_count;
4326 cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
4327 rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
4328
4329 if (cfo->is_adjust) {
4330 if (cfo_avg > CFO_TRK_ADJ_TH)
4331 crystal_cap++;
4332 else if (cfo_avg < -CFO_TRK_ADJ_TH)
4333 crystal_cap--;
4334
4335 crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
4336 rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
4337 }
4338 }
4339
4340 static const struct rtw_phy_cck_pd_reg
4341 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
4342 {
4343 {0x1ac8, 0x00ff, 0x1ad0, 0x01f},
4344 {0x1ac8, 0xff00, 0x1ad0, 0x3e0}
4345 },
4346 {
4347 {0x1acc, 0x00ff, 0x1ad0, 0x01F00000},
4348 {0x1acc, 0xff00, 0x1ad0, 0x3E000000}
4349 },
4350 };
4351
4352 #define RTW_CCK_PD_MAX 255
4353 #define RTW_CCK_CS_MAX 31
4354 #define RTW_CCK_CS_ERR1 27
4355 #define RTW_CCK_CS_ERR2 29
4356 static void
4357 rtw8822c_phy_cck_pd_set_reg(struct rtw_dev *rtwdev,
4358 s8 pd_diff, s8 cs_diff, u8 bw, u8 nrx)
4359 {
4360 u32 pd, cs;
4361
4362 if (WARN_ON(bw > RTW_CHANNEL_WIDTH_40 || nrx >= RTW_RF_PATH_MAX))
4363 return;
4364
4365 pd = rtw_read32_mask(rtwdev,
4366 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4367 rtw8822c_cck_pd_reg[bw][nrx].mask_pd);
4368 cs = rtw_read32_mask(rtwdev,
4369 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4370 rtw8822c_cck_pd_reg[bw][nrx].mask_cs);
4371 pd += pd_diff;
4372 cs += cs_diff;
4373 if (pd > RTW_CCK_PD_MAX)
4374 pd = RTW_CCK_PD_MAX;
4375 if (cs == RTW_CCK_CS_ERR1 || cs == RTW_CCK_CS_ERR2)
4376 cs++;
4377 else if (cs > RTW_CCK_CS_MAX)
4378 cs = RTW_CCK_CS_MAX;
4379 rtw_write32_mask(rtwdev,
4380 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4381 rtw8822c_cck_pd_reg[bw][nrx].mask_pd,
4382 pd);
4383 rtw_write32_mask(rtwdev,
4384 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4385 rtw8822c_cck_pd_reg[bw][nrx].mask_cs,
4386 cs);
4387
4388 rtw_dbg(rtwdev, RTW_DBG_PHY,
4389 "is_linked=%d, bw=%d, nrx=%d, cs_ratio=0x%x, pd_th=0x%x\n",
4390 rtw_is_assoc(rtwdev), bw, nrx, cs, pd);
4391 }
4392
4393 static void rtw8822c_phy_cck_pd_set(struct rtw_dev *rtwdev, u8 new_lvl)
4394 {
4395 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4396 s8 pd_lvl[CCK_PD_LV_MAX] = {0, 2, 4, 6, 8};
4397 s8 cs_lvl[CCK_PD_LV_MAX] = {0, 2, 2, 2, 4};
4398 u8 cur_lvl;
4399 u8 nrx, bw;
4400
4401 nrx = (u8)rtw_read32_mask(rtwdev, 0x1a2c, 0x60000);
4402 bw = (u8)rtw_read32_mask(rtwdev, 0x9b0, 0xc);
4403
4404 rtw_dbg(rtwdev, RTW_DBG_PHY, "lv: (%d) -> (%d) bw=%d nr=%d cck_fa_avg=%d\n",
4405 dm_info->cck_pd_lv[bw][nrx], new_lvl, bw, nrx,
4406 dm_info->cck_fa_avg);
4407
4408 if (dm_info->cck_pd_lv[bw][nrx] == new_lvl)
4409 return;
4410
4411 cur_lvl = dm_info->cck_pd_lv[bw][nrx];
4412
4413 /* update cck pd info */
4414 dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
4415
4416 rtw8822c_phy_cck_pd_set_reg(rtwdev,
4417 pd_lvl[new_lvl] - pd_lvl[cur_lvl],
4418 cs_lvl[new_lvl] - cs_lvl[cur_lvl],
4419 bw, nrx);
4420 dm_info->cck_pd_lv[bw][nrx] = new_lvl;
4421 }
4422
4423 #define PWR_TRACK_MASK 0x7f
4424 static void rtw8822c_pwrtrack_set(struct rtw_dev *rtwdev, u8 rf_path)
4425 {
4426 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4427
4428 switch (rf_path) {
4429 case RF_PATH_A:
4430 rtw_write32_mask(rtwdev, 0x18a0, PWR_TRACK_MASK,
4431 dm_info->delta_power_index[rf_path]);
4432 break;
4433 case RF_PATH_B:
4434 rtw_write32_mask(rtwdev, 0x41a0, PWR_TRACK_MASK,
4435 dm_info->delta_power_index[rf_path]);
4436 break;
4437 default:
4438 break;
4439 }
4440 }
4441
4442 static void rtw8822c_pwr_track_stats(struct rtw_dev *rtwdev, u8 path)
4443 {
4444 u8 thermal_value;
4445
4446 if (rtwdev->efuse.thermal_meter[path] == 0xff)
4447 return;
4448
4449 thermal_value = rtw_read_rf(rtwdev, path, RF_T_METER, 0x7e);
4450 rtw_phy_pwrtrack_avg(rtwdev, thermal_value, path);
4451 }
4452
4453 static void rtw8822c_pwr_track_path(struct rtw_dev *rtwdev,
4454 struct rtw_swing_table *swing_table,
4455 u8 path)
4456 {
4457 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4458 u8 delta;
4459
4460 delta = rtw_phy_pwrtrack_get_delta(rtwdev, path);
4461 dm_info->delta_power_index[path] =
4462 rtw_phy_pwrtrack_get_pwridx(rtwdev, swing_table, path, path,
4463 delta);
4464 rtw8822c_pwrtrack_set(rtwdev, path);
4465 }
4466
4467 static void __rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4468 {
4469 struct rtw_swing_table swing_table;
4470 u8 i;
4471
4472 rtw_phy_config_swing_table(rtwdev, &swing_table);
4473
4474 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4475 rtw8822c_pwr_track_stats(rtwdev, i);
4476 if (rtw_phy_pwrtrack_need_lck(rtwdev))
4477 rtw8822c_do_lck(rtwdev);
4478 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4479 rtw8822c_pwr_track_path(rtwdev, &swing_table, i);
4480 }
4481
4482 static void rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4483 {
4484 struct rtw_efuse *efuse = &rtwdev->efuse;
4485 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4486
4487 if (efuse->power_track_type != 0)
4488 return;
4489
4490 if (!dm_info->pwr_trk_triggered) {
4491 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4492 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x00);
4493 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4494
4495 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4496 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x00);
4497 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4498
4499 dm_info->pwr_trk_triggered = true;
4500 return;
4501 }
4502
4503 __rtw8822c_pwr_track(rtwdev);
4504 dm_info->pwr_trk_triggered = false;
4505 }
4506
4507 static void rtw8822c_adaptivity_init(struct rtw_dev *rtwdev)
4508 {
4509 rtw_phy_set_edcca_th(rtwdev, RTW8822C_EDCCA_MAX, RTW8822C_EDCCA_MAX);
4510
4511 /* mac edcca state setting */
4512 rtw_write32_clr(rtwdev, REG_TX_PTCL_CTRL, BIT_DIS_EDCCA);
4513 rtw_write32_set(rtwdev, REG_RD_CTRL, BIT_EDCCA_MSK_CNTDOWN_EN);
4514
4515 	/* edcca decision opt */
4516 rtw_write32_clr(rtwdev, REG_EDCCA_DECISION, BIT_EDCCA_OPTION);
4517 }
4518
4519 static void rtw8822c_adaptivity(struct rtw_dev *rtwdev)
4520 {
4521 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4522 s8 l2h, h2l;
4523 u8 igi;
4524
4525 igi = dm_info->igi_history[0];
4526 if (dm_info->edcca_mode == RTW_EDCCA_NORMAL) {
4527 l2h = max_t(s8, igi + EDCCA_IGI_L2H_DIFF, EDCCA_TH_L2H_LB);
4528 h2l = l2h - EDCCA_L2H_H2L_DIFF_NORMAL;
4529 } else {
4530 if (igi < dm_info->l2h_th_ini - EDCCA_ADC_BACKOFF)
4531 l2h = igi + EDCCA_ADC_BACKOFF;
4532 else
4533 l2h = dm_info->l2h_th_ini;
4534 h2l = l2h - EDCCA_L2H_H2L_DIFF;
4535 }
4536
4537 rtw_phy_set_edcca_th(rtwdev, l2h, h2l);
4538 }
4539
4540 static void rtw8822c_fill_txdesc_checksum(struct rtw_dev *rtwdev,
4541 struct rtw_tx_pkt_info *pkt_info,
4542 u8 *txdesc)
4543 {
4544 const struct rtw_chip_info *chip = rtwdev->chip;
4545 size_t words;
4546
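	/* the checksum covers the TX descriptor plus any packet offset area,
	 * counted in 16-bit words
	 */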
4547 words = (pkt_info->pkt_offset * 8 + chip->tx_pkt_desc_sz) / 2;
4548
4549 fill_txdesc_checksum_common(txdesc, words);
4550 }
4551
4552 static const struct rtw_pwr_seq_cmd trans_carddis_to_cardemu_8822c[] = {
4553 {0x0086,
4554 RTW_PWR_CUT_ALL_MSK,
4555 RTW_PWR_INTF_SDIO_MSK,
4556 RTW_PWR_ADDR_SDIO,
4557 RTW_PWR_CMD_WRITE, BIT(0), 0},
4558 {0x0086,
4559 RTW_PWR_CUT_ALL_MSK,
4560 RTW_PWR_INTF_SDIO_MSK,
4561 RTW_PWR_ADDR_SDIO,
4562 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4563 {0x002E,
4564 RTW_PWR_CUT_ALL_MSK,
4565 RTW_PWR_INTF_ALL_MSK,
4566 RTW_PWR_ADDR_MAC,
4567 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4568 {0x002D,
4569 RTW_PWR_CUT_ALL_MSK,
4570 RTW_PWR_INTF_ALL_MSK,
4571 RTW_PWR_ADDR_MAC,
4572 RTW_PWR_CMD_WRITE, BIT(0), 0},
4573 {0x007F,
4574 RTW_PWR_CUT_ALL_MSK,
4575 RTW_PWR_INTF_ALL_MSK,
4576 RTW_PWR_ADDR_MAC,
4577 RTW_PWR_CMD_WRITE, BIT(7), 0},
4578 {0x004A,
4579 RTW_PWR_CUT_ALL_MSK,
4580 RTW_PWR_INTF_USB_MSK,
4581 RTW_PWR_ADDR_MAC,
4582 RTW_PWR_CMD_WRITE, BIT(0), 0},
4583 {0x0005,
4584 RTW_PWR_CUT_ALL_MSK,
4585 RTW_PWR_INTF_ALL_MSK,
4586 RTW_PWR_ADDR_MAC,
4587 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4) | BIT(7), 0},
4588 {0xFFFF,
4589 RTW_PWR_CUT_ALL_MSK,
4590 RTW_PWR_INTF_ALL_MSK,
4591 0,
4592 RTW_PWR_CMD_END, 0, 0},
4593 };
4594
4595 static const struct rtw_pwr_seq_cmd trans_cardemu_to_act_8822c[] = {
4596 {0x0000,
4597 RTW_PWR_CUT_ALL_MSK,
4598 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4599 RTW_PWR_ADDR_MAC,
4600 RTW_PWR_CMD_WRITE, BIT(5), 0},
4601 {0x0005,
4602 RTW_PWR_CUT_ALL_MSK,
4603 RTW_PWR_INTF_ALL_MSK,
4604 RTW_PWR_ADDR_MAC,
4605 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3) | BIT(2)), 0},
4606 {0x0075,
4607 RTW_PWR_CUT_ALL_MSK,
4608 RTW_PWR_INTF_PCI_MSK,
4609 RTW_PWR_ADDR_MAC,
4610 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4611 {0x0006,
4612 RTW_PWR_CUT_ALL_MSK,
4613 RTW_PWR_INTF_ALL_MSK,
4614 RTW_PWR_ADDR_MAC,
4615 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4616 {0x0075,
4617 RTW_PWR_CUT_ALL_MSK,
4618 RTW_PWR_INTF_PCI_MSK,
4619 RTW_PWR_ADDR_MAC,
4620 RTW_PWR_CMD_WRITE, BIT(0), 0},
4621 {0xFF1A,
4622 RTW_PWR_CUT_ALL_MSK,
4623 RTW_PWR_INTF_USB_MSK,
4624 RTW_PWR_ADDR_MAC,
4625 RTW_PWR_CMD_WRITE, 0xFF, 0},
4626 {0x002E,
4627 RTW_PWR_CUT_ALL_MSK,
4628 RTW_PWR_INTF_ALL_MSK,
4629 RTW_PWR_ADDR_MAC,
4630 RTW_PWR_CMD_WRITE, BIT(3), 0},
4631 {0x0006,
4632 RTW_PWR_CUT_ALL_MSK,
4633 RTW_PWR_INTF_ALL_MSK,
4634 RTW_PWR_ADDR_MAC,
4635 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4636 {0x0005,
4637 RTW_PWR_CUT_ALL_MSK,
4638 RTW_PWR_INTF_ALL_MSK,
4639 RTW_PWR_ADDR_MAC,
4640 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3)), 0},
4641 {0x1018,
4642 RTW_PWR_CUT_ALL_MSK,
4643 RTW_PWR_INTF_ALL_MSK,
4644 RTW_PWR_ADDR_MAC,
4645 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4646 {0x0005,
4647 RTW_PWR_CUT_ALL_MSK,
4648 RTW_PWR_INTF_ALL_MSK,
4649 RTW_PWR_ADDR_MAC,
4650 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4651 {0x0005,
4652 RTW_PWR_CUT_ALL_MSK,
4653 RTW_PWR_INTF_ALL_MSK,
4654 RTW_PWR_ADDR_MAC,
4655 RTW_PWR_CMD_POLLING, BIT(0), 0},
4656 {0x0074,
4657 RTW_PWR_CUT_ALL_MSK,
4658 RTW_PWR_INTF_PCI_MSK,
4659 RTW_PWR_ADDR_MAC,
4660 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4661 {0x0071,
4662 RTW_PWR_CUT_ALL_MSK,
4663 RTW_PWR_INTF_PCI_MSK,
4664 RTW_PWR_ADDR_MAC,
4665 RTW_PWR_CMD_WRITE, BIT(4), 0},
4666 {0x0062,
4667 RTW_PWR_CUT_ALL_MSK,
4668 RTW_PWR_INTF_PCI_MSK,
4669 RTW_PWR_ADDR_MAC,
4670 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)),
4671 (BIT(7) | BIT(6) | BIT(5))},
4672 {0x0061,
4673 RTW_PWR_CUT_ALL_MSK,
4674 RTW_PWR_INTF_PCI_MSK,
4675 RTW_PWR_ADDR_MAC,
4676 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)), 0},
4677 {0x001F,
4678 RTW_PWR_CUT_ALL_MSK,
4679 RTW_PWR_INTF_ALL_MSK,
4680 RTW_PWR_ADDR_MAC,
4681 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4682 {0x00EF,
4683 RTW_PWR_CUT_ALL_MSK,
4684 RTW_PWR_INTF_ALL_MSK,
4685 RTW_PWR_ADDR_MAC,
4686 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4687 {0x1045,
4688 RTW_PWR_CUT_ALL_MSK,
4689 RTW_PWR_INTF_ALL_MSK,
4690 RTW_PWR_ADDR_MAC,
4691 RTW_PWR_CMD_WRITE, BIT(4), BIT(4)},
4692 {0x0010,
4693 RTW_PWR_CUT_ALL_MSK,
4694 RTW_PWR_INTF_ALL_MSK,
4695 RTW_PWR_ADDR_MAC,
4696 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4697 {0x1064,
4698 RTW_PWR_CUT_ALL_MSK,
4699 RTW_PWR_INTF_ALL_MSK,
4700 RTW_PWR_ADDR_MAC,
4701 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4702 {0xFFFF,
4703 RTW_PWR_CUT_ALL_MSK,
4704 RTW_PWR_INTF_ALL_MSK,
4705 0,
4706 RTW_PWR_CMD_END, 0, 0},
4707 };
4708
4709 static const struct rtw_pwr_seq_cmd trans_act_to_cardemu_8822c[] = {
4710 {0x0093,
4711 RTW_PWR_CUT_ALL_MSK,
4712 RTW_PWR_INTF_ALL_MSK,
4713 RTW_PWR_ADDR_MAC,
4714 RTW_PWR_CMD_WRITE, BIT(3), 0},
4715 {0x001F,
4716 RTW_PWR_CUT_ALL_MSK,
4717 RTW_PWR_INTF_ALL_MSK,
4718 RTW_PWR_ADDR_MAC,
4719 RTW_PWR_CMD_WRITE, 0xFF, 0},
4720 {0x00EF,
4721 RTW_PWR_CUT_ALL_MSK,
4722 RTW_PWR_INTF_ALL_MSK,
4723 RTW_PWR_ADDR_MAC,
4724 RTW_PWR_CMD_WRITE, 0xFF, 0},
4725 {0x1045,
4726 RTW_PWR_CUT_ALL_MSK,
4727 RTW_PWR_INTF_ALL_MSK,
4728 RTW_PWR_ADDR_MAC,
4729 RTW_PWR_CMD_WRITE, BIT(4), 0},
4730 {0xFF1A,
4731 RTW_PWR_CUT_ALL_MSK,
4732 RTW_PWR_INTF_USB_MSK,
4733 RTW_PWR_ADDR_MAC,
4734 RTW_PWR_CMD_WRITE, 0xFF, 0x30},
4735 {0x0049,
4736 RTW_PWR_CUT_ALL_MSK,
4737 RTW_PWR_INTF_ALL_MSK,
4738 RTW_PWR_ADDR_MAC,
4739 RTW_PWR_CMD_WRITE, BIT(1), 0},
4740 {0x0006,
4741 RTW_PWR_CUT_ALL_MSK,
4742 RTW_PWR_INTF_ALL_MSK,
4743 RTW_PWR_ADDR_MAC,
4744 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4745 {0x0002,
4746 RTW_PWR_CUT_ALL_MSK,
4747 RTW_PWR_INTF_ALL_MSK,
4748 RTW_PWR_ADDR_MAC,
4749 RTW_PWR_CMD_WRITE, BIT(1), 0},
4750 {0x0005,
4751 RTW_PWR_CUT_ALL_MSK,
4752 RTW_PWR_INTF_ALL_MSK,
4753 RTW_PWR_ADDR_MAC,
4754 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4755 {0x0005,
4756 RTW_PWR_CUT_ALL_MSK,
4757 RTW_PWR_INTF_ALL_MSK,
4758 RTW_PWR_ADDR_MAC,
4759 RTW_PWR_CMD_POLLING, BIT(1), 0},
4760 {0x0000,
4761 RTW_PWR_CUT_ALL_MSK,
4762 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4763 RTW_PWR_ADDR_MAC,
4764 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4765 {0xFFFF,
4766 RTW_PWR_CUT_ALL_MSK,
4767 RTW_PWR_INTF_ALL_MSK,
4768 0,
4769 RTW_PWR_CMD_END, 0, 0},
4770 };
4771
4772 static const struct rtw_pwr_seq_cmd trans_cardemu_to_carddis_8822c[] = {
4773 {0x0005,
4774 RTW_PWR_CUT_ALL_MSK,
4775 RTW_PWR_INTF_SDIO_MSK,
4776 RTW_PWR_ADDR_MAC,
4777 RTW_PWR_CMD_WRITE, BIT(7), BIT(7)},
4778 {0x0007,
4779 RTW_PWR_CUT_ALL_MSK,
4780 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4781 RTW_PWR_ADDR_MAC,
4782 RTW_PWR_CMD_WRITE, 0xFF, 0x00},
4783 {0x0067,
4784 RTW_PWR_CUT_ALL_MSK,
4785 RTW_PWR_INTF_ALL_MSK,
4786 RTW_PWR_ADDR_MAC,
4787 RTW_PWR_CMD_WRITE, BIT(5), 0},
4788 {0x004A,
4789 RTW_PWR_CUT_ALL_MSK,
4790 RTW_PWR_INTF_USB_MSK,
4791 RTW_PWR_ADDR_MAC,
4792 RTW_PWR_CMD_WRITE, BIT(0), 0},
4793 {0x0081,
4794 RTW_PWR_CUT_ALL_MSK,
4795 RTW_PWR_INTF_ALL_MSK,
4796 RTW_PWR_ADDR_MAC,
4797 RTW_PWR_CMD_WRITE, BIT(7) | BIT(6), 0},
4798 {0x0090,
4799 RTW_PWR_CUT_ALL_MSK,
4800 RTW_PWR_INTF_ALL_MSK,
4801 RTW_PWR_ADDR_MAC,
4802 RTW_PWR_CMD_WRITE, BIT(1), 0},
4803 {0x0092,
4804 RTW_PWR_CUT_ALL_MSK,
4805 RTW_PWR_INTF_PCI_MSK,
4806 RTW_PWR_ADDR_MAC,
4807 RTW_PWR_CMD_WRITE, 0xFF, 0x20},
4808 {0x0093,
4809 RTW_PWR_CUT_ALL_MSK,
4810 RTW_PWR_INTF_PCI_MSK,
4811 RTW_PWR_ADDR_MAC,
4812 RTW_PWR_CMD_WRITE, 0xFF, 0x04},
4813 {0x0005,
4814 RTW_PWR_CUT_ALL_MSK,
4815 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4816 RTW_PWR_ADDR_MAC,
4817 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4), BIT(3)},
4818 {0x0005,
4819 RTW_PWR_CUT_ALL_MSK,
4820 RTW_PWR_INTF_PCI_MSK,
4821 RTW_PWR_ADDR_MAC,
4822 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4823 {0x0086,
4824 RTW_PWR_CUT_ALL_MSK,
4825 RTW_PWR_INTF_SDIO_MSK,
4826 RTW_PWR_ADDR_SDIO,
4827 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4828 {0xFFFF,
4829 RTW_PWR_CUT_ALL_MSK,
4830 RTW_PWR_INTF_ALL_MSK,
4831 0,
4832 RTW_PWR_CMD_END, 0, 0},
4833 };
4834
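/*
 * NULL-terminated lists of the power sub-sequences above; the power-on/off
 * parser runs them in order.
 */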
4835 static const struct rtw_pwr_seq_cmd * const card_enable_flow_8822c[] = {
4836 trans_carddis_to_cardemu_8822c,
4837 trans_cardemu_to_act_8822c,
4838 NULL
4839 };
4840
4841 static const struct rtw_pwr_seq_cmd * const card_disable_flow_8822c[] = {
4842 trans_act_to_cardemu_8822c,
4843 trans_cardemu_to_carddis_8822c,
4844 NULL
4845 };
4846
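/*
 * Interface PHY parameter fixups: the 8822C needs none, so each table below
 * holds only its 0xFFFF terminator entry.
 */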
4847 static const struct rtw_intf_phy_para usb2_param_8822c[] = {
4848 {0xFFFF, 0x00,
4849 RTW_IP_SEL_PHY,
4850 RTW_INTF_PHY_CUT_ALL,
4851 RTW_INTF_PHY_PLATFORM_ALL},
4852 };
4853
4854 static const struct rtw_intf_phy_para usb3_param_8822c[] = {
4855 {0xFFFF, 0x0000,
4856 RTW_IP_SEL_PHY,
4857 RTW_INTF_PHY_CUT_ALL,
4858 RTW_INTF_PHY_PLATFORM_ALL},
4859 };
4860
4861 static const struct rtw_intf_phy_para pcie_gen1_param_8822c[] = {
4862 {0xFFFF, 0x0000,
4863 RTW_IP_SEL_PHY,
4864 RTW_INTF_PHY_CUT_ALL,
4865 RTW_INTF_PHY_PLATFORM_ALL},
4866 };
4867
4868 static const struct rtw_intf_phy_para pcie_gen2_param_8822c[] = {
4869 {0xFFFF, 0x0000,
4870 RTW_IP_SEL_PHY,
4871 RTW_INTF_PHY_CUT_ALL,
4872 RTW_INTF_PHY_PLATFORM_ALL},
4873 };
4874
4875 static const struct rtw_intf_phy_para_table phy_para_table_8822c = {
4876 .usb2_para = usb2_param_8822c,
4877 .usb3_para = usb3_param_8822c,
4878 .gen1_para = pcie_gen1_param_8822c,
4879 .gen2_para = pcie_gen2_param_8822c,
4880 .n_usb2_para = ARRAY_SIZE(usb2_param_8822c),
4881 .n_usb3_para = ARRAY_SIZE(usb3_param_8822c),
4882 .n_gen1_para = ARRAY_SIZE(pcie_gen1_param_8822c),
4883 .n_gen2_para = ARRAY_SIZE(pcie_gen2_param_8822c),
4884 };
4885
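/* Initial gain (IGI) fields for RF paths A and B, both in register 0x1d70 */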
4886 static const struct rtw_hw_reg rtw8822c_dig[] = {
4887 [0] = { .addr = 0x1d70, .mask = 0x7f },
4888 [1] = { .addr = 0x1d70, .mask = 0x7f00 },
4889 };
4890
4891 static const struct rtw_ltecoex_addr rtw8822c_ltecoex_addr = {
4892 .ctrl = LTECOEX_ACCESS_CTRL,
4893 .wdata = LTECOEX_WRITE_DATA,
4894 .rdata = LTECOEX_READ_DATA,
4895 };
4896
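/* Reserved TX page counts per row: {high, normal, low, extra, gap} queues */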
4897 static const struct rtw_page_table page_table_8822c[] = {
4898 {64, 64, 64, 64, 1},
4899 {64, 64, 64, 64, 1},
4900 {64, 64, 0, 0, 1},
4901 {64, 64, 64, 0, 1},
4902 {64, 64, 64, 64, 1},
4903 };
4904
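/* Per-row DMA ring mapping for the {VO, VI, BE, BK, MG, HI} TX queues */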
4905 static const struct rtw_rqpn rqpn_table_8822c[] = {
4906 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4907 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4908 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4909 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4910 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4911 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4912 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4913 RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_HIGH,
4914 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4915 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4916 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4917 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4918 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4919 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4920 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4921 };
4922
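/* FIFO page bookkeeping registers (reserved/available) per DMA priority queue */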
4923 static const struct rtw_prioq_addrs prioq_addrs_8822c = {
4924 .prio[RTW_DMA_MAPPING_EXTRA] = {
4925 .rsvd = REG_FIFOPAGE_INFO_4, .avail = REG_FIFOPAGE_INFO_4 + 2,
4926 },
4927 .prio[RTW_DMA_MAPPING_LOW] = {
4928 .rsvd = REG_FIFOPAGE_INFO_2, .avail = REG_FIFOPAGE_INFO_2 + 2,
4929 },
4930 .prio[RTW_DMA_MAPPING_NORMAL] = {
4931 .rsvd = REG_FIFOPAGE_INFO_3, .avail = REG_FIFOPAGE_INFO_3 + 2,
4932 },
4933 .prio[RTW_DMA_MAPPING_HIGH] = {
4934 .rsvd = REG_FIFOPAGE_INFO_1, .avail = REG_FIFOPAGE_INFO_1 + 2,
4935 },
4936 .wsize = true,
4937 };
4938
4939 static const struct rtw_chip_ops rtw8822c_ops = {
4940 .power_on = rtw_power_on,
4941 .power_off = rtw_power_off,
4942 .phy_set_param = rtw8822c_phy_set_param,
4943 .read_efuse = rtw8822c_read_efuse,
4944 .query_phy_status = query_phy_status,
4945 .set_channel = rtw8822c_set_channel,
4946 .mac_init = rtw8822c_mac_init,
4947 .dump_fw_crash = rtw8822c_dump_fw_crash,
4948 .read_rf = rtw_phy_read_rf,
4949 .write_rf = rtw_phy_write_rf_reg_mix,
4950 .set_tx_power_index = rtw8822c_set_tx_power_index,
4951 .set_antenna = rtw8822c_set_antenna,
4952 .cfg_ldo25 = rtw8822c_cfg_ldo25,
4953 .false_alarm_statistics = rtw8822c_false_alarm_statistics,
4954 .dpk_track = rtw8822c_dpk_track,
4955 .phy_calibration = rtw8822c_phy_calibration,
4956 .cck_pd_set = rtw8822c_phy_cck_pd_set,
4957 .pwr_track = rtw8822c_pwr_track,
4958 .config_bfee = rtw8822c_bf_config_bfee,
4959 .set_gid_table = rtw_bf_set_gid_table,
4960 .cfg_csi_rate = rtw_bf_cfg_csi_rate,
4961 .adaptivity_init = rtw8822c_adaptivity_init,
4962 .adaptivity = rtw8822c_adaptivity,
4963 .cfo_init = rtw8822c_cfo_init,
4964 .cfo_track = rtw8822c_cfo_track,
4965 .config_tx_path = rtw8822c_config_tx_path,
4966 .config_txrx_mode = rtw8822c_config_trx_mode,
4967 .fill_txdesc_checksum = rtw8822c_fill_txdesc_checksum,
4968
4969 .coex_set_init = rtw8822c_coex_cfg_init,
4970 .coex_set_ant_switch = NULL,
4971 .coex_set_gnt_fix = rtw8822c_coex_cfg_gnt_fix,
4972 .coex_set_gnt_debug = rtw8822c_coex_cfg_gnt_debug,
4973 .coex_set_rfe_type = rtw8822c_coex_cfg_rfe_type,
4974 .coex_set_wl_tx_power = rtw8822c_coex_cfg_wl_tx_power,
4975 .coex_set_wl_rx_gain = rtw8822c_coex_cfg_wl_rx_gain,
4976 };
4977
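/*
 * Coex priority tables selected by case index from the coex algorithm; the
 * non-shared-antenna table uses the case numbers starting at 100 noted below.
 */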
4978 /* Shared-Antenna Coex Table */
4979 static const struct coex_table_para table_sant_8822c[] = {
4980 {0xffffffff, 0xffffffff}, /* case-0 */
4981 {0x55555555, 0x55555555},
4982 {0x66555555, 0x66555555},
4983 {0xaaaaaaaa, 0xaaaaaaaa},
4984 {0x5a5a5a5a, 0x5a5a5a5a},
4985 {0xfafafafa, 0xfafafafa}, /* case-5 */
4986 {0x6a5a5555, 0xaaaaaaaa},
4987 {0x6a5a56aa, 0x6a5a56aa},
4988 {0x6a5a5a5a, 0x6a5a5a5a},
4989 {0x66555555, 0x5a5a5a5a},
4990 {0x66555555, 0x6a5a5a5a}, /* case-10 */
4991 {0x66555555, 0x6a5a5aaa},
4992 {0x66555555, 0x5a5a5aaa},
4993 {0x66555555, 0x6aaa5aaa},
4994 {0x66555555, 0xaaaa5aaa},
4995 {0x66555555, 0xaaaaaaaa}, /* case-15 */
4996 {0xffff55ff, 0xfafafafa},
4997 {0xffff55ff, 0x6afa5afa},
4998 {0xaaffffaa, 0xfafafafa},
4999 {0xaa5555aa, 0x5a5a5a5a},
5000 {0xaa5555aa, 0x6a5a5a5a}, /* case-20 */
5001 {0xaa5555aa, 0xaaaaaaaa},
5002 {0xffffffff, 0x5a5a5a5a},
5003 {0xffffffff, 0x5a5a5a5a},
5004 {0xffffffff, 0x55555555},
5005 {0xffffffff, 0x5a5a5aaa}, /* case-25 */
5006 {0x55555555, 0x5a5a5a5a},
5007 {0x55555555, 0xaaaaaaaa},
5008 {0x55555555, 0x6a5a6a5a},
5009 {0x66556655, 0x66556655},
5010 {0x66556aaa, 0x6a5a6aaa}, /* case-30 */
5011 {0xffffffff, 0x5aaa5aaa},
5012 {0x56555555, 0x5a5a5aaa},
5013 {0xdaffdaff, 0xdaffdaff},
5014 {0xddffddff, 0xddffddff},
5015 };
5016
5017 /* Non-Shared-Antenna Coex Table */
5018 static const struct coex_table_para table_nsant_8822c[] = {
5019 {0xffffffff, 0xffffffff}, /* case-100 */
5020 {0x55555555, 0x55555555},
5021 {0x66555555, 0x66555555},
5022 {0xaaaaaaaa, 0xaaaaaaaa},
5023 {0x5a5a5a5a, 0x5a5a5a5a},
5024 {0xfafafafa, 0xfafafafa}, /* case-105 */
5025 {0x5afa5afa, 0x5afa5afa},
5026 {0x55555555, 0xfafafafa},
5027 {0x66555555, 0xfafafafa},
5028 {0x66555555, 0x5a5a5a5a},
5029 {0x66555555, 0x6a5a5a5a}, /* case-110 */
5030 {0x66555555, 0xaaaaaaaa},
5031 {0xffff55ff, 0xfafafafa},
5032 {0xffff55ff, 0x5afa5afa},
5033 {0xffff55ff, 0xaaaaaaaa},
5034 {0xffff55ff, 0xffff55ff}, /* case-115 */
5035 {0xaaffffaa, 0x5afa5afa},
5036 {0xaaffffaa, 0xaaaaaaaa},
5037 {0xffffffff, 0xfafafafa},
5038 {0xffffffff, 0x5afa5afa},
5039 {0xffffffff, 0xaaaaaaaa}, /* case-120 */
5040 {0x55ff55ff, 0x5afa5afa},
5041 {0x55ff55ff, 0xaaaaaaaa},
5042 {0x55ff55ff, 0x55ff55ff}
5043 };
5044
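/*
 * PS-TDMA parameter sets (5 bytes each) passed to the firmware via H2C,
 * selected by TDMA case number; non-shared-antenna cases start at 100.
 */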
5045 /* Shared-Antenna TDMA */
5046 static const struct coex_tdma_para tdma_sant_8822c[] = {
5047 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-0 */
5048 { {0x61, 0x45, 0x03, 0x11, 0x11} }, /* case-1 */
5049 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5050 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5051 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5052 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-5 */
5053 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5054 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5055 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5056 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5057 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-10 */
5058 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5059 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5060 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5061 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5062 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-15 */
5063 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5064 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5065 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5066 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5067 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-20 */
5068 { {0x51, 0x4a, 0x03, 0x10, 0x50} },
5069 { {0x51, 0x0c, 0x03, 0x10, 0x54} },
5070 { {0x55, 0x08, 0x03, 0x10, 0x54} },
5071 { {0x65, 0x10, 0x03, 0x11, 0x10} },
5072 { {0x51, 0x10, 0x03, 0x10, 0x51} }, /* case-25 */
5073 { {0x51, 0x08, 0x03, 0x10, 0x50} },
5074 { {0x61, 0x08, 0x03, 0x11, 0x11} }
5075 };
5076
5077 /* Non-Shared-Antenna TDMA */
5078 static const struct coex_tdma_para tdma_nsant_8822c[] = {
5079 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-100 */
5080 { {0x61, 0x45, 0x03, 0x11, 0x11} },
5081 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5082 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5083 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5084 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-105 */
5085 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5086 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5087 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5088 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5089 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-110 */
5090 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5091 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5092 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5093 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5094 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-115 */
5095 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5096 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5097 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5098 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5099 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-120 */
5100 { {0x51, 0x08, 0x03, 0x10, 0x50} }
5101 };
5102
5103 /* RSSI in percent (dBm = percent - 100) */
5104 static const u8 wl_rssi_step_8822c[] = {60, 50, 44, 30};
5105 static const u8 bt_rssi_step_8822c[] = {8, 15, 20, 25};
5106 static const struct coex_5g_afh_map afh_5g_8822c[] = { {0, 0, 0} };
5107
5108 /* wl_tx_dec_power, bt_tx_dec_power, wl_rx_gain, bt_rx_lna_constrain */
5109 static const struct coex_rf_para rf_para_tx_8822c[] = {
5110 {0, 0, false, 7}, /* for normal */
5111 {0, 16, false, 7}, /* for WL-CPT */
5112 {8, 17, true, 4},
5113 {7, 18, true, 4},
5114 {6, 19, true, 4},
5115 {5, 20, true, 4},
5116 {0, 21, true, 4} /* for gaming HID */
5117 };
5118
5119 static const struct coex_rf_para rf_para_rx_8822c[] = {
5120 {0, 0, false, 7}, /* for normal */
5121 {0, 16, false, 7}, /* for WL-CPT */
5122 {3, 24, true, 5},
5123 {2, 26, true, 5},
5124 {1, 27, true, 5},
5125 {0, 28, true, 5},
5126 {0, 28, true, 5} /* for gaming HID */
5127 };
5128
5129 static_assert(ARRAY_SIZE(rf_para_tx_8822c) == ARRAY_SIZE(rf_para_rx_8822c));
5130
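/*
 * TX power-tracking delta tables indexed by the thermal difference from the
 * efuse calibration value: "_p"/"_n" for thermal above/below the reference,
 * "a"/"b" for RF path A/B, with one row per 5 GHz sub-band.
 */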
5131 static const u8
5132 rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5133 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5134 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5135 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5136 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5137 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5138 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5139 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5140 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5141 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5142 };
5143
5144 static const u8
5145 rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5146 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5147 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5148 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5149 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5150 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5151 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5152 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5153 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5154 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5155 };
5156
5157 static const u8
5158 rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5159 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5160 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5161 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5162 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5163 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5164 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5165 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5166 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5167 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5168 };
5169
5170 static const u8
5171 rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5172 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5173 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5174 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5175 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5176 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5177 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5178 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5179 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5180 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5181 };
5182
5183 static const u8 rtw8822c_pwrtrk_2gb_n[RTW_PWR_TRK_TBL_SZ] = {
5184 0, 1, 2, 3, 4, 4, 5, 6, 7, 8,
5185 9, 9, 10, 11, 12, 13, 14, 15, 15, 16,
5186 17, 18, 19, 20, 20, 21, 22, 23, 24, 25
5187 };
5188
5189 static const u8 rtw8822c_pwrtrk_2gb_p[RTW_PWR_TRK_TBL_SZ] = {
5190 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5191 10, 11, 12, 13, 14, 14, 15, 16, 17, 18,
5192 19, 20, 21, 22, 23, 24, 25, 26, 27, 28
5193 };
5194
5195 static const u8 rtw8822c_pwrtrk_2ga_n[RTW_PWR_TRK_TBL_SZ] = {
5196 0, 1, 2, 2, 3, 4, 4, 5, 6, 6,
5197 7, 8, 8, 9, 9, 10, 11, 11, 12, 13,
5198 13, 14, 15, 15, 16, 17, 17, 18, 19, 19
5199 };
5200
5201 static const u8 rtw8822c_pwrtrk_2ga_p[RTW_PWR_TRK_TBL_SZ] = {
5202 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5203 10, 11, 11, 12, 13, 14, 15, 16, 17, 18,
5204 19, 20, 21, 22, 23, 24, 25, 25, 26, 27
5205 };
5206
5207 static const u8 rtw8822c_pwrtrk_2g_cck_b_n[RTW_PWR_TRK_TBL_SZ] = {
5208 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5209 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5210 17, 18, 19, 20, 21, 22, 23, 23, 24, 25
5211 };
5212
5213 static const u8 rtw8822c_pwrtrk_2g_cck_b_p[RTW_PWR_TRK_TBL_SZ] = {
5214 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5215 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
5216 20, 21, 22, 23, 24, 25, 26, 27, 28, 29
5217 };
5218
5219 static const u8 rtw8822c_pwrtrk_2g_cck_a_n[RTW_PWR_TRK_TBL_SZ] = {
5220 0, 1, 2, 3, 3, 4, 5, 6, 6, 7,
5221 8, 9, 9, 10, 11, 12, 12, 13, 14, 15,
5222 15, 16, 17, 18, 18, 19, 20, 21, 21, 22
5223 };
5224
5225 static const u8 rtw8822c_pwrtrk_2g_cck_a_p[RTW_PWR_TRK_TBL_SZ] = {
5226 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5227 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5228 18, 18, 19, 20, 21, 22, 23, 24, 24, 25
5229 };
5230
5231 static const struct rtw_pwr_track_tbl rtw8822c_pwr_track_type0_tbl = {
5232 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_1],
5233 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_2],
5234 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_3],
5235 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_1],
5236 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_2],
5237 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_3],
5238 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_1],
5239 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_2],
5240 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_3],
5241 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_1],
5242 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_2],
5243 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_3],
5244 .pwrtrk_2gb_n = rtw8822c_pwrtrk_2gb_n,
5245 .pwrtrk_2gb_p = rtw8822c_pwrtrk_2gb_p,
5246 .pwrtrk_2ga_n = rtw8822c_pwrtrk_2ga_n,
5247 .pwrtrk_2ga_p = rtw8822c_pwrtrk_2ga_p,
5248 .pwrtrk_2g_cckb_n = rtw8822c_pwrtrk_2g_cck_b_n,
5249 .pwrtrk_2g_cckb_p = rtw8822c_pwrtrk_2g_cck_b_p,
5250 .pwrtrk_2g_ccka_n = rtw8822c_pwrtrk_2g_cck_a_n,
5251 .pwrtrk_2g_ccka_p = rtw8822c_pwrtrk_2g_cck_a_p,
5252 };
5253
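/* RF front-end definitions, indexed by the efuse rfe_option value */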
5254 static const struct rtw_rfe_def rtw8822c_rfe_defs[] = {
5255 [0] = RTW_DEF_RFE(8822c, 0, 0, 0),
5256 [1] = RTW_DEF_RFE(8822c, 0, 0, 0),
5257 [2] = RTW_DEF_RFE(8822c, 0, 0, 0),
5258 [3] = RTW_DEF_RFE(8822c, 0, 0, 0),
5259 [4] = RTW_DEF_RFE(8822c, 0, 0, 0),
5260 [5] = RTW_DEF_RFE(8822c, 0, 5, 0),
5261 [6] = RTW_DEF_RFE(8822c, 0, 0, 0),
5262 };
5263
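/* EDCCA low-to-high / high-to-low threshold fields used by adaptivity */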
5264 static const struct rtw_hw_reg_offset rtw8822c_edcca_th[] = {
5265 [EDCCA_TH_L2H_IDX] = {
5266 {.addr = 0x84c, .mask = MASKBYTE2}, .offset = 0x80
5267 },
5268 [EDCCA_TH_H2L_IDX] = {
5269 {.addr = 0x84c, .mask = MASKBYTE3}, .offset = 0x80
5270 },
5271 };
5272
5273 #ifdef CONFIG_PM
5274 static const struct wiphy_wowlan_support rtw_wowlan_stub_8822c = {
5275 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_GTK_REKEY_FAILURE |
5276 WIPHY_WOWLAN_DISCONNECT | WIPHY_WOWLAN_SUPPORTS_GTK_REKEY |
5277 WIPHY_WOWLAN_NET_DETECT,
5278 .n_patterns = RTW_MAX_PATTERN_NUM,
5279 .pattern_max_len = RTW_MAX_PATTERN_SIZE,
5280 .pattern_min_len = 1,
5281 .max_nd_match_sets = 4,
5282 };
5283 #endif
5284
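/* Registers dumped in the coex debug information */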
5285 static const struct rtw_reg_domain coex_info_hw_regs_8822c[] = {
5286 {0x1860, BIT(3), RTW_REG_DOMAIN_MAC8},
5287 {0x4160, BIT(3), RTW_REG_DOMAIN_MAC8},
5288 {0x1c32, BIT(6), RTW_REG_DOMAIN_MAC8},
5289 {0x1c38, BIT(28), RTW_REG_DOMAIN_MAC32},
5290 {0, 0, RTW_REG_DOMAIN_NL},
5291 {0x430, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5292 {0x434, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5293 {0x42a, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5294 {0x426, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5295 {0x45e, BIT(3), RTW_REG_DOMAIN_MAC8},
5296 {0x454, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5297 {0, 0, RTW_REG_DOMAIN_NL},
5298 {0x4c, BIT(24) | BIT(23), RTW_REG_DOMAIN_MAC32},
5299 {0x64, BIT(0), RTW_REG_DOMAIN_MAC8},
5300 {0x4c6, BIT(4), RTW_REG_DOMAIN_MAC8},
5301 {0x40, BIT(5), RTW_REG_DOMAIN_MAC8},
5302 {0x1, RFREG_MASK, RTW_REG_DOMAIN_RF_B},
5303 {0, 0, RTW_REG_DOMAIN_NL},
5304 {0x550, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5305 {0x522, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5306 {0x953, BIT(1), RTW_REG_DOMAIN_MAC8},
5307 {0xc50, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5308 };
5309
5310 const struct rtw_chip_info rtw8822c_hw_spec = {
5311 .ops = &rtw8822c_ops,
5312 .id = RTW_CHIP_TYPE_8822C,
5313 .fw_name = "rtw88/rtw8822c_fw.bin",
5314 .wlan_cpu = RTW_WCPU_11AC,
5315 .tx_pkt_desc_sz = 48,
5316 .tx_buf_desc_sz = 16,
5317 .rx_pkt_desc_sz = 24,
5318 .rx_buf_desc_sz = 8,
5319 .phy_efuse_size = 512,
5320 .log_efuse_size = 768,
5321 .ptct_efuse_size = 124,
5322 .txff_size = 262144,
5323 .rxff_size = 24576,
5324 .fw_rxff_size = 12288,
5325 .rsvd_drv_pg_num = 16,
5326 .txgi_factor = 2,
5327 .is_pwr_by_rate_dec = false,
5328 .max_power_index = 0x7f,
5329 .csi_buf_pg_num = 50,
5330 .band = RTW_BAND_2G | RTW_BAND_5G,
5331 .page_size = TX_PAGE_SIZE,
5332 .dig_min = 0x20,
5333 .usb_tx_agg_desc_num = 3,
5334 .hw_feature_report = true,
5335 .c2h_ra_report_size = 7,
5336 .old_datarate_fb_limit = false,
5337 .default_1ss_tx_path = BB_PATH_A,
5338 .path_div_supported = true,
5339 .ht_supported = true,
5340 .vht_supported = true,
5341 .lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),
5342 .sys_func_en = 0xD8,
5343 .pwr_on_seq = card_enable_flow_8822c,
5344 .pwr_off_seq = card_disable_flow_8822c,
5345 .page_table = page_table_8822c,
5346 .rqpn_table = rqpn_table_8822c,
5347 .prioq_addrs = &prioq_addrs_8822c,
5348 .intf_table = &phy_para_table_8822c,
5349 .dig = rtw8822c_dig,
5350 .dig_cck = NULL,
5351 .rf_base_addr = {0x3c00, 0x4c00},
5352 .rf_sipi_addr = {0x1808, 0x4108},
5353 .ltecoex_addr = &rtw8822c_ltecoex_addr,
5354 .mac_tbl = &rtw8822c_mac_tbl,
5355 .agc_tbl = &rtw8822c_agc_tbl,
5356 .bb_tbl = &rtw8822c_bb_tbl,
5357 .rfk_init_tbl = &rtw8822c_array_mp_cal_init_tbl,
5358 .rf_tbl = {&rtw8822c_rf_b_tbl, &rtw8822c_rf_a_tbl},
5359 .rfe_defs = rtw8822c_rfe_defs,
5360 .rfe_defs_size = ARRAY_SIZE(rtw8822c_rfe_defs),
5361 .en_dis_dpd = true,
5362 .dpd_ratemask = DIS_DPD_RATEALL,
5363 .iqk_threshold = 8,
5364 .lck_threshold = 8,
5365 .bfer_su_max_num = 2,
5366 .bfer_mu_max_num = 1,
5367 .rx_ldpc = true,
5368 .tx_stbc = true,
5369 .edcca_th = rtw8822c_edcca_th,
5370 .l2h_th_ini_cs = 60,
5371 .l2h_th_ini_ad = 45,
5372 .ampdu_density = IEEE80211_HT_MPDU_DENSITY_2,
5373
5374 #ifdef CONFIG_PM
5375 .wow_fw_name = "rtw88/rtw8822c_wow_fw.bin",
5376 .wowlan_stub = &rtw_wowlan_stub_8822c,
5377 .max_sched_scan_ssids = 4,
5378 #endif
5379 .max_scan_ie_len = (RTW_PROBE_PG_CNT - 1) * TX_PAGE_SIZE,
5380 .coex_para_ver = 0x22020720,
5381 .bt_desired_ver = 0x20,
5382 .scbd_support = true,
5383 .new_scbd10_def = true,
5384 .ble_hid_profile_support = true,
5385 .wl_mimo_ps_support = true,
5386 .pstdma_type = COEX_PSTDMA_FORCE_LPSOFF,
5387 .bt_rssi_type = COEX_BTRSSI_DBM,
5388 .ant_isolation = 15,
5389 .rssi_tolerance = 2,
5390 .wl_rssi_step = wl_rssi_step_8822c,
5391 .bt_rssi_step = bt_rssi_step_8822c,
5392 .table_sant_num = ARRAY_SIZE(table_sant_8822c),
5393 .table_sant = table_sant_8822c,
5394 .table_nsant_num = ARRAY_SIZE(table_nsant_8822c),
5395 .table_nsant = table_nsant_8822c,
5396 .tdma_sant_num = ARRAY_SIZE(tdma_sant_8822c),
5397 .tdma_sant = tdma_sant_8822c,
5398 .tdma_nsant_num = ARRAY_SIZE(tdma_nsant_8822c),
5399 .tdma_nsant = tdma_nsant_8822c,
5400 .wl_rf_para_num = ARRAY_SIZE(rf_para_tx_8822c),
5401 .wl_rf_para_tx = rf_para_tx_8822c,
5402 .wl_rf_para_rx = rf_para_rx_8822c,
5403 .bt_afh_span_bw20 = 0x24,
5404 .bt_afh_span_bw40 = 0x36,
5405 .afh_5g_num = ARRAY_SIZE(afh_5g_8822c),
5406 .afh_5g = afh_5g_8822c,
5407
5408 .coex_info_hw_regs_num = ARRAY_SIZE(coex_info_hw_regs_8822c),
5409 .coex_info_hw_regs = coex_info_hw_regs_8822c,
5410
5411 .fw_fifo_addr = {0x780, 0x700, 0x780, 0x660, 0x650, 0x680},
5412 .fwcd_segs = &rtw8822c_fwcd_segs,
5413 };
5414 EXPORT_SYMBOL(rtw8822c_hw_spec);
5415
5416 MODULE_FIRMWARE("rtw88/rtw8822c_fw.bin");
5417 MODULE_FIRMWARE("rtw88/rtw8822c_wow_fw.bin");
5418
5419 MODULE_AUTHOR("Realtek Corporation");
5420 MODULE_DESCRIPTION("Realtek 802.11ac wireless 8822c driver");
5421 MODULE_LICENSE("Dual BSD/GPL");
5422