1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2018-2019 Realtek Corporation
3 */
4
5 #include <linux/module.h>
6 #include "main.h"
7 #include "coex.h"
8 #include "fw.h"
9 #include "tx.h"
10 #include "rx.h"
11 #include "phy.h"
12 #include "rtw8822c.h"
13 #include "rtw8822c_table.h"
14 #include "mac.h"
15 #include "reg.h"
16 #include "debug.h"
17 #include "util.h"
18 #include "bf.h"
19 #include "efuse.h"
20
21 #define IQK_DONE_8822C 0xaa
22
23 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
24 u8 rx_path, bool is_tx2_path);
25
26 static void rtw8822ce_efuse_parsing(struct rtw_efuse *efuse,
27 struct rtw8822c_efuse *map)
28 {
29 ether_addr_copy(efuse->addr, map->e.mac_addr);
30 }
31
32 static void rtw8822cu_efuse_parsing(struct rtw_efuse *efuse,
33 struct rtw8822c_efuse *map)
34 {
35 ether_addr_copy(efuse->addr, map->u.mac_addr);
36 }
37
38 static void rtw8822cs_efuse_parsing(struct rtw_efuse *efuse,
39 struct rtw8822c_efuse *map)
40 {
41 ether_addr_copy(efuse->addr, map->s.mac_addr);
42 }
43
44 static int rtw8822c_read_efuse(struct rtw_dev *rtwdev, u8 *log_map)
45 {
46 struct rtw_efuse *efuse = &rtwdev->efuse;
47 struct rtw8822c_efuse *map;
48 int i;
49
50 map = (struct rtw8822c_efuse *)log_map;
51
52 efuse->rfe_option = map->rfe_option;
53 efuse->rf_board_option = map->rf_board_option;
54 efuse->crystal_cap = map->xtal_k & XCAP_MASK;
55 efuse->channel_plan = map->channel_plan;
56 efuse->country_code[0] = map->country_code[0];
57 efuse->country_code[1] = map->country_code[1];
58 efuse->bt_setting = map->rf_bt_setting;
59 efuse->regd = map->rf_board_option & 0x7;
60 efuse->thermal_meter[RF_PATH_A] = map->path_a_thermal;
61 efuse->thermal_meter[RF_PATH_B] = map->path_b_thermal;
62 efuse->thermal_meter_k =
63 (map->path_a_thermal + map->path_b_thermal) >> 1;
64 efuse->power_track_type = (map->tx_pwr_calibrate_rate >> 4) & 0xf;
65
66 for (i = 0; i < 4; i++)
67 efuse->txpwr_idx_table[i] = map->txpwr_idx_table[i];
68
69 switch (rtw_hci_type(rtwdev)) {
70 case RTW_HCI_TYPE_PCIE:
71 rtw8822ce_efuse_parsing(efuse, map);
72 break;
73 case RTW_HCI_TYPE_USB:
74 rtw8822cu_efuse_parsing(efuse, map);
75 break;
76 case RTW_HCI_TYPE_SDIO:
77 rtw8822cs_efuse_parsing(efuse, map);
78 break;
79 default:
80 /* unsupported now */
81 return -ENOTSUPP;
82 }
83
84 return 0;
85 }
86
87 static void rtw8822c_header_file_init(struct rtw_dev *rtwdev, bool pre)
88 {
89 rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
90 rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_PI_ON);
91 rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
92 rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_PI_ON);
93
94 if (pre)
95 rtw_write32_clr(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
96 else
97 rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
98 }
99
100 static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
101 {
102 rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
103 rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
104 rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
105 }
106
107 static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
108 struct rtw_backup_info *backup,
109 struct rtw_backup_info *backup_rf)
110 {
111 u32 path, i;
112 u32 val;
113 u32 reg;
114 u32 rf_addr[DACK_RF_8822C] = {0x8f};
115 u32 addrs[DACK_REG_8822C] = {0x180c, 0x1810, 0x410c, 0x4110,
116 0x1c3c, 0x1c24, 0x1d70, 0x9b4,
117 0x1a00, 0x1a14, 0x1d58, 0x1c38,
118 0x1e24, 0x1e28, 0x1860, 0x4160};
119
120 for (i = 0; i < DACK_REG_8822C; i++) {
121 backup[i].len = 4;
122 backup[i].reg = addrs[i];
123 backup[i].val = rtw_read32(rtwdev, addrs[i]);
124 }
125
126 for (path = 0; path < DACK_PATH_8822C; path++) {
127 for (i = 0; i < DACK_RF_8822C; i++) {
128 reg = rf_addr[i];
129 val = rtw_read_rf(rtwdev, path, reg, RFREG_MASK);
130 backup_rf[path * i + i].reg = reg;
131 backup_rf[path * i + i].val = val;
132 }
133 }
134 }
135
136 static void rtw8822c_dac_restore_reg(struct rtw_dev *rtwdev,
137 struct rtw_backup_info *backup,
138 struct rtw_backup_info *backup_rf)
139 {
140 u32 path, i;
141 u32 val;
142 u32 reg;
143
144 rtw_restore_reg(rtwdev, backup, DACK_REG_8822C);
145
146 for (path = 0; path < DACK_PATH_8822C; path++) {
147 for (i = 0; i < DACK_RF_8822C; i++) {
148 val = backup_rf[path * i + i].val;
149 reg = backup_rf[path * i + i].reg;
150 rtw_write_rf(rtwdev, path, reg, RFREG_MASK, val);
151 }
152 }
153 }
154
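/* DACK I/Q samples are 10-bit two's-complement codes: a code >= 0x200 is
 * negative, with magnitude 0x400 - code.  The helpers below compute the
 * signed min/max and sort order of such codes without converting them,
 * which is why the 0x200/0x400 comparisons recur throughout the DAC
 * calibration code.
 */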
155 static void rtw8822c_rf_minmax_cmp(struct rtw_dev *rtwdev, u32 value,
156 u32 *min, u32 *max)
157 {
158 if (value >= 0x200) {
159 if (*min >= 0x200) {
160 if (*min > value)
161 *min = value;
162 } else {
163 *min = value;
164 }
165 if (*max >= 0x200) {
166 if (*max < value)
167 *max = value;
168 }
169 } else {
170 if (*min < 0x200) {
171 if (*min > value)
172 *min = value;
173 }
174
175 if (*max >= 0x200) {
176 *max = value;
177 } else {
178 if (*max < value)
179 *max = value;
180 }
181 }
182 }
183
184 static void __rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *v1, u32 *v2)
185 {
186 if (*v1 >= 0x200 && *v2 >= 0x200) {
187 if (*v1 > *v2)
188 swap(*v1, *v2);
189 } else if (*v1 < 0x200 && *v2 < 0x200) {
190 if (*v1 > *v2)
191 swap(*v1, *v2);
192 } else if (*v1 < 0x200 && *v2 >= 0x200) {
193 swap(*v1, *v2);
194 }
195 }
196
197 static void rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
198 {
199 u32 i, j;
200
201 for (i = 0; i < DACK_SN_8822C - 1; i++) {
202 for (j = 0; j < (DACK_SN_8822C - 1 - i) ; j++) {
203 __rtw8822c_dac_iq_sort(rtwdev, &iv[j], &iv[j + 1]);
204 __rtw8822c_dac_iq_sort(rtwdev, &qv[j], &qv[j + 1]);
205 }
206 }
207 }
208
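/* Average the middle samples of a sorted vector (the 10 smallest and 10
 * largest entries are skipped) and return the mean DC offset, re-encoded
 * as a 10-bit two's-complement code.
 */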
209 static void rtw8822c_dac_iq_offset(struct rtw_dev *rtwdev, u32 *vec, u32 *val)
210 {
211 u32 p, m, t, i;
212
213 m = 0;
214 p = 0;
215 for (i = 10; i < DACK_SN_8822C - 10; i++) {
216 if (vec[i] > 0x200)
217 m = (0x400 - vec[i]) + m;
218 else
219 p = vec[i] + p;
220 }
221
222 if (p > m) {
223 t = p - m;
224 t = t / (DACK_SN_8822C - 20);
225 } else {
226 t = m - p;
227 t = t / (DACK_SN_8822C - 20);
228 if (t != 0x0)
229 t = 0x400 - t;
230 }
231
232 *val = t;
233 }
234
235 static u32 rtw8822c_get_path_write_addr(u8 path)
236 {
237 u32 base_addr;
238
239 switch (path) {
240 case RF_PATH_A:
241 base_addr = 0x1800;
242 break;
243 case RF_PATH_B:
244 base_addr = 0x4100;
245 break;
246 default:
247 WARN_ON(1);
248 return -1;
249 }
250
251 return base_addr;
252 }
253
254 static u32 rtw8822c_get_path_read_addr(u8 path)
255 {
256 u32 base_addr;
257
258 switch (path) {
259 case RF_PATH_A:
260 base_addr = 0x2800;
261 break;
262 case RF_PATH_B:
263 base_addr = 0x4500;
264 break;
265 default:
266 WARN_ON(1);
267 return -1;
268 }
269
270 return base_addr;
271 }
272
273 static bool rtw8822c_dac_iq_check(struct rtw_dev *rtwdev, u32 value)
274 {
275 bool ret = true;
276
277 if ((value >= 0x200 && (0x400 - value) > 0x64) ||
278 (value < 0x200 && value > 0x64)) {
279 ret = false;
280 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] Error overflow\n");
281 }
282
283 return ret;
284 }
285
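/* Collect DACK_SN_8822C I/Q sample pairs from the 0x2dbc report register.
 * Samples whose DC magnitude exceeds 0x64 are treated as overflow and
 * discarded; the 10000-iteration cap keeps the loop bounded if the
 * hardware never settles.
 */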
286 static void rtw8822c_dac_cal_iq_sample(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
287 {
288 u32 temp;
289 int i = 0, cnt = 0;
290
291 while (i < DACK_SN_8822C && cnt < 10000) {
292 cnt++;
293 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
294 iv[i] = (temp & 0x3ff000) >> 12;
295 qv[i] = temp & 0x3ff;
296
297 if (rtw8822c_dac_iq_check(rtwdev, iv[i]) &&
298 rtw8822c_dac_iq_check(rtwdev, qv[i]))
299 i++;
300 }
301 }
302
303 static void rtw8822c_dac_cal_iq_search(struct rtw_dev *rtwdev,
304 u32 *iv, u32 *qv,
305 u32 *i_value, u32 *q_value)
306 {
307 u32 i_max = 0, q_max = 0, i_min = 0, q_min = 0;
308 u32 i_delta, q_delta;
309 u32 temp;
310 int i, cnt = 0;
311
312 do {
313 i_min = iv[0];
314 i_max = iv[0];
315 q_min = qv[0];
316 q_max = qv[0];
317 for (i = 0; i < DACK_SN_8822C; i++) {
318 rtw8822c_rf_minmax_cmp(rtwdev, iv[i], &i_min, &i_max);
319 rtw8822c_rf_minmax_cmp(rtwdev, qv[i], &q_min, &q_max);
320 }
321
322 if (i_max < 0x200 && i_min < 0x200)
323 i_delta = i_max - i_min;
324 else if (i_max >= 0x200 && i_min >= 0x200)
325 i_delta = i_max - i_min;
326 else
327 i_delta = i_max + (0x400 - i_min);
328
329 if (q_max < 0x200 && q_min < 0x200)
330 q_delta = q_max - q_min;
331 else if (q_max >= 0x200 && q_min >= 0x200)
332 q_delta = q_max - q_min;
333 else
334 q_delta = q_max + (0x400 - q_min);
335
336 rtw_dbg(rtwdev, RTW_DBG_RFK,
337 "[DACK] i: min=0x%08x, max=0x%08x, delta=0x%08x\n",
338 i_min, i_max, i_delta);
339 rtw_dbg(rtwdev, RTW_DBG_RFK,
340 "[DACK] q: min=0x%08x, max=0x%08x, delta=0x%08x\n",
341 q_min, q_max, q_delta);
342
343 rtw8822c_dac_iq_sort(rtwdev, iv, qv);
344
345 if (i_delta > 5 || q_delta > 5) {
346 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
347 iv[0] = (temp & 0x3ff000) >> 12;
348 qv[0] = temp & 0x3ff;
349 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
350 iv[DACK_SN_8822C - 1] = (temp & 0x3ff000) >> 12;
351 qv[DACK_SN_8822C - 1] = temp & 0x3ff;
352 } else {
353 break;
354 }
355 } while (cnt++ < 100);
356
357 rtw8822c_dac_iq_offset(rtwdev, iv, i_value);
358 rtw8822c_dac_iq_offset(rtwdev, qv, q_value);
359 }
360
361 static void rtw8822c_dac_cal_rf_mode(struct rtw_dev *rtwdev,
362 u32 *i_value, u32 *q_value)
363 {
364 u32 iv[DACK_SN_8822C], qv[DACK_SN_8822C];
365 u32 rf_a, rf_b;
366
367 rf_a = rtw_read_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK);
368 rf_b = rtw_read_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK);
369
370 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-A=0x%05x\n", rf_a);
371 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-B=0x%05x\n", rf_b);
372
373 rtw8822c_dac_cal_iq_sample(rtwdev, iv, qv);
374 rtw8822c_dac_cal_iq_search(rtwdev, iv, qv, i_value, q_value);
375 }
376
377 static void rtw8822c_dac_bb_setting(struct rtw_dev *rtwdev)
378 {
379 rtw_write32_mask(rtwdev, 0x1d58, 0xff8, 0x1ff);
380 rtw_write32_mask(rtwdev, 0x1a00, 0x3, 0x2);
381 rtw_write32_mask(rtwdev, 0x1a14, 0x300, 0x3);
382 rtw_write32(rtwdev, 0x1d70, 0x7e7e7e7e);
383 rtw_write32_mask(rtwdev, 0x180c, 0x3, 0x0);
384 rtw_write32_mask(rtwdev, 0x410c, 0x3, 0x0);
385 rtw_write32(rtwdev, 0x1b00, 0x00000008);
386 rtw_write8(rtwdev, 0x1bcc, 0x3f);
387 rtw_write32(rtwdev, 0x1b00, 0x0000000a);
388 rtw_write8(rtwdev, 0x1bcc, 0x3f);
389 rtw_write32_mask(rtwdev, 0x1e24, BIT(31), 0x0);
390 rtw_write32_mask(rtwdev, 0x1e28, 0xf, 0x3);
391 }
392
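/* ADC DC offset calibration (ADCK) for one path: measure the raw I/Q DC
 * offset, program its negation as the compensation value (two 10-bit
 * fields packed into base_addr + 0x68), then re-measure with the
 * compensation applied.  The loop stops once the residual offset is below
 * 5 codes on both rails, or after 10 attempts.
 */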
393 static void rtw8822c_dac_cal_adc(struct rtw_dev *rtwdev,
394 u8 path, u32 *adc_ic, u32 *adc_qc)
395 {
396 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
397 u32 ic = 0, qc = 0, temp = 0;
398 u32 base_addr;
399 u32 path_sel;
400 int i;
401
402 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK path(%d)\n", path);
403
404 base_addr = rtw8822c_get_path_write_addr(path);
405 switch (path) {
406 case RF_PATH_A:
407 path_sel = 0xa0000;
408 break;
409 case RF_PATH_B:
410 path_sel = 0x80000;
411 break;
412 default:
413 WARN_ON(1);
414 return;
415 }
416
417 /* ADCK step1 */
418 rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x0);
419 if (path == RF_PATH_B)
420 rtw_write32(rtwdev, base_addr + 0x30, 0x30db8041);
421 rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
422 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
423 rtw_write32(rtwdev, base_addr + 0x10, 0x02dd08c4);
424 rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
425 rtw_write_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK, 0x10000);
426 rtw_write_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK, 0x10000);
427 for (i = 0; i < 10; i++) {
428 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK count=%d\n", i);
429 rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8003);
430 rtw_write32(rtwdev, 0x1c24, 0x00010002);
431 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
432 rtw_dbg(rtwdev, RTW_DBG_RFK,
433 "[DACK] before: i=0x%x, q=0x%x\n", ic, qc);
434
435 /* compensation value */
436 if (ic != 0x0) {
437 ic = 0x400 - ic;
438 *adc_ic = ic;
439 }
440 if (qc != 0x0) {
441 qc = 0x400 - qc;
442 *adc_qc = qc;
443 }
444 temp = (ic & 0x3ff) | ((qc & 0x3ff) << 10);
445 rtw_write32(rtwdev, base_addr + 0x68, temp);
446 dm_info->dack_adck[path] = temp;
447 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK 0x%08x=0x%08x\n",
448 base_addr + 0x68, temp);
449 /* check ADC DC offset */
450 rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8103);
451 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
452 rtw_dbg(rtwdev, RTW_DBG_RFK,
453 "[DACK] after: i=0x%08x, q=0x%08x\n", ic, qc);
454 if (ic >= 0x200)
455 ic = 0x400 - ic;
456 if (qc >= 0x200)
457 qc = 0x400 - qc;
458 if (ic < 5 && qc < 5)
459 break;
460 }
461
462 /* ADCK step2 */
463 rtw_write32(rtwdev, 0x1c3c, 0x00000003);
464 rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
465 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
466
467 /* release pull low switch on IQ path */
468 rtw_write_rf(rtwdev, path, 0x8f, BIT(13), 0x1);
469 }
470
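/* DACK step 1: load the ADCK result, configure the calibration engine and
 * start the DAC offset measurement, then wait (via check_hw_ready()) for
 * the per-path read windows at read_addr + 0x08 and + 0x34 to report
 * ready.
 */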
471 static void rtw8822c_dac_cal_step1(struct rtw_dev *rtwdev, u8 path)
472 {
473 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
474 u32 base_addr;
475 u32 read_addr;
476
477 base_addr = rtw8822c_get_path_write_addr(path);
478 read_addr = rtw8822c_get_path_read_addr(path);
479
480 rtw_write32(rtwdev, base_addr + 0x68, dm_info->dack_adck[path]);
481 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
482 if (path == RF_PATH_A) {
483 rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
484 rtw_write32(rtwdev, 0x1c38, 0xffffffff);
485 }
486 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
487 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
488 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
489 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff81);
490 rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
491 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
492 rtw_write32(rtwdev, base_addr + 0xd8, 0x0008ff81);
493 rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
494 rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
495 mdelay(2);
496 rtw_write32(rtwdev, base_addr + 0xbc, 0x000aff8d);
497 mdelay(2);
498 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
499 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
500 mdelay(1);
501 rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
502 rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
503 mdelay(20);
504 if (!check_hw_ready(rtwdev, read_addr + 0x08, 0x7fff80, 0xffff) ||
505 !check_hw_ready(rtwdev, read_addr + 0x34, 0x7fff80, 0xffff))
506 rtw_err(rtwdev, "failed to wait for dack ready\n");
507 rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
508 mdelay(1);
509 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
510 rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
511 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
512 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
513 rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
514 }
515
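/* DACK step 2: read back the measured DC offset in RF mode and convert it
 * into the 8-bit compensation code used by step 3 (negate in the 10-bit
 * domain, scale by 2 * 6 / 5 and bias around 0x80, folding large codes
 * back under 0x7f).
 */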
516 static void rtw8822c_dac_cal_step2(struct rtw_dev *rtwdev,
517 u8 path, u32 *ic_out, u32 *qc_out)
518 {
519 u32 base_addr;
520 u32 ic, qc, ic_in, qc_in;
521
522 base_addr = rtw8822c_get_path_write_addr(path);
523 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, 0x0);
524 rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, 0x8);
525 rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, 0x0);
526 rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, 0x8);
527
528 rtw_write32(rtwdev, 0x1b00, 0x00000008);
529 rtw_write8(rtwdev, 0x1bcc, 0x03f);
530 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
531 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
532 rtw_write32(rtwdev, 0x1c3c, 0x00088103);
533
534 rtw8822c_dac_cal_rf_mode(rtwdev, &ic_in, &qc_in);
535 ic = ic_in;
536 qc = qc_in;
537
538 /* compensation value */
539 if (ic != 0x0)
540 ic = 0x400 - ic;
541 if (qc != 0x0)
542 qc = 0x400 - qc;
543 if (ic < 0x300) {
544 ic = ic * 2 * 6 / 5;
545 ic = ic + 0x80;
546 } else {
547 ic = (0x400 - ic) * 2 * 6 / 5;
548 ic = 0x7f - ic;
549 }
550 if (qc < 0x300) {
551 qc = qc * 2 * 6 / 5;
552 qc = qc + 0x80;
553 } else {
554 qc = (0x400 - qc) * 2 * 6 / 5;
555 qc = 0x7f - qc;
556 }
557
558 *ic_out = ic;
559 *qc_out = qc;
560
561 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] before i=0x%x, q=0x%x\n", ic_in, qc_in);
562 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] after i=0x%x, q=0x%x\n", ic, qc);
563 }
564
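/* DACK step 3: program the I/Q compensation codes from step 2 into the
 * calibration registers, wait for them to be reflected in the read-back
 * registers (read_addr + 0x24 / + 0x50), then re-measure the DAC DC
 * offset with an extra +0x10 offset applied to both ADC compensation
 * values and report the residual.
 */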
565 static void rtw8822c_dac_cal_step3(struct rtw_dev *rtwdev, u8 path,
566 u32 adc_ic, u32 adc_qc,
567 u32 *ic_in, u32 *qc_in,
568 u32 *i_out, u32 *q_out)
569 {
570 u32 base_addr;
571 u32 read_addr;
572 u32 ic, qc;
573 u32 temp;
574
575 base_addr = rtw8822c_get_path_write_addr(path);
576 read_addr = rtw8822c_get_path_read_addr(path);
577 ic = *ic_in;
578 qc = *qc_in;
579
580 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
581 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
582 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
583 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
584 rtw_write32(rtwdev, base_addr + 0xbc, 0xc008ff81);
585 rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
586 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, ic & 0xf);
587 rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, (ic & 0xf0) >> 4);
588 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
589 rtw_write32(rtwdev, base_addr + 0xd8, 0xe008ff81);
590 rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
591 rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, qc & 0xf);
592 rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, (qc & 0xf0) >> 4);
593 rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
594 mdelay(2);
595 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x6);
596 mdelay(2);
597 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
598 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
599 mdelay(1);
600 rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
601 rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
602 mdelay(20);
603 if (!check_hw_ready(rtwdev, read_addr + 0x24, 0x07f80000, ic) ||
604 !check_hw_ready(rtwdev, read_addr + 0x50, 0x07f80000, qc))
605 rtw_err(rtwdev, "failed to write IQ vector to hardware\n");
606 rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
607 mdelay(1);
608 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x3);
609 rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
610
611 /* check DAC DC offset */
612 temp = ((adc_ic + 0x10) & 0x3ff) | (((adc_qc + 0x10) & 0x3ff) << 10);
613 rtw_write32(rtwdev, base_addr + 0x68, temp);
614 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
615 rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
616 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
617 if (ic >= 0x10)
618 ic = ic - 0x10;
619 else
620 ic = 0x400 - (0x10 - ic);
621
622 if (qc >= 0x10)
623 qc = qc - 0x10;
624 else
625 qc = 0x400 - (0x10 - qc);
626
627 *i_out = ic;
628 *q_out = qc;
629
630 if (ic >= 0x200)
631 ic = 0x400 - ic;
632 if (qc >= 0x200)
633 qc = 0x400 - qc;
634
635 *ic_in = ic;
636 *qc_in = qc;
637
638 rtw_dbg(rtwdev, RTW_DBG_RFK,
639 "[DACK] after DACK i=0x%x, q=0x%x\n", *i_out, *q_out);
640 }
641
642 static void rtw8822c_dac_cal_step4(struct rtw_dev *rtwdev, u8 path)
643 {
644 u32 base_addr = rtw8822c_get_path_write_addr(path);
645
646 rtw_write32(rtwdev, base_addr + 0x68, 0x0);
647 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
648 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0x1, 0x0);
649 rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x1);
650 }
651
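/* Back up one calibrated MSBK vector (I or Q) of a path: sweep the index
 * field of the write-side register and record the DACK_MSBK_BACKUP_NUM
 * words reported on the read side into dm_info->dack_msbk[][][].
 */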
652 static void rtw8822c_dac_cal_backup_vec(struct rtw_dev *rtwdev,
653 u8 path, u8 vec, u32 w_addr, u32 r_addr)
654 {
655 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
656 u16 val;
657 u32 i;
658
659 if (WARN_ON(vec >= 2))
660 return;
661
662 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
663 rtw_write32_mask(rtwdev, w_addr, 0xf0000000, i);
664 val = (u16)rtw_read32_mask(rtwdev, r_addr, 0x7fc0000);
665 dm_info->dack_msbk[path][vec][i] = val;
666 }
667 }
668
669 static void rtw8822c_dac_cal_backup_path(struct rtw_dev *rtwdev, u8 path)
670 {
671 u32 w_off = 0x1c;
672 u32 r_off = 0x2c;
673 u32 w_addr, r_addr;
674
675 if (WARN_ON(path >= 2))
676 return;
677
678 /* backup I vector */
679 w_addr = rtw8822c_get_path_write_addr(path) + 0xb0;
680 r_addr = rtw8822c_get_path_read_addr(path) + 0x10;
681 rtw8822c_dac_cal_backup_vec(rtwdev, path, 0, w_addr, r_addr);
682
683 /* backup Q vector */
684 w_addr = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
685 r_addr = rtw8822c_get_path_read_addr(path) + 0x10 + r_off;
686 rtw8822c_dac_cal_backup_vec(rtwdev, path, 1, w_addr, r_addr);
687 }
688
689 static void rtw8822c_dac_cal_backup_dck(struct rtw_dev *rtwdev)
690 {
691 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
692 u8 val;
693
694 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000);
695 dm_info->dack_dck[RF_PATH_A][0][0] = val;
696 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_1, 0xf);
697 dm_info->dack_dck[RF_PATH_A][0][1] = val;
698 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000);
699 dm_info->dack_dck[RF_PATH_A][1][0] = val;
700 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_1, 0xf);
701 dm_info->dack_dck[RF_PATH_A][1][1] = val;
702
703 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000);
704 dm_info->dack_dck[RF_PATH_B][0][0] = val;
705 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_1, 0xf);
706 dm_info->dack_dck[RF_PATH_B][1][0] = val;
707 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000);
708 dm_info->dack_dck[RF_PATH_B][0][1] = val;
709 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_1, 0xf);
710 dm_info->dack_dck[RF_PATH_B][1][1] = val;
711 }
712
713 static void rtw8822c_dac_cal_backup(struct rtw_dev *rtwdev)
714 {
715 u32 temp[3];
716
717 temp[0] = rtw_read32(rtwdev, 0x1860);
718 temp[1] = rtw_read32(rtwdev, 0x4160);
719 temp[2] = rtw_read32(rtwdev, 0x9b4);
720
721 /* set clock */
722 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
723
724 /* backup path-A I/Q */
725 rtw_write32_clr(rtwdev, 0x1830, BIT(30));
726 rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
727 rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_A);
728
729 /* backup path-B I/Q */
730 rtw_write32_clr(rtwdev, 0x4130, BIT(30));
731 rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
732 rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_B);
733
734 rtw8822c_dac_cal_backup_dck(rtwdev);
735 rtw_write32_set(rtwdev, 0x1830, BIT(30));
736 rtw_write32_set(rtwdev, 0x4130, BIT(30));
737
738 rtw_write32(rtwdev, 0x1860, temp[0]);
739 rtw_write32(rtwdev, 0x4160, temp[1]);
740 rtw_write32(rtwdev, 0x9b4, temp[2]);
741 }
742
743 static void rtw8822c_dac_cal_restore_dck(struct rtw_dev *rtwdev)
744 {
745 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
746 u8 val;
747
748 rtw_write32_set(rtwdev, REG_DCKA_I_0, BIT(19));
749 val = dm_info->dack_dck[RF_PATH_A][0][0];
750 rtw_write32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000, val);
751 val = dm_info->dack_dck[RF_PATH_A][0][1];
752 rtw_write32_mask(rtwdev, REG_DCKA_I_1, 0xf, val);
753
754 rtw_write32_set(rtwdev, REG_DCKA_Q_0, BIT(19));
755 val = dm_info->dack_dck[RF_PATH_A][1][0];
756 rtw_write32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000, val);
757 val = dm_info->dack_dck[RF_PATH_A][1][1];
758 rtw_write32_mask(rtwdev, REG_DCKA_Q_1, 0xf, val);
759
760 rtw_write32_set(rtwdev, REG_DCKB_I_0, BIT(19));
761 val = dm_info->dack_dck[RF_PATH_B][0][0];
762 rtw_write32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000, val);
763 val = dm_info->dack_dck[RF_PATH_B][0][1];
764 rtw_write32_mask(rtwdev, REG_DCKB_I_1, 0xf, val);
765
766 rtw_write32_set(rtwdev, REG_DCKB_Q_0, BIT(19));
767 val = dm_info->dack_dck[RF_PATH_B][1][0];
768 rtw_write32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000, val);
769 val = dm_info->dack_dck[RF_PATH_B][1][1];
770 rtw_write32_mask(rtwdev, REG_DCKB_Q_1, 0xf, val);
771 }
772
773 static void rtw8822c_dac_cal_restore_prepare(struct rtw_dev *rtwdev)
774 {
775 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
776
777 rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x0);
778 rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x0);
779 rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x0);
780 rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x0);
781
782 rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x0);
783 rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
784 rtw_write32_mask(rtwdev, 0x18b4, BIT(0), 0x1);
785 rtw_write32_mask(rtwdev, 0x18d0, BIT(0), 0x1);
786
787 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x0);
788 rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
789 rtw_write32_mask(rtwdev, 0x41b4, BIT(0), 0x1);
790 rtw_write32_mask(rtwdev, 0x41d0, BIT(0), 0x1);
791
792 rtw_write32_mask(rtwdev, 0x18b0, 0xf00, 0x0);
793 rtw_write32_mask(rtwdev, 0x18c0, BIT(14), 0x0);
794 rtw_write32_mask(rtwdev, 0x18cc, 0xf00, 0x0);
795 rtw_write32_mask(rtwdev, 0x18dc, BIT(14), 0x0);
796
797 rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x0);
798 rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x0);
799 rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x1);
800 rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x1);
801
802 rtw8822c_dac_cal_restore_dck(rtwdev);
803
804 rtw_write32_mask(rtwdev, 0x18c0, 0x38000, 0x7);
805 rtw_write32_mask(rtwdev, 0x18dc, 0x38000, 0x7);
806 rtw_write32_mask(rtwdev, 0x41c0, 0x38000, 0x7);
807 rtw_write32_mask(rtwdev, 0x41dc, 0x38000, 0x7);
808
809 rtw_write32_mask(rtwdev, 0x18b8, BIT(26) | BIT(25), 0x1);
810 rtw_write32_mask(rtwdev, 0x18d4, BIT(26) | BIT(25), 0x1);
811
812 rtw_write32_mask(rtwdev, 0x41b0, 0xf00, 0x0);
813 rtw_write32_mask(rtwdev, 0x41c0, BIT(14), 0x0);
814 rtw_write32_mask(rtwdev, 0x41cc, 0xf00, 0x0);
815 rtw_write32_mask(rtwdev, 0x41dc, BIT(14), 0x0);
816
817 rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x0);
818 rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x0);
819 rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x1);
820 rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x1);
821
822 rtw_write32_mask(rtwdev, 0x41b8, BIT(26) | BIT(25), 0x1);
823 rtw_write32_mask(rtwdev, 0x41d4, BIT(26) | BIT(25), 0x1);
824 }
825
826 static bool rtw8822c_dac_cal_restore_wait(struct rtw_dev *rtwdev,
827 u32 target_addr, u32 toggle_addr)
828 {
829 u32 cnt = 0;
830
831 do {
832 rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x0);
833 rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x2);
834
835 if (rtw_read32_mask(rtwdev, target_addr, 0xf) == 0x6)
836 return true;
837
838 } while (cnt++ < 100);
839
840 return false;
841 }
842
843 static bool rtw8822c_dac_cal_restore_path(struct rtw_dev *rtwdev, u8 path)
844 {
845 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
846 u32 w_off = 0x1c;
847 u32 r_off = 0x2c;
848 u32 w_i, r_i, w_q, r_q;
849 u32 value;
850 u32 i;
851
852 w_i = rtw8822c_get_path_write_addr(path) + 0xb0;
853 r_i = rtw8822c_get_path_read_addr(path) + 0x08;
854 w_q = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
855 r_q = rtw8822c_get_path_read_addr(path) + 0x08 + r_off;
856
857 if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_i, w_i + 0x8))
858 return false;
859
860 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
861 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
862 value = dm_info->dack_msbk[path][0][i];
863 rtw_write32_mask(rtwdev, w_i + 0x4, 0xff8, value);
864 rtw_write32_mask(rtwdev, w_i, 0xf0000000, i);
865 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x1);
866 }
867
868 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
869
870 if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_q, w_q + 0x8))
871 return false;
872
873 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
874 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
875 value = dm_info->dack_msbk[path][1][i];
876 rtw_write32_mask(rtwdev, w_q + 0x4, 0xff8, value);
877 rtw_write32_mask(rtwdev, w_q, 0xf0000000, i);
878 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x1);
879 }
880 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
881
882 rtw_write32_mask(rtwdev, w_i + 0x8, BIT(26) | BIT(25), 0x0);
883 rtw_write32_mask(rtwdev, w_q + 0x8, BIT(26) | BIT(25), 0x0);
884 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(0), 0x0);
885 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(0), 0x0);
886
887 return true;
888 }
889
890 static bool __rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
891 {
892 if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_A))
893 return false;
894
895 if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_B))
896 return false;
897
898 return true;
899 }
900
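/* Try to reuse a previous DAC calibration: if no backup exists (the first
 * element of every backed-up vector is zero), return false so the caller
 * runs a full DACK; otherwise replay the saved DCK values and MSBK
 * vectors into both paths.
 */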
901 static bool rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
902 {
903 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
904 u32 temp[3];
905
906 /* sample the first element of both paths' I/Q vectors */
907 if (dm_info->dack_msbk[RF_PATH_A][0][0] == 0 &&
908 dm_info->dack_msbk[RF_PATH_A][1][0] == 0 &&
909 dm_info->dack_msbk[RF_PATH_B][0][0] == 0 &&
910 dm_info->dack_msbk[RF_PATH_B][1][0] == 0)
911 return false;
912
913 temp[0] = rtw_read32(rtwdev, 0x1860);
914 temp[1] = rtw_read32(rtwdev, 0x4160);
915 temp[2] = rtw_read32(rtwdev, 0x9b4);
916
917 rtw8822c_dac_cal_restore_prepare(rtwdev);
918 if (!check_hw_ready(rtwdev, 0x2808, 0x7fff80, 0xffff) ||
919 !check_hw_ready(rtwdev, 0x2834, 0x7fff80, 0xffff) ||
920 !check_hw_ready(rtwdev, 0x4508, 0x7fff80, 0xffff) ||
921 !check_hw_ready(rtwdev, 0x4534, 0x7fff80, 0xffff))
922 return false;
923
924 if (!__rtw8822c_dac_cal_restore(rtwdev)) {
925 rtw_err(rtwdev, "failed to restore dack vectors\n");
926 return false;
927 }
928
929 rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x1);
930 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
931 rtw_write32(rtwdev, 0x1860, temp[0]);
932 rtw_write32(rtwdev, 0x4160, temp[1]);
933 rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x1);
934 rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x1);
935 rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x1);
936 rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x1);
937 rtw_write32(rtwdev, 0x9b4, temp[2]);
938
939 return true;
940 }
941
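/* Top-level DAC calibration (DACK).  A successful restore of previous
 * results skips the whole procedure; otherwise each path runs ADC
 * calibration followed by up to 10 iterations of step1-step3 until the
 * residual DC offset drops below 5 codes, step4 finalizes the path, and
 * the results are backed up for the next run.
 */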
942 static void rtw8822c_rf_dac_cal(struct rtw_dev *rtwdev)
943 {
944 struct rtw_backup_info backup_rf[DACK_RF_8822C * DACK_PATH_8822C];
945 struct rtw_backup_info backup[DACK_REG_8822C];
946 u32 ic = 0, qc = 0, i;
947 u32 i_a = 0x0, q_a = 0x0, i_b = 0x0, q_b = 0x0;
948 u32 ic_a = 0x0, qc_a = 0x0, ic_b = 0x0, qc_b = 0x0;
949 u32 adc_ic_a = 0x0, adc_qc_a = 0x0, adc_ic_b = 0x0, adc_qc_b = 0x0;
950
951 if (rtw8822c_dac_cal_restore(rtwdev))
952 return;
953
954 /* not able to restore previous results, run the full calibration */
955
956 rtw8822c_dac_backup_reg(rtwdev, backup, backup_rf);
957
958 rtw8822c_dac_bb_setting(rtwdev);
959
960 /* path-A */
961 rtw8822c_dac_cal_adc(rtwdev, RF_PATH_A, &adc_ic_a, &adc_qc_a);
962 for (i = 0; i < 10; i++) {
963 rtw8822c_dac_cal_step1(rtwdev, RF_PATH_A);
964 rtw8822c_dac_cal_step2(rtwdev, RF_PATH_A, &ic, &qc);
965 ic_a = ic;
966 qc_a = qc;
967
968 rtw8822c_dac_cal_step3(rtwdev, RF_PATH_A, adc_ic_a, adc_qc_a,
969 &ic, &qc, &i_a, &q_a);
970
971 if (ic < 5 && qc < 5)
972 break;
973 }
974 rtw8822c_dac_cal_step4(rtwdev, RF_PATH_A);
975
976 /* path-B */
977 rtw8822c_dac_cal_adc(rtwdev, RF_PATH_B, &adc_ic_b, &adc_qc_b);
978 for (i = 0; i < 10; i++) {
979 rtw8822c_dac_cal_step1(rtwdev, RF_PATH_B);
980 rtw8822c_dac_cal_step2(rtwdev, RF_PATH_B, &ic, &qc);
981 ic_b = ic;
982 qc_b = qc;
983
984 rtw8822c_dac_cal_step3(rtwdev, RF_PATH_B, adc_ic_b, adc_qc_b,
985 &ic, &qc, &i_b, &q_b);
986
987 if (ic < 5 && qc < 5)
988 break;
989 }
990 rtw8822c_dac_cal_step4(rtwdev, RF_PATH_B);
991
992 rtw_write32(rtwdev, 0x1b00, 0x00000008);
993 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
994 rtw_write8(rtwdev, 0x1bcc, 0x0);
995 rtw_write32(rtwdev, 0x1b00, 0x0000000a);
996 rtw_write8(rtwdev, 0x1bcc, 0x0);
997
998 rtw8822c_dac_restore_reg(rtwdev, backup, backup_rf);
999
1000 /* back up the results for later restore, saving a lot of time */
1001 rtw8822c_dac_cal_backup(rtwdev);
1002
1003 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: ic=0x%x, qc=0x%x\n", ic_a, qc_a);
1004 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: ic=0x%x, qc=0x%x\n", ic_b, qc_b);
1005 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: i=0x%x, q=0x%x\n", i_a, q_a);
1006 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: i=0x%x, q=0x%x\n", i_b, q_b);
1007 }
1008
1009 static void rtw8822c_rf_x2_check(struct rtw_dev *rtwdev)
1010 {
1011 u8 x2k_busy;
1012
1013 mdelay(1);
1014 x2k_busy = rtw_read_rf(rtwdev, RF_PATH_A, 0xb8, BIT(15));
1015 if (x2k_busy == 1) {
1016 rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0xC4440);
1017 rtw_write_rf(rtwdev, RF_PATH_A, 0xba, RFREG_MASK, 0x6840D);
1018 rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0x80440);
1019 mdelay(1);
1020 }
1021 }
1022
1023 static void rtw8822c_set_power_trim(struct rtw_dev *rtwdev, s8 bb_gain[2][8])
1024 {
1025 #define RF_SET_POWER_TRIM(_path, _seq, _idx) \
1026 do { \
1027 rtw_write_rf(rtwdev, _path, 0x33, RFREG_MASK, _seq); \
1028 rtw_write_rf(rtwdev, _path, 0x3f, RFREG_MASK, \
1029 bb_gain[_path][_idx]); \
1030 } while (0)
1031 u8 path;
1032
1033 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1034 rtw_write_rf(rtwdev, path, 0xee, BIT(19), 1);
1035 RF_SET_POWER_TRIM(path, 0x0, 0);
1036 RF_SET_POWER_TRIM(path, 0x1, 1);
1037 RF_SET_POWER_TRIM(path, 0x2, 2);
1038 RF_SET_POWER_TRIM(path, 0x3, 2);
1039 RF_SET_POWER_TRIM(path, 0x4, 3);
1040 RF_SET_POWER_TRIM(path, 0x5, 4);
1041 RF_SET_POWER_TRIM(path, 0x6, 5);
1042 RF_SET_POWER_TRIM(path, 0x7, 6);
1043 RF_SET_POWER_TRIM(path, 0x8, 7);
1044 RF_SET_POWER_TRIM(path, 0x9, 3);
1045 RF_SET_POWER_TRIM(path, 0xa, 4);
1046 RF_SET_POWER_TRIM(path, 0xb, 5);
1047 RF_SET_POWER_TRIM(path, 0xc, 6);
1048 RF_SET_POWER_TRIM(path, 0xd, 7);
1049 RF_SET_POWER_TRIM(path, 0xe, 7);
1050 rtw_write_rf(rtwdev, path, 0xee, BIT(19), 0);
1051 }
1052 #undef RF_SET_POWER_TRIM
1053 }
1054
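/* Read the per-band power-trim values from the physical efuse and, if any
 * were programmed, load them into the RF gain table via
 * rtw8822c_set_power_trim(); DPD is then disabled for all rates through
 * REG_DIS_DPD.
 */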
1055 static void rtw8822c_power_trim(struct rtw_dev *rtwdev)
1056 {
1057 u8 pg_pwr = 0xff, i, path, idx;
1058 s8 bb_gain[2][8] = {};
1059 u16 rf_efuse_2g[3] = {PPG_2GL_TXAB, PPG_2GM_TXAB, PPG_2GH_TXAB};
1060 u16 rf_efuse_5g[2][5] = {{PPG_5GL1_TXA, PPG_5GL2_TXA, PPG_5GM1_TXA,
1061 PPG_5GM2_TXA, PPG_5GH1_TXA},
1062 {PPG_5GL1_TXB, PPG_5GL2_TXB, PPG_5GM1_TXB,
1063 PPG_5GM2_TXB, PPG_5GH1_TXB} };
1064 bool set = false;
1065
1066 for (i = 0; i < ARRAY_SIZE(rf_efuse_2g); i++) {
1067 rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[i], &pg_pwr);
1068 if (pg_pwr == EFUSE_READ_FAIL)
1069 continue;
1070 set = true;
1071 bb_gain[RF_PATH_A][i] = FIELD_GET(PPG_2G_A_MASK, pg_pwr);
1072 bb_gain[RF_PATH_B][i] = FIELD_GET(PPG_2G_B_MASK, pg_pwr);
1073 }
1074
1075 for (i = 0; i < ARRAY_SIZE(rf_efuse_5g[0]); i++) {
1076 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1077 rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path][i],
1078 &pg_pwr);
1079 if (pg_pwr == EFUSE_READ_FAIL)
1080 continue;
1081 set = true;
1082 idx = i + ARRAY_SIZE(rf_efuse_2g);
1083 bb_gain[path][idx] = FIELD_GET(PPG_5G_MASK, pg_pwr);
1084 }
1085 }
1086 if (set)
1087 rtw8822c_set_power_trim(rtwdev, bb_gain);
1088
1089 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1090 }
1091
1092 static void rtw8822c_thermal_trim(struct rtw_dev *rtwdev)
1093 {
1094 u16 rf_efuse[2] = {PPG_THERMAL_A, PPG_THERMAL_B};
1095 u8 pg_therm = 0xff, thermal[2] = {0}, path;
1096
1097 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1098 rtw_read8_physical_efuse(rtwdev, rf_efuse[path], &pg_therm);
1099 if (pg_therm == EFUSE_READ_FAIL)
1100 return;
1101 /* The efuse value of BIT(0) shall be moved to BIT(3), and the value
1102 * of BIT(1) to BIT(3) should be right-shifted by 1 bit.
1103 */
1104 thermal[path] = FIELD_GET(GENMASK(3, 1), pg_therm);
1105 thermal[path] |= FIELD_PREP(BIT(3), pg_therm & BIT(0));
1106 rtw_write_rf(rtwdev, path, 0x43, RF_THEMAL_MASK, thermal[path]);
1107 }
1108 }
1109
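/* Load the 2G and 5G PA bias values programmed in the physical efuse into
 * the RF_PA register of each path.
 */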
1110 static void rtw8822c_pa_bias(struct rtw_dev *rtwdev)
1111 {
1112 u16 rf_efuse_2g[2] = {PPG_PABIAS_2GA, PPG_PABIAS_2GB};
1113 u16 rf_efuse_5g[2] = {PPG_PABIAS_5GA, PPG_PABIAS_5GB};
1114 u8 pg_pa_bias = 0xff, path;
1115
1116 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1117 rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[path],
1118 &pg_pa_bias);
1119 if (pg_pa_bias == EFUSE_READ_FAIL)
1120 return;
1121 pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1122 rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_2G_MASK, pg_pa_bias);
1123 }
1124 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1125 rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path],
1126 &pg_pa_bias);
1127 pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1128 rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_5G_MASK, pg_pa_bias);
1129 }
1130 }
1131
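/* WiFi/BT RF calibration handshake: before an RFK, wait for any ongoing BT
 * IQK to finish and tell the firmware that WiFi RFK is starting; afterwards
 * inform the firmware that it has finished.  Timeouts are only logged and
 * the calibration proceeds regardless.
 */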
1132 static void rtw8822c_rfk_handshake(struct rtw_dev *rtwdev, bool is_before_k)
1133 {
1134 struct rtw_dm_info *dm = &rtwdev->dm_info;
1135 u8 u1b_tmp;
1136 u8 u4b_tmp;
1137 int ret;
1138
1139 if (is_before_k) {
1140 rtw_dbg(rtwdev, RTW_DBG_RFK,
1141 "[RFK] WiFi / BT RFK handshake start!!\n");
1142
1143 if (!dm->is_bt_iqk_timeout) {
1144 ret = read_poll_timeout(rtw_read32_mask, u4b_tmp,
1145 u4b_tmp == 0, 20, 600000, false,
1146 rtwdev, REG_PMC_DBG_CTRL1,
1147 BITS_PMC_BT_IQK_STS);
1148 if (ret) {
1149 rtw_dbg(rtwdev, RTW_DBG_RFK,
1150 "[RFK] Wait BT IQK finish timeout!!\n");
1151 dm->is_bt_iqk_timeout = true;
1152 }
1153 }
1154
1155 rtw_fw_inform_rfk_status(rtwdev, true);
1156
1157 ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1158 u1b_tmp == 1, 20, 100000, false,
1159 rtwdev, REG_ARFR4, BIT_WL_RFK);
1160 if (ret)
1161 rtw_dbg(rtwdev, RTW_DBG_RFK,
1162 "[RFK] Send WiFi RFK start H2C cmd FAIL!!\n");
1163 } else {
1164 rtw_fw_inform_rfk_status(rtwdev, false);
1165 ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1166 u1b_tmp == 1, 20, 100000, false,
1167 rtwdev, REG_ARFR4,
1168 BIT_WL_RFK);
1169 if (ret)
1170 rtw_dbg(rtwdev, RTW_DBG_RFK,
1171 "[RFK] Send WiFi RFK finish H2C cmd FAIL!!\n");
1172
1173 rtw_dbg(rtwdev, RTW_DBG_RFK,
1174 "[RFK] WiFi / BT RFK handshake finish!!\n");
1175 }
1176 }
1177
1178 static void rtw8822c_rfk_power_save(struct rtw_dev *rtwdev,
1179 bool is_power_save)
1180 {
1181 u8 path;
1182
1183 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1184 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1185 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_PS_EN,
1186 is_power_save ? 0 : 1);
1187 }
1188 }
1189
1190 static void rtw8822c_txgapk_backup_bb_reg(struct rtw_dev *rtwdev, const u32 reg[],
1191 u32 reg_backup[], u32 reg_num)
1192 {
1193 u32 i;
1194
1195 for (i = 0; i < reg_num; i++) {
1196 reg_backup[i] = rtw_read32(rtwdev, reg[i]);
1197
1198 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Backup BB 0x%x = 0x%x\n",
1199 reg[i], reg_backup[i]);
1200 }
1201 }
1202
1203 static void rtw8822c_txgapk_reload_bb_reg(struct rtw_dev *rtwdev,
1204 const u32 reg[], u32 reg_backup[],
1205 u32 reg_num)
1206 {
1207 u32 i;
1208
1209 for (i = 0; i < reg_num; i++) {
1210 rtw_write32(rtwdev, reg[i], reg_backup[i]);
1211 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Reload BB 0x%x = 0x%x\n",
1212 reg[i], reg_backup[i]);
1213 }
1214 }
1215
1216 static bool check_rf_status(struct rtw_dev *rtwdev, u8 status)
1217 {
1218 u8 reg_rf0_a, reg_rf0_b;
1219
1220 reg_rf0_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A,
1221 RF_MODE_TRXAGC, BIT_RF_MODE);
1222 reg_rf0_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B,
1223 RF_MODE_TRXAGC, BIT_RF_MODE);
1224
1225 if (reg_rf0_a == status || reg_rf0_b == status)
1226 return false;
1227
1228 return true;
1229 }
1230
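/* Pause the AC queues and stop the TX FIFO, then poll check_rf_status()
 * until neither RF path's mode field still reads 0x2 before gap
 * calibration begins.
 */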
1231 static void rtw8822c_txgapk_tx_pause(struct rtw_dev *rtwdev)
1232 {
1233 bool status;
1234 int ret;
1235
1236 rtw_write8(rtwdev, REG_TXPAUSE, BIT_AC_QUEUE);
1237 rtw_write32_mask(rtwdev, REG_TX_FIFO, BIT_STOP_TX, 0x2);
1238
1239 ret = read_poll_timeout_atomic(check_rf_status, status, status,
1240 2, 5000, false, rtwdev, 2);
1241 if (ret)
1242 rtw_warn(rtwdev, "failed to pause TX\n");
1243
1244 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Tx pause!!\n");
1245 }
1246
1247 static void rtw8822c_txgapk_bb_dpk(struct rtw_dev *rtwdev, u8 path)
1248 {
1249 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1250
1251 rtw_write32_mask(rtwdev, REG_ENFN, BIT_IQK_DPK_EN, 0x1);
1252 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1253 BIT_IQK_DPK_CLOCK_SRC, 0x1);
1254 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1255 BIT_IQK_DPK_RESET_SRC, 0x1);
1256 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_EN_IOQ_IQK_DPK, 0x1);
1257 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_TST_IQK2SET_SRC, 0x0);
1258 rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x1ff);
1259
1260 if (path == RF_PATH_A) {
1261 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1262 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1263 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x1);
1264 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1265 BIT_TX_SCALE_0DB, 0x1);
1266 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x0);
1267 } else if (path == RF_PATH_B) {
1268 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1269 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1270 rtw_write32_mask(rtwdev, REG_3WIRE2,
1271 BIT_DIS_SHARERX_TXGAT, 0x1);
1272 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1273 BIT_TX_SCALE_0DB, 0x1);
1274 rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x0);
1275 }
1276 rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x2);
1277 }
1278
1279 static void rtw8822c_txgapk_afe_dpk(struct rtw_dev *rtwdev, u8 path)
1280 {
1281 u32 reg;
1282
1283 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1284
1285 if (path == RF_PATH_A) {
1286 reg = REG_ANAPAR_A;
1287 } else if (path == RF_PATH_B) {
1288 reg = REG_ANAPAR_B;
1289 } else {
1290 rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1291 return;
1292 }
1293
1294 rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, MASKDWORD);
1295 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1296 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1297 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x701f0001);
1298 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x702f0001);
1299 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x703f0001);
1300 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x704f0001);
1301 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705f0001);
1302 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x706f0001);
1303 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707f0001);
1304 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708f0001);
1305 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709f0001);
1306 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70af0001);
1307 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bf0001);
1308 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cf0001);
1309 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70df0001);
1310 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ef0001);
1311 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1312 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1313 }
1314
1315 static void rtw8822c_txgapk_afe_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1316 {
1317 u32 reg;
1318
1319 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1320
1321 if (path == RF_PATH_A) {
1322 reg = REG_ANAPAR_A;
1323 } else if (path == RF_PATH_B) {
1324 reg = REG_ANAPAR_B;
1325 } else {
1326 rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1327 return;
1328 }
1329 rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, 0xffa1005e);
1330 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700b8041);
1331 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70144041);
1332 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70244041);
1333 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70344041);
1334 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70444041);
1335 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705b8041);
1336 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70644041);
1337 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707b8041);
1338 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708b8041);
1339 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709b8041);
1340 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ab8041);
1341 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bb8041);
1342 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cb8041);
1343 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70db8041);
1344 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70eb8041);
1345 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70fb8041);
1346 }
1347
1348 static void rtw8822c_txgapk_bb_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1349 {
1350 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1351
1352 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x0);
1353 rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TIA_BYPASS, 0x0);
1354 rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TXBB, 0x0);
1355
1356 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1357 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1358 rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1359 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1360 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x1);
1361 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1362 rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1363 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1364 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1365 rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x0);
1366
1367 if (path == RF_PATH_A) {
1368 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1369 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1370 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x0);
1371 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1372 BIT_TX_SCALE_0DB, 0x0);
1373 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x3);
1374 } else if (path == RF_PATH_B) {
1375 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1376 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1377 rtw_write32_mask(rtwdev, REG_3WIRE2,
1378 BIT_DIS_SHARERX_TXGAT, 0x0);
1379 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1380 BIT_TX_SCALE_0DB, 0x0);
1381 rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x3);
1382 }
1383
1384 rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x0);
1385 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_CFIR_EN, 0x5);
1386 }
1387
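/* Gain codes whose TX PAD fields already reach 0xC/0xE (logged as
 * ">= 0xCEX") are treated as being at the top of the gain table and are
 * not offset-adjusted by TXGAPK.
 */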
1388 static bool _rtw8822c_txgapk_gain_valid(struct rtw_dev *rtwdev, u32 gain)
1389 {
1390 if ((FIELD_GET(BIT_GAIN_TX_PAD_H, gain) >= 0xc) &&
1391 (FIELD_GET(BIT_GAIN_TX_PAD_L, gain) >= 0xe))
1392 return true;
1393
1394 return false;
1395 }
1396
1397 static void _rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev,
1398 u8 band, u8 path)
1399 {
1400 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1401 u32 v, tmp_3f = 0;
1402 u8 gain, check_txgain;
1403
1404 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1405
1406 switch (band) {
1407 case RF_BAND_2G_OFDM:
1408 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1409 break;
1410 case RF_BAND_5G_L:
1411 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1412 break;
1413 case RF_BAND_5G_M:
1414 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1415 break;
1416 case RF_BAND_5G_H:
1417 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1418 break;
1419 default:
1420 break;
1421 }
1422
1423 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, MASKBYTE0, 0x88);
1424
1425 check_txgain = 0;
1426 for (gain = 0; gain < RF_GAIN_NUM; gain++) {
1427 v = txgapk->rf3f_bp[band][gain][path];
1428 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1429 if (!check_txgain) {
1430 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1431 check_txgain = 1;
1432 }
1433 rtw_dbg(rtwdev, RTW_DBG_RFK,
1434 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1435 txgapk->rf3f_bp[band][gain][path]);
1436 } else {
1437 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1438 }
1439
1440 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN, tmp_3f);
1441 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_I_GAIN, gain);
1442 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x1);
1443 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x0);
1444
1445 rtw_dbg(rtwdev, RTW_DBG_RFK,
1446 "[TXGAPK] Band=%d 0x1b98[11:0]=0x%03X path=%d\n",
1447 band, tmp_3f, path);
1448 }
1449 }
1450
1451 static void rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev)
1452 {
1453 u8 path, band;
1454
1455 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1456 __func__, rtwdev->dm_info.gapk.channel);
1457
1458 for (band = 0; band < RF_BAND_MAX; band++) {
1459 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1460 _rtw8822c_txgapk_write_gain_bb_table(rtwdev,
1461 band, path);
1462 }
1463 }
1464 }
1465
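/* Set up the selected path for a single-tone measurement, trigger the
 * GAPK engine through the 0x1b00 command sequence, wait for the report
 * status 0x55, then read the per-gain-step offsets from REG_STAT_RPT and
 * sign-extend the 4-bit values.
 */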
1466 static void rtw8822c_txgapk_read_offset(struct rtw_dev *rtwdev, u8 path)
1467 {
1468 static const u32 cfg1_1b00[2] = {0x00000d18, 0x00000d2a};
1469 static const u32 cfg2_1b00[2] = {0x00000d19, 0x00000d2b};
1470 static const u32 set_pi[2] = {REG_RSV_CTRL, REG_WLRF1};
1471 static const u32 path_setting[2] = {REG_ORITXCODE, REG_ORITXCODE2};
1472 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1473 u8 channel = txgapk->channel;
1474 u32 val;
1475 int i;
1476
1477 if (path >= ARRAY_SIZE(cfg1_1b00) ||
1478 path >= ARRAY_SIZE(cfg2_1b00) ||
1479 path >= ARRAY_SIZE(set_pi) ||
1480 path >= ARRAY_SIZE(path_setting)) {
1481 rtw_warn(rtwdev, "[TXGAPK] wrong path %d\n", path);
1482 return;
1483 }
1484
1485 rtw_write32_mask(rtwdev, REG_ANTMAP0, BIT_ANT_PATH, path + 1);
1486 rtw_write32_mask(rtwdev, REG_TXLGMAP, MASKDWORD, 0xe4e40000);
1487 rtw_write32_mask(rtwdev, REG_TXANTSEG, BIT_ANTSEG, 0x3);
1488 rtw_write32_mask(rtwdev, path_setting[path], MASK20BITS, 0x33312);
1489 rtw_write32_mask(rtwdev, path_setting[path], BIT_PATH_EN, 0x1);
1490 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x0);
1491 rtw_write_rf(rtwdev, path, RF_LUTDBG, BIT_TXA_TANK, 0x1);
1492 rtw_write_rf(rtwdev, path, RF_IDAC, BIT_TX_MODE, 0x820);
1493 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1494 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1495
1496 rtw_write32_mask(rtwdev, REG_TX_TONE_IDX, MASKBYTE0, 0x018);
1497 fsleep(1000);
1498 if (channel >= 1 && channel <= 14)
1499 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_2G_SWING);
1500 else
1501 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_5G_SWING);
1502 fsleep(1000);
1503
1504 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg1_1b00[path]);
1505 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg2_1b00[path]);
1506
1507 read_poll_timeout(rtw_read32_mask, val,
1508 val == 0x55, 1000, 100000, false,
1509 rtwdev, REG_RPT_CIP, BIT_RPT_CIP_STATUS);
1510
1511 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x2);
1512 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1513 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_EN, 0x1);
1514 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x12);
1515 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x3);
1516 val = rtw_read32(rtwdev, REG_STAT_RPT);
1517
1518 txgapk->offset[0][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1519 txgapk->offset[1][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1520 txgapk->offset[2][path] = (s8)FIELD_GET(BIT_GAPK_RPT2, val);
1521 txgapk->offset[3][path] = (s8)FIELD_GET(BIT_GAPK_RPT3, val);
1522 txgapk->offset[4][path] = (s8)FIELD_GET(BIT_GAPK_RPT4, val);
1523 txgapk->offset[5][path] = (s8)FIELD_GET(BIT_GAPK_RPT5, val);
1524 txgapk->offset[6][path] = (s8)FIELD_GET(BIT_GAPK_RPT6, val);
1525 txgapk->offset[7][path] = (s8)FIELD_GET(BIT_GAPK_RPT7, val);
1526
1527 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x4);
1528 val = rtw_read32(rtwdev, REG_STAT_RPT);
1529
1530 txgapk->offset[8][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1531 txgapk->offset[9][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1532
1533 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1534 if (txgapk->offset[i][path] & BIT(3))
1535 txgapk->offset[i][path] = txgapk->offset[i][path] |
1536 0xf0;
1537 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1538 rtw_dbg(rtwdev, RTW_DBG_RFK,
1539 "[TXGAPK] offset %d %d path=%d\n",
1540 txgapk->offset[i][path], i, path);
1541 }
1542
1543 static void rtw8822c_txgapk_calculate_offset(struct rtw_dev *rtwdev, u8 path)
1544 {
1545 static const u32 bb_reg[] = {REG_ANTMAP0, REG_TXLGMAP, REG_TXANTSEG,
1546 REG_ORITXCODE, REG_ORITXCODE2};
1547 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1548 u8 channel = txgapk->channel;
1549 u32 reg_backup[ARRAY_SIZE(bb_reg)] = {0};
1550
1551 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1552 __func__, channel);
1553
1554 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1555 reg_backup, ARRAY_SIZE(bb_reg));
1556
1557 if (channel >= 1 && channel <= 14) {
1558 rtw_write32_mask(rtwdev,
1559 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1560 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1561 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1562 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1563 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1564 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x5000f);
1565 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x0);
1566 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x1);
1567 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0f);
1568 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1569 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
1570 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1571 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1572
1573 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x00);
1574 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1575
1576 rtw8822c_txgapk_read_offset(rtwdev, path);
1577 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1578
1579 } else {
1580 rtw_write32_mask(rtwdev,
1581 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1582 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1583 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1584 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1585 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1586 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50011);
1587 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x3);
1588 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x3);
1589 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
1590 rtw_write_rf(rtwdev, path,
1591 RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0x2);
1592 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x12);
1593 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1594 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1595 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1596 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x5);
1597
1598 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1599
1600 if (channel >= 36 && channel <= 64)
1601 rtw_write32_mask(rtwdev,
1602 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1603 else if (channel >= 100 && channel <= 144)
1604 rtw_write32_mask(rtwdev,
1605 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1606 else if (channel >= 149 && channel <= 177)
1607 rtw_write32_mask(rtwdev,
1608 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1609
1610 rtw8822c_txgapk_read_offset(rtwdev, path);
1611 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1612 }
1613 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1614 reg_backup, ARRAY_SIZE(bb_reg));
1615 }
1616
1617 static void rtw8822c_txgapk_rf_restore(struct rtw_dev *rtwdev, u8 path)
1618 {
1619 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1620
1621 if (path >= rtwdev->hal.rf_path_num)
1622 return;
1623
1624 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x3);
1625 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x0);
1626 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x0);
1627 }
1628
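/* Apply a gap offset to a TX gain code. The offset appears to be in
 * half-steps of the gain code: the gain is doubled, the signed offset is
 * added, and the result is halved again, with any leftover half-step
 * carried in the gain-extension bit (BIT_GAIN_EXT). Gain codes already in
 * the 0xCEX range are returned unchanged.
 */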
1629 static u32 rtw8822c_txgapk_cal_gain(struct rtw_dev *rtwdev, u32 gain, s8 offset)
1630 {
1631 u32 gain_x2, new_gain;
1632
1633 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1634
1635 if (_rtw8822c_txgapk_gain_valid(rtwdev, gain)) {
1636 new_gain = gain;
1637 rtw_dbg(rtwdev, RTW_DBG_RFK,
1638 "[TXGAPK] gain=0x%03X(>=0xCEX) offset=%d new_gain=0x%03X\n",
1639 gain, offset, new_gain);
1640 return new_gain;
1641 }
1642
1643 gain_x2 = (gain << 1) + offset;
1644 new_gain = (gain_x2 >> 1) | (gain_x2 & BIT(0) ? BIT_GAIN_EXT : 0);
1645
1646 rtw_dbg(rtwdev, RTW_DBG_RFK,
1647 "[TXGAPK] gain=0x%X offset=%d new_gain=0x%X\n",
1648 gain, offset, new_gain);
1649
1650 return new_gain;
1651 }
1652
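/* Fold the measured gap offsets into the backed-up TX gain table and write
 * the corrected values back to the RF gain LUT. The offsets are accumulated
 * from each index up to the highest one, and each corrected entry is then
 * programmed through the LUT write-enable sequence (RF_LUTWE2 / RF_LUTWA /
 * RF_LUTWD0) for the band the current channel belongs to.
 */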
1653 static void rtw8822c_txgapk_write_tx_gain(struct rtw_dev *rtwdev)
1654 {
1655 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1656 u32 i, j, tmp = 0x20, tmp_3f, v;
1657 s8 offset_tmp[RF_GAIN_NUM] = {0};
1658 u8 path, band = RF_BAND_2G_OFDM, channel = txgapk->channel;
1659
1660 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1661
1662 if (channel >= 1 && channel <= 14) {
1663 tmp = 0x20;
1664 band = RF_BAND_2G_OFDM;
1665 } else if (channel >= 36 && channel <= 64) {
1666 tmp = 0x200;
1667 band = RF_BAND_5G_L;
1668 } else if (channel >= 100 && channel <= 144) {
1669 tmp = 0x280;
1670 band = RF_BAND_5G_M;
1671 } else if (channel >= 149 && channel <= 177) {
1672 tmp = 0x300;
1673 band = RF_BAND_5G_H;
1674 } else {
1675 rtw_err(rtwdev, "[TXGAPK] unknown channel %d!!\n", channel);
1676 return;
1677 }
1678
1679 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1680 for (i = 0; i < RF_GAIN_NUM; i++) {
1681 offset_tmp[i] = 0;
1682 for (j = i; j < RF_GAIN_NUM; j++) {
1683 v = txgapk->rf3f_bp[band][j][path];
1684 if (_rtw8822c_txgapk_gain_valid(rtwdev, v))
1685 continue;
1686
1687 offset_tmp[i] += txgapk->offset[j][path];
1688 txgapk->fianl_offset[i][path] = offset_tmp[i];
1689 }
1690
1691 v = txgapk->rf3f_bp[band][i][path];
1692 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1693 rtw_dbg(rtwdev, RTW_DBG_RFK,
1694 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1695 txgapk->rf3f_bp[band][i][path]);
1696 } else {
1697 txgapk->rf3f_fs[path][i] = offset_tmp[i];
1698 rtw_dbg(rtwdev, RTW_DBG_RFK,
1699 "[TXGAPK] offset %d %d\n",
1700 offset_tmp[i], i);
1701 }
1702 }
1703
1704 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x10000);
1705 for (i = 0; i < RF_GAIN_NUM; i++) {
1706 rtw_write_rf(rtwdev, path,
1707 RF_LUTWA, RFREG_MASK, tmp + i);
1708
1709 tmp_3f = rtw8822c_txgapk_cal_gain(rtwdev,
1710 txgapk->rf3f_bp[band][i][path],
1711 offset_tmp[i]);
1712 rtw_write_rf(rtwdev, path, RF_LUTWD0,
1713 BIT_GAIN_EXT | BIT_DATA_L, tmp_3f);
1714
1715 rtw_dbg(rtwdev, RTW_DBG_RFK,
1716 "[TXGAPK] 0x33=0x%05X 0x3f=0x%04X\n",
1717 tmp + i, tmp_3f);
1718 }
1719 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x0);
1720 }
1721 }
1722
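/* Snapshot the default TX gain (RF 0x3f) table for every band, once per
 * power-on. For each band the channel/band fields of RF_CFGCH are forced
 * with the 3-wire interface disabled, the gain index in RF_MODE_TRXAGC is
 * stepped, and the values read back from RF_TX_RESULT are stored in
 * rf3f_bp[]. The original RF_CFGCH value is restored afterwards.
 */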
1723 static void rtw8822c_txgapk_save_all_tx_gain_table(struct rtw_dev *rtwdev)
1724 {
1725 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1726 static const u32 three_wire[2] = {REG_3WIRE, REG_3WIRE2};
1727 static const u8 ch_num[RF_BAND_MAX] = {1, 1, 36, 100, 149};
1728 static const u8 band_num[RF_BAND_MAX] = {0x0, 0x0, 0x1, 0x3, 0x5};
1729 static const u8 cck[RF_BAND_MAX] = {0x1, 0x0, 0x0, 0x0, 0x0};
1730 u8 path, band, gain, rf0_idx;
1731 u32 rf18, v;
1732
1733 if (rtwdev->dm_info.dm_flags & BIT(RTW_DM_CAP_TXGAPK))
1734 return;
1735
1736 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1737
1738 if (txgapk->read_txgain == 1) {
1739 rtw_dbg(rtwdev, RTW_DBG_RFK,
1740 "[TXGAPK] Already Read txgapk->read_txgain return!!!\n");
1741 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1742 return;
1743 }
1744
1745 for (band = 0; band < RF_BAND_MAX; band++) {
1746 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1747 rf18 = rtw_read_rf(rtwdev, path, RF_CFGCH, RFREG_MASK);
1748
1749 rtw_write32_mask(rtwdev,
1750 three_wire[path], BIT_3WIRE_EN, 0x0);
1751 rtw_write_rf(rtwdev, path,
1752 RF_CFGCH, MASKBYTE0, ch_num[band]);
1753 rtw_write_rf(rtwdev, path,
1754 RF_CFGCH, BIT_BAND, band_num[band]);
1755 rtw_write_rf(rtwdev, path,
1756 RF_BW_TRXBB, BIT_DBG_CCK_CCA, cck[band]);
1757 rtw_write_rf(rtwdev, path,
1758 RF_BW_TRXBB, BIT_TX_CCK_IND, cck[band]);
1759 gain = 0;
1760 for (rf0_idx = 1; rf0_idx < 32; rf0_idx += 3) {
1761 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC,
1762 MASKBYTE0, rf0_idx);
1763 v = rtw_read_rf(rtwdev, path,
1764 RF_TX_RESULT, RFREG_MASK);
1765 txgapk->rf3f_bp[band][gain][path] = v & BIT_DATA_L;
1766
1767 rtw_dbg(rtwdev, RTW_DBG_RFK,
1768 "[TXGAPK] 0x5f=0x%03X band=%d path=%d\n",
1769 txgapk->rf3f_bp[band][gain][path],
1770 band, path);
1771 gain++;
1772 }
1773 rtw_write_rf(rtwdev, path, RF_CFGCH, RFREG_MASK, rf18);
1774 rtw_write32_mask(rtwdev,
1775 three_wire[path], BIT_3WIRE_EN, 0x3);
1776 }
1777 }
1778 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1779 txgapk->read_txgain = 1;
1780 }
1781
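/* Top-level TX gap calibration. It is skipped when TSSI power tracking is
 * in use (power_track_type 4..7). For each RF path the BB/AFE DPK setup is
 * applied, the gap offsets are read back, the RF/AFE/BB state is restored,
 * and finally the TX gain table is rewritten with the new offsets.
 */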
1782 static void rtw8822c_txgapk(struct rtw_dev *rtwdev)
1783 {
1784 static const u32 bb_reg[2] = {REG_TX_PTCL_CTRL, REG_TX_FIFO};
1785 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1786 u32 bb_reg_backup[2];
1787 u8 path;
1788
1789 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1790
1791 rtw8822c_txgapk_save_all_tx_gain_table(rtwdev);
1792
1793 if (txgapk->read_txgain == 0) {
1794 rtw_dbg(rtwdev, RTW_DBG_RFK,
1795 "[TXGAPK] txgapk->read_txgain == 0 return!!!\n");
1796 return;
1797 }
1798
1799 if (rtwdev->efuse.power_track_type >= 4 &&
1800 rtwdev->efuse.power_track_type <= 7) {
1801 rtw_dbg(rtwdev, RTW_DBG_RFK,
1802 "[TXGAPK] Normal Mode in TSSI mode. return!!!\n");
1803 return;
1804 }
1805
1806 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1807 bb_reg_backup, ARRAY_SIZE(bb_reg));
1808 rtw8822c_txgapk_tx_pause(rtwdev);
1809 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1810 txgapk->channel = rtw_read_rf(rtwdev, path,
1811 RF_CFGCH, RFREG_MASK) & MASKBYTE0;
1812 rtw8822c_txgapk_bb_dpk(rtwdev, path);
1813 rtw8822c_txgapk_afe_dpk(rtwdev, path);
1814 rtw8822c_txgapk_calculate_offset(rtwdev, path);
1815 rtw8822c_txgapk_rf_restore(rtwdev, path);
1816 rtw8822c_txgapk_afe_dpk_restore(rtwdev, path);
1817 rtw8822c_txgapk_bb_dpk_restore(rtwdev, path);
1818 }
1819 rtw8822c_txgapk_write_tx_gain(rtwdev);
1820 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1821 bb_reg_backup, ARRAY_SIZE(bb_reg));
1822 }
1823
1824 static void rtw8822c_do_gapk(struct rtw_dev *rtwdev)
1825 {
1826 struct rtw_dm_info *dm = &rtwdev->dm_info;
1827
1828 if (dm->dm_flags & BIT(RTW_DM_CAP_TXGAPK)) {
1829 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] feature disable!!!\n");
1830 return;
1831 }
1832 rtw8822c_rfk_handshake(rtwdev, true);
1833 rtw8822c_txgapk(rtwdev);
1834 rtw8822c_rfk_handshake(rtwdev, false);
1835 }
1836
1837 static void rtw8822c_rf_init(struct rtw_dev *rtwdev)
1838 {
1839 rtw8822c_rf_dac_cal(rtwdev);
1840 rtw8822c_rf_x2_check(rtwdev);
1841 rtw8822c_thermal_trim(rtwdev);
1842 rtw8822c_power_trim(rtwdev);
1843 rtw8822c_pa_bias(rtwdev);
1844 }
1845
1846 static void rtw8822c_pwrtrack_init(struct rtw_dev *rtwdev)
1847 {
1848 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1849 u8 path;
1850
1851 for (path = RF_PATH_A; path < RTW_RF_PATH_MAX; path++) {
1852 dm_info->delta_power_index[path] = 0;
1853 ewma_thermal_init(&dm_info->avg_thermal[path]);
1854 dm_info->thermal_avg[path] = 0xff;
1855 }
1856
1857 dm_info->pwr_trk_triggered = false;
1858 dm_info->thermal_meter_k = rtwdev->efuse.thermal_meter_k;
1859 dm_info->thermal_meter_lck = rtwdev->efuse.thermal_meter_k;
1860 }
1861
1862 static void rtw8822c_phy_set_param(struct rtw_dev *rtwdev)
1863 {
1864 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1865 struct rtw_hal *hal = &rtwdev->hal;
1866 u8 crystal_cap;
1867 u8 cck_gi_u_bnd_msb = 0;
1868 u8 cck_gi_u_bnd_lsb = 0;
1869 u8 cck_gi_l_bnd_msb = 0;
1870 u8 cck_gi_l_bnd_lsb = 0;
1871 bool is_tx2_path;
1872
1873 /* power on BB/RF domain */
1874 rtw_write8_set(rtwdev, REG_SYS_FUNC_EN,
1875 BIT_FEN_BB_GLB_RST | BIT_FEN_BB_RSTB);
1876 rtw_write8_set(rtwdev, REG_RF_CTRL,
1877 BIT_RF_EN | BIT_RF_RSTB | BIT_RF_SDM_RSTB);
1878 rtw_write32_set(rtwdev, REG_WLRF1, BIT_WLRF1_BBRF_EN);
1879
1880 /* disable low rate DPD */
1881 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1882
1883 /* pre init before header files config */
1884 rtw8822c_header_file_init(rtwdev, true);
1885
1886 rtw_phy_load_tables(rtwdev);
1887
1888 crystal_cap = rtwdev->efuse.crystal_cap & 0x7f;
1889 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, 0xfffc00,
1890 crystal_cap | (crystal_cap << 7));
1891
1892 /* post init after header files config */
1893 rtw8822c_header_file_init(rtwdev, false);
1894
1895 is_tx2_path = false;
1896 rtw8822c_config_trx_mode(rtwdev, hal->antenna_tx, hal->antenna_rx,
1897 is_tx2_path);
1898 rtw_phy_init(rtwdev);
1899
1900 cck_gi_u_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc000);
1901 cck_gi_u_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1aa8, 0xf0000);
1902 cck_gi_l_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc0);
1903 cck_gi_l_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1a70, 0x0f000000);
1904
1905 dm_info->cck_gi_u_bnd = ((cck_gi_u_bnd_msb << 4) | (cck_gi_u_bnd_lsb));
1906 dm_info->cck_gi_l_bnd = ((cck_gi_l_bnd_msb << 4) | (cck_gi_l_bnd_lsb));
1907
1908 rtw8822c_rf_init(rtwdev);
1909 rtw8822c_pwrtrack_init(rtwdev);
1910
1911 rtw_bf_phy_init(rtwdev);
1912 }
1913
1914 #define WLAN_TXQ_RPT_EN 0x1F
1915 #define WLAN_SLOT_TIME 0x09
1916 #define WLAN_PIFS_TIME 0x1C
1917 #define WLAN_SIFS_CCK_CONT_TX 0x0A
1918 #define WLAN_SIFS_OFDM_CONT_TX 0x0E
1919 #define WLAN_SIFS_CCK_TRX 0x0A
1920 #define WLAN_SIFS_OFDM_TRX 0x10
1921 #define WLAN_NAV_MAX 0xC8
1922 #define WLAN_RDG_NAV 0x05
1923 #define WLAN_TXOP_NAV 0x1B
1924 #define WLAN_CCK_RX_TSF 0x30
1925 #define WLAN_OFDM_RX_TSF 0x30
1926 #define WLAN_TBTT_PROHIBIT 0x04 /* unit : 32us */
1927 #define WLAN_TBTT_HOLD_TIME 0x064 /* unit : 32us */
1928 #define WLAN_DRV_EARLY_INT 0x04
1929 #define WLAN_BCN_CTRL_CLT0 0x10
1930 #define WLAN_BCN_DMA_TIME 0x02
1931 #define WLAN_BCN_MAX_ERR 0xFF
1932 #define WLAN_SIFS_CCK_DUR_TUNE 0x0A
1933 #define WLAN_SIFS_OFDM_DUR_TUNE 0x10
1934 #define WLAN_SIFS_CCK_CTX 0x0A
1935 #define WLAN_SIFS_CCK_IRX 0x0A
1936 #define WLAN_SIFS_OFDM_CTX 0x0E
1937 #define WLAN_SIFS_OFDM_IRX 0x0E
1938 #define WLAN_EIFS_DUR_TUNE 0x40
1939 #define WLAN_EDCA_VO_PARAM 0x002FA226
1940 #define WLAN_EDCA_VI_PARAM 0x005EA328
1941 #define WLAN_EDCA_BE_PARAM 0x005EA42B
1942 #define WLAN_EDCA_BK_PARAM 0x0000A44F
1943
1944 #define WLAN_RX_FILTER0 0xFFFFFFFF
1945 #define WLAN_RX_FILTER2 0xFFFF
1946 #define WLAN_RCR_CFG 0xE400220E
1947 #define WLAN_RXPKT_MAX_SZ 12288
1948 #define WLAN_RXPKT_MAX_SZ_512 (WLAN_RXPKT_MAX_SZ >> 9)
1949
1950 #define WLAN_AMPDU_MAX_TIME 0x70
1951 #define WLAN_RTS_LEN_TH 0xFF
1952 #define WLAN_RTS_TX_TIME_TH 0x08
1953 #define WLAN_MAX_AGG_PKT_LIMIT 0x3f
1954 #define WLAN_RTS_MAX_AGG_PKT_LIMIT 0x3f
1955 #define WLAN_PRE_TXCNT_TIME_TH 0x1E0
1956 #define FAST_EDCA_VO_TH 0x06
1957 #define FAST_EDCA_VI_TH 0x06
1958 #define FAST_EDCA_BE_TH 0x06
1959 #define FAST_EDCA_BK_TH 0x06
1960 #define WLAN_BAR_RETRY_LIMIT 0x01
1961 #define WLAN_BAR_ACK_TYPE 0x05
1962 #define WLAN_RA_TRY_RATE_AGG_LIMIT 0x08
1963 #define WLAN_RESP_TXRATE 0x84
1964 #define WLAN_ACK_TO 0x21
1965 #define WLAN_ACK_TO_CCK 0x6A
1966 #define WLAN_DATA_RATE_FB_CNT_1_4 0x01000000
1967 #define WLAN_DATA_RATE_FB_CNT_5_8 0x08070504
1968 #define WLAN_RTS_RATE_FB_CNT_5_8 0x08070504
1969 #define WLAN_DATA_RATE_FB_RATE0 0xFE01F010
1970 #define WLAN_DATA_RATE_FB_RATE0_H 0x40000000
1971 #define WLAN_RTS_RATE_FB_RATE1 0x003FF010
1972 #define WLAN_RTS_RATE_FB_RATE1_H 0x40000000
1973 #define WLAN_RTS_RATE_FB_RATE4 0x0600F010
1974 #define WLAN_RTS_RATE_FB_RATE4_H 0x400003E0
1975 #define WLAN_RTS_RATE_FB_RATE5 0x0600F015
1976 #define WLAN_RTS_RATE_FB_RATE5_H 0x000000E0
1977 #define WLAN_MULTI_ADDR 0xFFFFFFFF
1978
1979 #define WLAN_TX_FUNC_CFG1 0x30
1980 #define WLAN_TX_FUNC_CFG2 0x30
1981 #define WLAN_MAC_OPT_NORM_FUNC1 0x98
1982 #define WLAN_MAC_OPT_LB_FUNC1 0x80
1983 #define WLAN_MAC_OPT_FUNC2 0xb0810041
1984 #define WLAN_MAC_INT_MIG_CFG 0x33330000
1985
1986 #define WLAN_SIFS_CFG (WLAN_SIFS_CCK_CONT_TX | \
1987 (WLAN_SIFS_OFDM_CONT_TX << BIT_SHIFT_SIFS_OFDM_CTX) | \
1988 (WLAN_SIFS_CCK_TRX << BIT_SHIFT_SIFS_CCK_TRX) | \
1989 (WLAN_SIFS_OFDM_TRX << BIT_SHIFT_SIFS_OFDM_TRX))
1990
1991 #define WLAN_SIFS_DUR_TUNE (WLAN_SIFS_CCK_DUR_TUNE | \
1992 (WLAN_SIFS_OFDM_DUR_TUNE << 8))
1993
1994 #define WLAN_TBTT_TIME (WLAN_TBTT_PROHIBIT |\
1995 (WLAN_TBTT_HOLD_TIME << BIT_SHIFT_TBTT_HOLD_TIME_AP))
1996
1997 #define WLAN_NAV_CFG (WLAN_RDG_NAV | (WLAN_TXOP_NAV << 16))
1998 #define WLAN_RX_TSF_CFG (WLAN_CCK_RX_TSF | (WLAN_OFDM_RX_TSF) << 8)
1999
2000 #define MAC_CLK_SPEED 80 /* 80M */
2001 #define EFUSE_PCB_INFO_OFFSET 0xCA
2002
2003 static int rtw8822c_mac_init(struct rtw_dev *rtwdev)
2004 {
2005 u8 value8;
2006 u16 value16;
2007 u32 value32;
2008 u16 pre_txcnt;
2009
2010 /* txq control */
2011 value8 = rtw_read8(rtwdev, REG_FWHW_TXQ_CTRL);
2012 value8 |= (BIT(7) & ~BIT(1) & ~BIT(2));
2013 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL, value8);
2014 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL + 1, WLAN_TXQ_RPT_EN);
2015 /* sifs control */
2016 rtw_write16(rtwdev, REG_SPEC_SIFS, WLAN_SIFS_DUR_TUNE);
2017 rtw_write32(rtwdev, REG_SIFS, WLAN_SIFS_CFG);
2018 rtw_write16(rtwdev, REG_RESP_SIFS_CCK,
2019 WLAN_SIFS_CCK_CTX | WLAN_SIFS_CCK_IRX << 8);
2020 rtw_write16(rtwdev, REG_RESP_SIFS_OFDM,
2021 WLAN_SIFS_OFDM_CTX | WLAN_SIFS_OFDM_IRX << 8);
2022 /* rate fallback control */
2023 rtw_write32(rtwdev, REG_DARFRC, WLAN_DATA_RATE_FB_CNT_1_4);
2024 rtw_write32(rtwdev, REG_DARFRCH, WLAN_DATA_RATE_FB_CNT_5_8);
2025 rtw_write32(rtwdev, REG_RARFRCH, WLAN_RTS_RATE_FB_CNT_5_8);
2026 rtw_write32(rtwdev, REG_ARFR0, WLAN_DATA_RATE_FB_RATE0);
2027 rtw_write32(rtwdev, REG_ARFRH0, WLAN_DATA_RATE_FB_RATE0_H);
2028 rtw_write32(rtwdev, REG_ARFR1_V1, WLAN_RTS_RATE_FB_RATE1);
2029 rtw_write32(rtwdev, REG_ARFRH1_V1, WLAN_RTS_RATE_FB_RATE1_H);
2030 rtw_write32(rtwdev, REG_ARFR4, WLAN_RTS_RATE_FB_RATE4);
2031 rtw_write32(rtwdev, REG_ARFRH4, WLAN_RTS_RATE_FB_RATE4_H);
2032 rtw_write32(rtwdev, REG_ARFR5, WLAN_RTS_RATE_FB_RATE5);
2033 rtw_write32(rtwdev, REG_ARFRH5, WLAN_RTS_RATE_FB_RATE5_H);
2034 /* protocol configuration */
2035 rtw_write8(rtwdev, REG_AMPDU_MAX_TIME_V1, WLAN_AMPDU_MAX_TIME);
2036 rtw_write8_set(rtwdev, REG_TX_HANG_CTRL, BIT_EN_EOF_V1);
2037 pre_txcnt = WLAN_PRE_TXCNT_TIME_TH | BIT_EN_PRECNT;
2038 rtw_write8(rtwdev, REG_PRECNT_CTRL, (u8)(pre_txcnt & 0xFF));
2039 rtw_write8(rtwdev, REG_PRECNT_CTRL + 1, (u8)(pre_txcnt >> 8));
2040 value32 = WLAN_RTS_LEN_TH | (WLAN_RTS_TX_TIME_TH << 8) |
2041 (WLAN_MAX_AGG_PKT_LIMIT << 16) |
2042 (WLAN_RTS_MAX_AGG_PKT_LIMIT << 24);
2043 rtw_write32(rtwdev, REG_PROT_MODE_CTRL, value32);
2044 rtw_write16(rtwdev, REG_BAR_MODE_CTRL + 2,
2045 WLAN_BAR_RETRY_LIMIT | WLAN_RA_TRY_RATE_AGG_LIMIT << 8);
2046 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING, FAST_EDCA_VO_TH);
2047 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING + 2, FAST_EDCA_VI_TH);
2048 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING, FAST_EDCA_BE_TH);
2049 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING + 2, FAST_EDCA_BK_TH);
2050 /* close BA parser */
2051 rtw_write8_clr(rtwdev, REG_LIFETIME_EN, BIT_BA_PARSER_EN);
2052 rtw_write32_clr(rtwdev, REG_RRSR, BITS_RRSR_RSC);
2053
2054 /* EDCA configuration */
2055 rtw_write32(rtwdev, REG_EDCA_VO_PARAM, WLAN_EDCA_VO_PARAM);
2056 rtw_write32(rtwdev, REG_EDCA_VI_PARAM, WLAN_EDCA_VI_PARAM);
2057 rtw_write32(rtwdev, REG_EDCA_BE_PARAM, WLAN_EDCA_BE_PARAM);
2058 rtw_write32(rtwdev, REG_EDCA_BK_PARAM, WLAN_EDCA_BK_PARAM);
2059 rtw_write8(rtwdev, REG_PIFS, WLAN_PIFS_TIME);
2060 rtw_write8_clr(rtwdev, REG_TX_PTCL_CTRL + 1, BIT_SIFS_BK_EN >> 8);
2061 rtw_write8_set(rtwdev, REG_RD_CTRL + 1,
2062 (BIT_DIS_TXOP_CFE | BIT_DIS_LSIG_CFE |
2063 BIT_DIS_STBC_CFE) >> 8);
2064
2065 /* MAC clock configuration */
2066 rtw_write32_clr(rtwdev, REG_AFE_CTRL1, BIT_MAC_CLK_SEL);
2067 rtw_write8(rtwdev, REG_USTIME_TSF, MAC_CLK_SPEED);
2068 rtw_write8(rtwdev, REG_USTIME_EDCA, MAC_CLK_SPEED);
2069
2070 rtw_write8_set(rtwdev, REG_MISC_CTRL,
2071 BIT_EN_FREE_CNT | BIT_DIS_SECOND_CCA);
2072 rtw_write8_clr(rtwdev, REG_TIMER0_SRC_SEL, BIT_TSFT_SEL_TIMER0);
2073 rtw_write16(rtwdev, REG_TXPAUSE, 0x0000);
2074 rtw_write8(rtwdev, REG_SLOT, WLAN_SLOT_TIME);
2075 rtw_write32(rtwdev, REG_RD_NAV_NXT, WLAN_NAV_CFG);
2076 rtw_write16(rtwdev, REG_RXTSF_OFFSET_CCK, WLAN_RX_TSF_CFG);
2077 	/* Set beacon control - enable TSF and other related functions */
2078 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2079 /* Set send beacon related registers */
2080 rtw_write32(rtwdev, REG_TBTT_PROHIBIT, WLAN_TBTT_TIME);
2081 rtw_write8(rtwdev, REG_DRVERLYINT, WLAN_DRV_EARLY_INT);
2082 rtw_write8(rtwdev, REG_BCN_CTRL_CLINT0, WLAN_BCN_CTRL_CLT0);
2083 rtw_write8(rtwdev, REG_BCNDMATIM, WLAN_BCN_DMA_TIME);
2084 rtw_write8(rtwdev, REG_BCN_MAX_ERR, WLAN_BCN_MAX_ERR);
2085
2086 /* WMAC configuration */
2087 rtw_write32(rtwdev, REG_MAR, WLAN_MULTI_ADDR);
2088 rtw_write32(rtwdev, REG_MAR + 4, WLAN_MULTI_ADDR);
2089 rtw_write8(rtwdev, REG_BBPSF_CTRL + 2, WLAN_RESP_TXRATE);
2090 rtw_write8(rtwdev, REG_ACKTO, WLAN_ACK_TO);
2091 rtw_write8(rtwdev, REG_ACKTO_CCK, WLAN_ACK_TO_CCK);
2092 rtw_write16(rtwdev, REG_EIFS, WLAN_EIFS_DUR_TUNE);
2093 rtw_write8(rtwdev, REG_NAV_CTRL + 2, WLAN_NAV_MAX);
2094 rtw_write8(rtwdev, REG_WMAC_TRXPTCL_CTL_H + 2, WLAN_BAR_ACK_TYPE);
2095 rtw_write32(rtwdev, REG_RXFLTMAP0, WLAN_RX_FILTER0);
2096 rtw_write16(rtwdev, REG_RXFLTMAP2, WLAN_RX_FILTER2);
2097 rtw_write32(rtwdev, REG_RCR, WLAN_RCR_CFG);
2098 rtw_write8(rtwdev, REG_RX_PKT_LIMIT, WLAN_RXPKT_MAX_SZ_512);
2099 rtw_write8(rtwdev, REG_TCR + 2, WLAN_TX_FUNC_CFG2);
2100 rtw_write8(rtwdev, REG_TCR + 1, WLAN_TX_FUNC_CFG1);
2101 rtw_write32_set(rtwdev, REG_GENERAL_OPTION, BIT_DUMMY_FCS_READY_MASK_EN);
2102 rtw_write32(rtwdev, REG_WMAC_OPTION_FUNCTION + 8, WLAN_MAC_OPT_FUNC2);
2103 rtw_write8(rtwdev, REG_WMAC_OPTION_FUNCTION_1, WLAN_MAC_OPT_NORM_FUNC1);
2104
2105 /* init low power */
2106 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL + 2) & 0xF00F;
2107 value16 |= (BIT_RXGCK_VHT_FIFOTHR(1) | BIT_RXGCK_HT_FIFOTHR(1) |
2108 BIT_RXGCK_OFDM_FIFOTHR(1) | BIT_RXGCK_CCK_FIFOTHR(1)) >> 16;
2109 rtw_write16(rtwdev, REG_RXPSF_CTRL + 2, value16);
2110 value16 = 0;
2111 value16 = BIT_SET_RXPSF_PKTLENTHR(value16, 1);
2112 value16 |= BIT_RXPSF_CTRLEN | BIT_RXPSF_VHTCHKEN | BIT_RXPSF_HTCHKEN
2113 | BIT_RXPSF_OFDMCHKEN | BIT_RXPSF_CCKCHKEN
2114 | BIT_RXPSF_OFDMRST;
2115 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2116 rtw_write32(rtwdev, REG_RXPSF_TYPE_CTRL, 0xFFFFFFFF);
2117 /* rx ignore configuration */
2118 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL);
2119 value16 &= ~(BIT_RXPSF_MHCHKEN | BIT_RXPSF_CCKRST |
2120 BIT_RXPSF_CONT_ERRCHKEN);
2121 value16 = BIT_SET_RXPSF_ERRTHR(value16, 0x07);
2122 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2123 rtw_write8_set(rtwdev, REG_SND_PTCL_CTRL,
2124 BIT_DIS_CHK_VHTSIGB_CRC);
2125
2126 /* Interrupt migration configuration */
2127 rtw_write32(rtwdev, REG_INT_MIG, WLAN_MAC_INT_MIG_CFG);
2128
2129 return 0;
2130 }
2131
2132 #define FWCD_SIZE_REG_8822C 0x2000
2133 #define FWCD_SIZE_DMEM_8822C 0x10000
2134 #define FWCD_SIZE_IMEM_8822C 0x10000
2135 #define FWCD_SIZE_EMEM_8822C 0x20000
2136 #define FWCD_SIZE_ROM_8822C 0x10000
2137
2138 static const u32 __fwcd_segs_8822c[] = {
2139 FWCD_SIZE_REG_8822C,
2140 FWCD_SIZE_DMEM_8822C,
2141 FWCD_SIZE_IMEM_8822C,
2142 FWCD_SIZE_EMEM_8822C,
2143 FWCD_SIZE_ROM_8822C,
2144 };
2145
2146 static const struct rtw_fwcd_segs rtw8822c_fwcd_segs = {
2147 .segs = __fwcd_segs_8822c,
2148 .num = ARRAY_SIZE(__fwcd_segs_8822c),
2149 };
2150
2151 static int rtw8822c_dump_fw_crash(struct rtw_dev *rtwdev)
2152 {
2153 #define __dump_fw_8822c(_dev, _mem) \
2154 rtw_dump_fw(_dev, OCPBASE_ ## _mem ## _88XX, \
2155 FWCD_SIZE_ ## _mem ## _8822C, RTW_FWCD_ ## _mem)
2156 int ret;
2157
2158 ret = rtw_dump_reg(rtwdev, 0x0, FWCD_SIZE_REG_8822C);
2159 if (ret)
2160 return ret;
2161 ret = __dump_fw_8822c(rtwdev, DMEM);
2162 if (ret)
2163 return ret;
2164 ret = __dump_fw_8822c(rtwdev, IMEM);
2165 if (ret)
2166 return ret;
2167 ret = __dump_fw_8822c(rtwdev, EMEM);
2168 if (ret)
2169 return ret;
2170 ret = __dump_fw_8822c(rtwdev, ROM);
2171 if (ret)
2172 return ret;
2173
2174 return 0;
2175
2176 #undef __dump_fw_8822c
2177 }
2178
2179 static void rtw8822c_rstb_3wire(struct rtw_dev *rtwdev, bool enable)
2180 {
2181 if (enable) {
2182 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x1);
2183 rtw_write32_mask(rtwdev, REG_ANAPAR_A, BIT_ANAPAR_UPDATE, 0x1);
2184 rtw_write32_mask(rtwdev, REG_ANAPAR_B, BIT_ANAPAR_UPDATE, 0x1);
2185 } else {
2186 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x0);
2187 }
2188 }
2189
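/* Program the RF side of a channel switch. RF_CFGCH (RF reg 0x18) carries
 * band, channel number, bandwidth and the RFSI selector for the upper 5 GHz
 * channels, while the RX baseband bandwidth is written through the gain LUT
 * (RF_LUTWE2 / RF_LUTWA / RF_LUTWD0) on both paths. The 3-wire reset is
 * released only after the update completes.
 */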
2190 static void rtw8822c_set_channel_rf(struct rtw_dev *rtwdev, u8 channel, u8 bw)
2191 {
2192 #define RF18_BAND_MASK (BIT(16) | BIT(9) | BIT(8))
2193 #define RF18_BAND_2G (0)
2194 #define RF18_BAND_5G (BIT(16) | BIT(8))
2195 #define RF18_CHANNEL_MASK (MASKBYTE0)
2196 #define RF18_RFSI_MASK (BIT(18) | BIT(17))
2197 #define RF18_RFSI_GE_CH80 (BIT(17))
2198 #define RF18_RFSI_GT_CH140 (BIT(18))
2199 #define RF18_BW_MASK (BIT(13) | BIT(12))
2200 #define RF18_BW_20M (BIT(13) | BIT(12))
2201 #define RF18_BW_40M (BIT(13))
2202 #define RF18_BW_80M (BIT(12))
2203
2204 u32 rf_reg18 = 0;
2205 u32 rf_rxbb = 0;
2206
2207 rf_reg18 = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
2208
2209 rf_reg18 &= ~(RF18_BAND_MASK | RF18_CHANNEL_MASK | RF18_RFSI_MASK |
2210 RF18_BW_MASK);
2211
2212 rf_reg18 |= (IS_CH_2G_BAND(channel) ? RF18_BAND_2G : RF18_BAND_5G);
2213 rf_reg18 |= (channel & RF18_CHANNEL_MASK);
2214 if (IS_CH_5G_BAND_4(channel))
2215 rf_reg18 |= RF18_RFSI_GT_CH140;
2216 else if (IS_CH_5G_BAND_3(channel))
2217 rf_reg18 |= RF18_RFSI_GE_CH80;
2218
2219 switch (bw) {
2220 case RTW_CHANNEL_WIDTH_5:
2221 case RTW_CHANNEL_WIDTH_10:
2222 case RTW_CHANNEL_WIDTH_20:
2223 default:
2224 rf_reg18 |= RF18_BW_20M;
2225 rf_rxbb = 0x18;
2226 break;
2227 case RTW_CHANNEL_WIDTH_40:
2228 /* RF bandwidth */
2229 rf_reg18 |= RF18_BW_40M;
2230 rf_rxbb = 0x10;
2231 break;
2232 case RTW_CHANNEL_WIDTH_80:
2233 rf_reg18 |= RF18_BW_80M;
2234 rf_rxbb = 0x8;
2235 break;
2236 }
2237
2238 rtw8822c_rstb_3wire(rtwdev, false);
2239
2240 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x01);
2241 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWA, 0x1f, 0x12);
2242 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWD0, 0xfffff, rf_rxbb);
2243 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x00);
2244
2245 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x01);
2246 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWA, 0x1f, 0x12);
2247 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWD0, 0xfffff, rf_rxbb);
2248 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x00);
2249
2250 rtw_write_rf(rtwdev, RF_PATH_A, RF_CFGCH, RFREG_MASK, rf_reg18);
2251 rtw_write_rf(rtwdev, RF_PATH_B, RF_CFGCH, RFREG_MASK, rf_reg18);
2252
2253 rtw8822c_rstb_3wire(rtwdev, true);
2254 }
2255
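/* Toggle the initial gain index (IGI) down by 2 and back on both paths,
 * presumably to force the AGC to re-settle after a channel or TRX path
 * change.
 */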
2256 static void rtw8822c_toggle_igi(struct rtw_dev *rtwdev)
2257 {
2258 u32 igi;
2259
2260 igi = rtw_read32_mask(rtwdev, REG_RXIGI, 0x7f);
2261 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi - 2);
2262 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi - 2);
2263 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi);
2264 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi);
2265 }
2266
2267 static void rtw8822c_set_channel_bb(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2268 u8 primary_ch_idx)
2269 {
2270 if (IS_CH_2G_BAND(channel)) {
2271 rtw_write32_clr(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2272 rtw_write32_set(rtwdev, REG_TXF4, BIT(20));
2273 rtw_write32_clr(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2274 rtw_write32_clr(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2275 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0xF);
2276
2277 switch (bw) {
2278 case RTW_CHANNEL_WIDTH_20:
2279 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2280 0x5);
2281 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2282 0x5);
2283 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2284 0x6);
2285 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2286 0x6);
2287 break;
2288 case RTW_CHANNEL_WIDTH_40:
2289 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2290 0x4);
2291 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2292 0x4);
2293 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2294 0x0);
2295 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2296 0x0);
2297 break;
2298 }
2299 if (channel == 13 || channel == 14)
2300 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x969);
2301 else if (channel == 11 || channel == 12)
2302 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x96a);
2303 else
2304 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x9aa);
2305 if (channel == 14) {
2306 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x3da0);
2307 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2308 0x4962c931);
2309 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x6aa3);
2310 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xaa7b);
2311 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xf3d7);
2312 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD, 0x0);
2313 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2314 0xff012455);
2315 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD, 0xffff);
2316 } else {
2317 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x5284);
2318 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2319 0x3e18fec8);
2320 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x0a88);
2321 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xacc4);
2322 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xc8b2);
2323 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD,
2324 0x00faf0de);
2325 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2326 0x00122344);
2327 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD,
2328 0x0fffffff);
2329 }
2330 if (channel == 13)
2331 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2332 else
2333 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x1);
2334 } else if (IS_CH_5G_BAND(channel)) {
2335 rtw_write32_set(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2336 rtw_write32_set(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2337 rtw_write32_set(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2338 rtw_write32_clr(rtwdev, REG_TXF4, BIT(20));
2339 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0x22);
2340 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2341 if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
2342 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2343 0x1);
2344 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2345 0x1);
2346 } else if (IS_CH_5G_BAND_3(channel)) {
2347 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2348 0x2);
2349 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2350 0x2);
2351 } else if (IS_CH_5G_BAND_4(channel)) {
2352 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2353 0x3);
2354 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2355 0x3);
2356 }
2357
2358 if (channel >= 36 && channel <= 51)
2359 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x494);
2360 else if (channel >= 52 && channel <= 55)
2361 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x493);
2362 else if (channel >= 56 && channel <= 111)
2363 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x453);
2364 else if (channel >= 112 && channel <= 119)
2365 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x452);
2366 else if (channel >= 120 && channel <= 172)
2367 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x412);
2368 else if (channel >= 173 && channel <= 177)
2369 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x411);
2370 }
2371
2372 switch (bw) {
2373 case RTW_CHANNEL_WIDTH_20:
2374 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x19B);
2375 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2376 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x0);
2377 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x7);
2378 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x6);
2379 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2380 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2381 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2382 break;
2383 case RTW_CHANNEL_WIDTH_40:
2384 rtw_write32_mask(rtwdev, REG_CCKSB, BIT(4),
2385 (primary_ch_idx == RTW_SC_20_UPPER ? 1 : 0));
2386 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x5);
2387 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2388 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2389 (primary_ch_idx | (primary_ch_idx << 4)));
2390 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x1);
2391 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2392 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2393 break;
2394 case RTW_CHANNEL_WIDTH_80:
2395 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0xa);
2396 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2397 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2398 (primary_ch_idx | (primary_ch_idx << 4)));
2399 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x6);
2400 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2401 break;
2402 case RTW_CHANNEL_WIDTH_5:
2403 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2404 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2405 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x1);
2406 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x4);
2407 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x4);
2408 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2409 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2410 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2411 break;
2412 case RTW_CHANNEL_WIDTH_10:
2413 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2414 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2415 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x2);
2416 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x6);
2417 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x5);
2418 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2419 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2420 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2421 break;
2422 }
2423 }
2424
2425 static void rtw8822c_set_channel(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2426 u8 primary_chan_idx)
2427 {
2428 rtw8822c_set_channel_bb(rtwdev, channel, bw, primary_chan_idx);
2429 rtw_set_channel_mac(rtwdev, channel, bw, primary_chan_idx);
2430 rtw8822c_set_channel_rf(rtwdev, channel, bw);
2431 rtw8822c_toggle_igi(rtwdev);
2432 }
2433
2434 static void rtw8822c_config_cck_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2435 {
2436 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2437 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x0);
2438 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x0);
2439 } else if (rx_path == BB_PATH_AB) {
2440 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x1);
2441 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x1);
2442 }
2443
2444 if (rx_path == BB_PATH_A)
2445 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x0);
2446 else if (rx_path == BB_PATH_B)
2447 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x5);
2448 else if (rx_path == BB_PATH_AB)
2449 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x1);
2450 }
2451
2452 static void rtw8822c_config_ofdm_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2453 {
2454 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2455 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x0);
2456 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x0);
2457 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x0);
2458 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x0);
2459 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x0);
2460 } else if (rx_path == BB_PATH_AB) {
2461 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x1);
2462 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x1);
2463 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x1);
2464 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x1);
2465 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x1);
2466 }
2467
2468 rtw_write32_mask(rtwdev, 0x824, 0x0f000000, rx_path);
2469 rtw_write32_mask(rtwdev, 0x824, 0x000f0000, rx_path);
2470 }
2471
2472 static void rtw8822c_config_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2473 {
2474 rtw8822c_config_cck_rx_path(rtwdev, rx_path);
2475 rtw8822c_config_ofdm_rx_path(rtwdev, rx_path);
2476 }
2477
2478 static void rtw8822c_config_cck_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2479 bool is_tx2_path)
2480 {
2481 if (tx_path == BB_PATH_A) {
2482 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2483 } else if (tx_path == BB_PATH_B) {
2484 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x4);
2485 } else {
2486 if (is_tx2_path)
2487 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0xc);
2488 else
2489 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2490 }
2491 rtw8822c_bb_reset(rtwdev);
2492 }
2493
2494 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2495 enum rtw_bb_path tx_path_sel_1ss)
2496 {
2497 if (tx_path == BB_PATH_A) {
2498 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
2499 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2500 } else if (tx_path == BB_PATH_B) {
2501 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
2502 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2503 } else {
2504 if (tx_path_sel_1ss == BB_PATH_AB) {
2505 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
2506 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
2507 } else if (tx_path_sel_1ss == BB_PATH_B) {
2508 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
2509 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2510 } else if (tx_path_sel_1ss == BB_PATH_A) {
2511 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
2512 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2513 }
2514 }
2515 rtw8822c_bb_reset(rtwdev);
2516 }
2517
2518 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2519 enum rtw_bb_path tx_path_sel_1ss,
2520 enum rtw_bb_path tx_path_cck,
2521 bool is_tx2_path)
2522 {
2523 rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
2524 rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
2525 rtw8822c_bb_reset(rtwdev);
2526 }
2527
2528 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
2529 u8 rx_path, bool is_tx2_path)
2530 {
2531 if ((tx_path | rx_path) & BB_PATH_A)
2532 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x33312);
2533 else
2534 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x11111);
2535 if ((tx_path | rx_path) & BB_PATH_B)
2536 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x33312);
2537 else
2538 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
2539
2540 rtw8822c_config_rx_path(rtwdev, rx_path);
2541 rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
2542 is_tx2_path);
2543
2544 rtw8822c_toggle_igi(rtwdev);
2545 }
2546
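/* PHY status page 0 describes CCK packets. The raw PWDB readings are first
 * compensated when the reported AGC gain falls outside the CCK gain bounds
 * captured at init time (two units, presumably dB, per gain step) and then
 * converted to dBm by subtracting 110.
 */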
2547 static void query_phy_status_page0(struct rtw_dev *rtwdev, u8 *phy_status,
2548 struct rtw_rx_pkt_stat *pkt_stat)
2549 {
2550 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2551 u8 l_bnd, u_bnd;
2552 u8 gain_a, gain_b;
2553 s8 rx_power[RTW_RF_PATH_MAX];
2554 s8 min_rx_power = -120;
2555 u8 rssi;
2556 u8 channel;
2557 int path;
2558
2559 rx_power[RF_PATH_A] = GET_PHY_STAT_P0_PWDB_A(phy_status);
2560 rx_power[RF_PATH_B] = GET_PHY_STAT_P0_PWDB_B(phy_status);
2561 l_bnd = dm_info->cck_gi_l_bnd;
2562 u_bnd = dm_info->cck_gi_u_bnd;
2563 gain_a = GET_PHY_STAT_P0_GAIN_A(phy_status);
2564 gain_b = GET_PHY_STAT_P0_GAIN_B(phy_status);
2565 if (gain_a < l_bnd)
2566 rx_power[RF_PATH_A] += (l_bnd - gain_a) << 1;
2567 else if (gain_a > u_bnd)
2568 rx_power[RF_PATH_A] -= (gain_a - u_bnd) << 1;
2569 if (gain_b < l_bnd)
2570 rx_power[RF_PATH_B] += (l_bnd - gain_b) << 1;
2571 else if (gain_b > u_bnd)
2572 rx_power[RF_PATH_B] -= (gain_b - u_bnd) << 1;
2573
2574 rx_power[RF_PATH_A] -= 110;
2575 rx_power[RF_PATH_B] -= 110;
2576
2577 channel = GET_PHY_STAT_P0_CHANNEL(phy_status);
2578 if (channel == 0)
2579 channel = rtwdev->hal.current_channel;
2580 rtw_set_rx_freq_band(pkt_stat, channel);
2581
2582 pkt_stat->rx_power[RF_PATH_A] = rx_power[RF_PATH_A];
2583 pkt_stat->rx_power[RF_PATH_B] = rx_power[RF_PATH_B];
2584
2585 for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2586 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2587 dm_info->rssi[path] = rssi;
2588 }
2589
2590 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 1);
2591 pkt_stat->bw = RTW_CHANNEL_WIDTH_20;
2592 pkt_stat->signal_power = max(pkt_stat->rx_power[RF_PATH_A],
2593 min_rx_power);
2594 }
2595
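/* PHY status page 1 describes OFDM/HT/VHT packets. The RX sub-channel field
 * is used to infer the packet bandwidth (9..12 maps to 40 MHz, 13 and above
 * to 80 MHz, otherwise 20 MHz), per-path RSSI feeds the path diversity
 * statistics, and the negative raw EVM is reported as a positive dB figure.
 */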
2596 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
2597 struct rtw_rx_pkt_stat *pkt_stat)
2598 {
2599 struct rtw_path_div *p_div = &rtwdev->dm_path_div;
2600 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2601 u8 rxsc, bw;
2602 s8 min_rx_power = -120;
2603 s8 rx_evm;
2604 u8 evm_dbm = 0;
2605 u8 rssi;
2606 int path;
2607 u8 channel;
2608
2609 if (pkt_stat->rate > DESC_RATE11M && pkt_stat->rate < DESC_RATEMCS0)
2610 rxsc = GET_PHY_STAT_P1_L_RXSC(phy_status);
2611 else
2612 rxsc = GET_PHY_STAT_P1_HT_RXSC(phy_status);
2613
2614 if (rxsc >= 9 && rxsc <= 12)
2615 bw = RTW_CHANNEL_WIDTH_40;
2616 else if (rxsc >= 13)
2617 bw = RTW_CHANNEL_WIDTH_80;
2618 else
2619 bw = RTW_CHANNEL_WIDTH_20;
2620
2621 channel = GET_PHY_STAT_P1_CHANNEL(phy_status);
2622 rtw_set_rx_freq_band(pkt_stat, channel);
2623
2624 pkt_stat->rx_power[RF_PATH_A] = GET_PHY_STAT_P1_PWDB_A(phy_status) - 110;
2625 pkt_stat->rx_power[RF_PATH_B] = GET_PHY_STAT_P1_PWDB_B(phy_status) - 110;
2626 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 2);
2627 pkt_stat->bw = bw;
2628 pkt_stat->signal_power = max3(pkt_stat->rx_power[RF_PATH_A],
2629 pkt_stat->rx_power[RF_PATH_B],
2630 min_rx_power);
2631
2632 dm_info->curr_rx_rate = pkt_stat->rate;
2633
2634 pkt_stat->rx_evm[RF_PATH_A] = GET_PHY_STAT_P1_RXEVM_A(phy_status);
2635 pkt_stat->rx_evm[RF_PATH_B] = GET_PHY_STAT_P1_RXEVM_B(phy_status);
2636
2637 pkt_stat->rx_snr[RF_PATH_A] = GET_PHY_STAT_P1_RXSNR_A(phy_status);
2638 pkt_stat->rx_snr[RF_PATH_B] = GET_PHY_STAT_P1_RXSNR_B(phy_status);
2639
2640 pkt_stat->cfo_tail[RF_PATH_A] = GET_PHY_STAT_P1_CFO_TAIL_A(phy_status);
2641 pkt_stat->cfo_tail[RF_PATH_B] = GET_PHY_STAT_P1_CFO_TAIL_B(phy_status);
2642
2643 for (path = 0; path <= rtwdev->hal.rf_path_num; path++) {
2644 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2645 dm_info->rssi[path] = rssi;
2646 if (path == RF_PATH_A) {
2647 p_div->path_a_sum += rssi;
2648 p_div->path_a_cnt++;
2649 } else if (path == RF_PATH_B) {
2650 p_div->path_b_sum += rssi;
2651 p_div->path_b_cnt++;
2652 }
2653 dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
2654 dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
2655
2656 rx_evm = pkt_stat->rx_evm[path];
2657
2658 if (rx_evm < 0) {
2659 if (rx_evm == S8_MIN)
2660 evm_dbm = 0;
2661 else
2662 evm_dbm = ((u8)-rx_evm >> 1);
2663 }
2664 dm_info->rx_evm_dbm[path] = evm_dbm;
2665 }
2666 rtw_phy_parsing_cfo(rtwdev, pkt_stat);
2667 }
2668
2669 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
2670 struct rtw_rx_pkt_stat *pkt_stat)
2671 {
2672 u8 page;
2673
2674 page = *phy_status & 0xf;
2675
2676 switch (page) {
2677 case 0:
2678 query_phy_status_page0(rtwdev, phy_status, pkt_stat);
2679 break;
2680 case 1:
2681 query_phy_status_page1(rtwdev, phy_status, pkt_stat);
2682 break;
2683 default:
2684 rtw_warn(rtwdev, "unused phy status page (%d)\n", page);
2685 return;
2686 }
2687 }
2688
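/* Parse an RX descriptor. After the descriptor itself the buffer holds the
 * optional shift padding, the driver info area (drv_info_sz, in 8-byte
 * units) and then the 802.11 header; when the PHYST bit is set, the PHY
 * status blob starts at the beginning of the driver info area and is parsed
 * by query_phy_status().
 */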
2689 static void rtw8822c_query_rx_desc(struct rtw_dev *rtwdev, u8 *rx_desc,
2690 struct rtw_rx_pkt_stat *pkt_stat,
2691 struct ieee80211_rx_status *rx_status)
2692 {
2693 struct ieee80211_hdr *hdr;
2694 u32 desc_sz = rtwdev->chip->rx_pkt_desc_sz;
2695 u8 *phy_status = NULL;
2696
2697 memset(pkt_stat, 0, sizeof(*pkt_stat));
2698
2699 pkt_stat->phy_status = GET_RX_DESC_PHYST(rx_desc);
2700 pkt_stat->icv_err = GET_RX_DESC_ICV_ERR(rx_desc);
2701 pkt_stat->crc_err = GET_RX_DESC_CRC32(rx_desc);
2702 pkt_stat->decrypted = !GET_RX_DESC_SWDEC(rx_desc) &&
2703 GET_RX_DESC_ENC_TYPE(rx_desc) != RX_DESC_ENC_NONE;
2704 pkt_stat->is_c2h = GET_RX_DESC_C2H(rx_desc);
2705 pkt_stat->pkt_len = GET_RX_DESC_PKT_LEN(rx_desc);
2706 pkt_stat->drv_info_sz = GET_RX_DESC_DRV_INFO_SIZE(rx_desc);
2707 pkt_stat->shift = GET_RX_DESC_SHIFT(rx_desc);
2708 pkt_stat->rate = GET_RX_DESC_RX_RATE(rx_desc);
2709 pkt_stat->cam_id = GET_RX_DESC_MACID(rx_desc);
2710 pkt_stat->ppdu_cnt = GET_RX_DESC_PPDU_CNT(rx_desc);
2711 pkt_stat->tsf_low = GET_RX_DESC_TSFL(rx_desc);
2712
2713 	/* drv_info_sz is in units of 8 bytes */
2714 pkt_stat->drv_info_sz *= 8;
2715
2716 	/* the rx/phy status of a C2H command packet is of no interest */
2717 if (pkt_stat->is_c2h)
2718 return;
2719
2720 hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
2721 pkt_stat->drv_info_sz);
2722 pkt_stat->hdr = hdr;
2723 if (pkt_stat->phy_status) {
2724 phy_status = rx_desc + desc_sz + pkt_stat->shift;
2725 query_phy_status(rtwdev, phy_status, pkt_stat);
2726 }
2727
2728 rtw_rx_fill_rx_status(rtwdev, pkt_stat, hdr, rx_status, phy_status);
2729 }
2730
2731 static void
2732 rtw8822c_set_write_tx_power_ref(struct rtw_dev *rtwdev, u8 *tx_pwr_ref_cck,
2733 u8 *tx_pwr_ref_ofdm)
2734 {
2735 struct rtw_hal *hal = &rtwdev->hal;
2736 u32 txref_cck[2] = {0x18a0, 0x41a0};
2737 u32 txref_ofdm[2] = {0x18e8, 0x41e8};
2738 u8 path;
2739
2740 for (path = 0; path < hal->rf_path_num; path++) {
2741 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2742 rtw_write32_mask(rtwdev, txref_cck[path], 0x7f0000,
2743 tx_pwr_ref_cck[path]);
2744 }
2745 for (path = 0; path < hal->rf_path_num; path++) {
2746 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2747 rtw_write32_mask(rtwdev, txref_ofdm[path], 0x1fc00,
2748 tx_pwr_ref_ofdm[path]);
2749 }
2750 }
2751
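/* Write the per-rate TX power differences. Each 32-bit word at
 * 0x3a00 + rate_idx holds four consecutive rates, one 7-bit signed diff per
 * byte, expressed relative to the reference power programmed by
 * rtw8822c_set_write_tx_power_ref().
 */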
2752 static void rtw8822c_set_tx_power_diff(struct rtw_dev *rtwdev, u8 rate,
2753 s8 *diff_idx)
2754 {
2755 u32 offset_txagc = 0x3a00;
2756 u8 rate_idx = rate & 0xfc;
2757 u8 pwr_idx[4];
2758 u32 phy_pwr_idx;
2759 int i;
2760
2761 for (i = 0; i < 4; i++)
2762 pwr_idx[i] = diff_idx[i] & 0x7f;
2763
2764 phy_pwr_idx = pwr_idx[0] |
2765 (pwr_idx[1] << 8) |
2766 (pwr_idx[2] << 16) |
2767 (pwr_idx[3] << 24);
2768
2769 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0x0);
2770 rtw_write32_mask(rtwdev, offset_txagc + rate_idx, MASKDWORD,
2771 phy_pwr_idx);
2772 }
2773
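/* Program the TX power table. CCK 11M and OFDM MCS7 act as the per-path
 * reference levels; every other rate is written as a difference against its
 * reference, taking the smaller of the path A and path B differences for
 * each rate and flushing them four rates at a time.
 */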
2774 static void rtw8822c_set_tx_power_index(struct rtw_dev *rtwdev)
2775 {
2776 struct rtw_hal *hal = &rtwdev->hal;
2777 u8 rs, rate, j;
2778 u8 pwr_ref_cck[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATE11M],
2779 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATE11M]};
2780 u8 pwr_ref_ofdm[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATEMCS7],
2781 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATEMCS7]};
2782 s8 diff_a, diff_b;
2783 u8 pwr_a, pwr_b;
2784 s8 diff_idx[4];
2785
2786 rtw8822c_set_write_tx_power_ref(rtwdev, pwr_ref_cck, pwr_ref_ofdm);
2787 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++) {
2788 for (j = 0; j < rtw_rate_size[rs]; j++) {
2789 rate = rtw_rate_section[rs][j];
2790 pwr_a = hal->tx_pwr_tbl[RF_PATH_A][rate];
2791 pwr_b = hal->tx_pwr_tbl[RF_PATH_B][rate];
2792 if (rs == 0) {
2793 diff_a = (s8)pwr_a - (s8)pwr_ref_cck[0];
2794 diff_b = (s8)pwr_b - (s8)pwr_ref_cck[1];
2795 } else {
2796 diff_a = (s8)pwr_a - (s8)pwr_ref_ofdm[0];
2797 diff_b = (s8)pwr_b - (s8)pwr_ref_ofdm[1];
2798 }
2799 diff_idx[rate % 4] = min(diff_a, diff_b);
2800 if (rate % 4 == 3)
2801 rtw8822c_set_tx_power_diff(rtwdev, rate - 3,
2802 diff_idx);
2803 }
2804 }
2805 }
2806
2807 static int rtw8822c_set_antenna(struct rtw_dev *rtwdev,
2808 u32 antenna_tx,
2809 u32 antenna_rx)
2810 {
2811 struct rtw_hal *hal = &rtwdev->hal;
2812
2813 switch (antenna_tx) {
2814 case BB_PATH_A:
2815 case BB_PATH_B:
2816 case BB_PATH_AB:
2817 break;
2818 default:
2819 rtw_warn(rtwdev, "unsupported tx path 0x%x\n", antenna_tx);
2820 return -EINVAL;
2821 }
2822
2823 	/* RX on path B alone is not supported */
2824 switch (antenna_rx) {
2825 case BB_PATH_A:
2826 case BB_PATH_AB:
2827 break;
2828 default:
2829 rtw_warn(rtwdev, "unsupported rx path 0x%x\n", antenna_rx);
2830 return -EINVAL;
2831 }
2832
2833 hal->antenna_tx = antenna_tx;
2834 hal->antenna_rx = antenna_rx;
2835
2836 rtw8822c_config_trx_mode(rtwdev, antenna_tx, antenna_rx, false);
2837
2838 return 0;
2839 }
2840
2841 static void rtw8822c_cfg_ldo25(struct rtw_dev *rtwdev, bool enable)
2842 {
2843 u8 ldo_pwr;
2844
2845 ldo_pwr = rtw_read8(rtwdev, REG_ANAPARLDO_POW_MAC);
2846 ldo_pwr = enable ? ldo_pwr | BIT_LDOE25_PON : ldo_pwr & ~BIT_LDOE25_PON;
2847 rtw_write8(rtwdev, REG_ANAPARLDO_POW_MAC, ldo_pwr);
2848 }
2849
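/* Collect false-alarm and CRC statistics. The OFDM false-alarm count is the
 * sum of the individual parser failure counters (parity, illegal rate, CRC8,
 * MCS, fast fsync and SB search failures), and the CCK count is only added
 * when the CCK block is enabled. The hardware counters are then cleared by
 * toggling the FA reset bits and briefly disabling RX clock gating.
 */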
2850 static void rtw8822c_false_alarm_statistics(struct rtw_dev *rtwdev)
2851 {
2852 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2853 u32 cck_enable;
2854 u32 cck_fa_cnt;
2855 u32 crc32_cnt;
2856 u32 cca32_cnt;
2857 u32 ofdm_fa_cnt;
2858 u32 ofdm_fa_cnt1, ofdm_fa_cnt2, ofdm_fa_cnt3, ofdm_fa_cnt4, ofdm_fa_cnt5;
2859 u16 parity_fail, rate_illegal, crc8_fail, mcs_fail, sb_search_fail,
2860 fast_fsync, crc8_fail_vhta, mcs_fail_vht;
2861
2862 cck_enable = rtw_read32(rtwdev, REG_ENCCK) & BIT_CCK_BLK_EN;
2863 cck_fa_cnt = rtw_read16(rtwdev, REG_CCK_FACNT);
2864
2865 ofdm_fa_cnt1 = rtw_read32(rtwdev, REG_OFDM_FACNT1);
2866 ofdm_fa_cnt2 = rtw_read32(rtwdev, REG_OFDM_FACNT2);
2867 ofdm_fa_cnt3 = rtw_read32(rtwdev, REG_OFDM_FACNT3);
2868 ofdm_fa_cnt4 = rtw_read32(rtwdev, REG_OFDM_FACNT4);
2869 ofdm_fa_cnt5 = rtw_read32(rtwdev, REG_OFDM_FACNT5);
2870
2871 parity_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt1);
2872 rate_illegal = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt2);
2873 crc8_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt2);
2874 crc8_fail_vhta = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt3);
2875 mcs_fail = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt4);
2876 mcs_fail_vht = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt4);
2877 fast_fsync = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt5);
2878 sb_search_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt5);
2879
2880 ofdm_fa_cnt = parity_fail + rate_illegal + crc8_fail + crc8_fail_vhta +
2881 mcs_fail + mcs_fail_vht + fast_fsync + sb_search_fail;
2882
2883 dm_info->cck_fa_cnt = cck_fa_cnt;
2884 dm_info->ofdm_fa_cnt = ofdm_fa_cnt;
2885 dm_info->total_fa_cnt = ofdm_fa_cnt;
2886 dm_info->total_fa_cnt += cck_enable ? cck_fa_cnt : 0;
2887
2888 crc32_cnt = rtw_read32(rtwdev, 0x2c04);
2889 dm_info->cck_ok_cnt = crc32_cnt & 0xffff;
2890 dm_info->cck_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2891 crc32_cnt = rtw_read32(rtwdev, 0x2c14);
2892 dm_info->ofdm_ok_cnt = crc32_cnt & 0xffff;
2893 dm_info->ofdm_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2894 crc32_cnt = rtw_read32(rtwdev, 0x2c10);
2895 dm_info->ht_ok_cnt = crc32_cnt & 0xffff;
2896 dm_info->ht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2897 crc32_cnt = rtw_read32(rtwdev, 0x2c0c);
2898 dm_info->vht_ok_cnt = crc32_cnt & 0xffff;
2899 dm_info->vht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2900
2901 cca32_cnt = rtw_read32(rtwdev, 0x2c08);
2902 dm_info->ofdm_cca_cnt = ((cca32_cnt & 0xffff0000) >> 16);
2903 dm_info->cck_cca_cnt = cca32_cnt & 0xffff;
2904 dm_info->total_cca_cnt = dm_info->ofdm_cca_cnt;
2905 if (cck_enable)
2906 dm_info->total_cca_cnt += dm_info->cck_cca_cnt;
2907
2908 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 0);
2909 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 2);
2910 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 0);
2911 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 2);
2912
2913 /* disable rx clk gating to reset counters */
2914 rtw_write32_clr(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2915 rtw_write32_set(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2916 rtw_write32_clr(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2917 rtw_write32_set(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2918 }
2919
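/* LC (synthesizer) calibration: trigger the AAC calibration on path A, poll
 * the RF_AAC_CTRL status field until it changes (or the poll times out),
 * then pulse the fast-LCK control in RF_FAST_LCK. The result of the poll is
 * not checked here.
 */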
2920 static void rtw8822c_do_lck(struct rtw_dev *rtwdev)
2921 {
2922 u32 val;
2923
2924 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2925 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0FA);
2926 fsleep(1);
2927 rtw_write_rf(rtwdev, RF_PATH_A, RF_AAC_CTRL, RFREG_MASK, 0x80000);
2928 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_AAC, RFREG_MASK, 0x80001);
2929 read_poll_timeout(rtw_read_rf, val, val != 0x1, 1000, 100000,
2930 true, rtwdev, RF_PATH_A, RF_AAC_CTRL, 0x1000);
2931 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0F8);
2932 rtw_write_rf(rtwdev, RF_PATH_B, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2933
2934 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2935 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x4f000);
2936 fsleep(1);
2937 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2938 }
2939
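/* IQ calibration is offloaded to the firmware: the driver requests it via
 * rtw_fw_do_iqk() and then polls REG_RPT_CIP until the firmware reports
 * IQK_DONE_8822C (0xaa) or the poll times out.
 */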
2940 static void rtw8822c_do_iqk(struct rtw_dev *rtwdev)
2941 {
2942 struct rtw_iqk_para para = {0};
2943 u8 iqk_chk;
2944 int ret;
2945
2946 para.clear = 1;
2947 rtw_fw_do_iqk(rtwdev, ¶);
2948
2949 ret = read_poll_timeout(rtw_read8, iqk_chk, iqk_chk == IQK_DONE_8822C,
2950 20000, 300000, false, rtwdev, REG_RPT_CIP);
2951 if (ret)
2952 rtw_warn(rtwdev, "failed to poll iqk status bit\n");
2953
2954 rtw_write8(rtwdev, REG_IQKSTAT, 0x0);
2955 }
2956
2957 /* for coex */
2958 static void rtw8822c_coex_cfg_init(struct rtw_dev *rtwdev)
2959 {
2960 	/* enable TBTT interrupt */
2961 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2962
2963 /* BT report packet sample rate */
2964 /* 0x790[5:0]=0x5 */
2965 rtw_write8_mask(rtwdev, REG_BT_TDMA_TIME, BIT_MASK_SAMPLE_RATE, 0x5);
2966
2967 /* enable BT counter statistics */
2968 rtw_write8(rtwdev, REG_BT_STAT_CTRL, 0x1);
2969
2970 	/* enable PTA (3-wire function from BT side) */
2971 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_BT_PTA_EN);
2972 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_PO_BT_PTA_PINS);
2973
2974 	/* enable PTA (tx/rx signal from WiFi side) */
2975 rtw_write8_set(rtwdev, REG_QUEUE_CTRL, BIT_PTA_WL_TX_EN);
2976 	/* WL TX signal to PTA does not take EDCCA into account */
2977 rtw_write8_clr(rtwdev, REG_QUEUE_CTRL, BIT_PTA_EDCCA_EN);
2978 	/* GNT_BT = 1 while both are selected */
2979 rtw_write16_set(rtwdev, REG_BT_COEX_V2, BIT_GNT_BT_POLARITY);
2980 	/* BT_CCA = ~GNT_WL_BB, not OR'ed with GNT_BT_BB or LTE_Rx */
2981 rtw_write8_clr(rtwdev, REG_DUMMY_PAGE4_V1, BIT_BTCCA_CTRL);
2982
2983 /* to avoid RF parameter error */
2984 rtw_write_rf(rtwdev, RF_PATH_B, RF_MODOPT, 0xfffff, 0x40000);
2985 }
2986
2987 static void rtw8822c_coex_cfg_gnt_fix(struct rtw_dev *rtwdev)
2988 {
2989 struct rtw_coex *coex = &rtwdev->coex;
2990 struct rtw_coex_stat *coex_stat = &coex->stat;
2991 struct rtw_efuse *efuse = &rtwdev->efuse;
2992 u32 rf_0x1;
2993
2994 if (coex_stat->gnt_workaround_state == coex_stat->wl_coex_mode)
2995 return;
2996
2997 coex_stat->gnt_workaround_state = coex_stat->wl_coex_mode;
2998
2999 if ((coex_stat->kt_ver == 0 && coex->under_5g) || coex->freerun)
3000 rf_0x1 = 0x40021;
3001 else
3002 rf_0x1 = 0x40000;
3003
3004 /* BT at S1 for Shared-Ant */
3005 if (efuse->share_ant)
3006 rf_0x1 |= BIT(13);
3007
3008 rtw_write_rf(rtwdev, RF_PATH_B, 0x1, 0xfffff, rf_0x1);
3009
3010 /* WL-S0 2G RF TRX cannot be masked by GNT_BT
3011 	 * enable "WLS0 BB change RF mode if GNT_BT = 1" for shared-antenna type
3012 * disable:0x1860[3] = 1, enable:0x1860[3] = 0
3013 *
3014 * enable "DAC off if GNT_WL = 0" for non-shared-antenna
3015 * disable 0x1c30[22] = 0,
3016 * enable: 0x1c30[22] = 1, 0x1c38[12] = 0, 0x1c38[28] = 1
3017 */
3018 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3019 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3020 BIT_ANAPAR_BTPS >> 16, 0);
3021 } else {
3022 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3023 BIT_ANAPAR_BTPS >> 16, 1);
3024 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 1,
3025 BIT_DAC_OFF_ENABLE, 0);
3026 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 3,
3027 BIT_DAC_OFF_ENABLE, 1);
3028 }
3029
3030 	/* disable WL-S1 BB change RF mode if GNT_BT
3031 * since RF TRx mask can do it
3032 */
3033 rtw_write8_mask(rtwdev, REG_IGN_GNTBT4,
3034 BIT_PI_IGNORE_GNT_BT, 1);
3035
3036 	/* disable WL-S0 BB change RF mode if wifi is at 5G,
3037 * or antenna path is separated
3038 */
3039 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3040 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3041 BIT_PI_IGNORE_GNT_BT, 1);
3042 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3043 BIT_NOMASK_TXBT_ENABLE, 1);
3044 } else if (coex_stat->wl_coex_mode == COEX_WLINK_5G ||
3045 coex->under_5g || !efuse->share_ant) {
3046 if (coex_stat->kt_ver >= 3) {
3047 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3048 BIT_PI_IGNORE_GNT_BT, 0);
3049 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3050 BIT_NOMASK_TXBT_ENABLE, 1);
3051 } else {
3052 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3053 BIT_PI_IGNORE_GNT_BT, 1);
3054 }
3055 } else {
3056 /* shared-antenna */
3057 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3058 BIT_PI_IGNORE_GNT_BT, 0);
3059 if (coex_stat->kt_ver >= 3) {
3060 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3061 BIT_NOMASK_TXBT_ENABLE, 0);
3062 }
3063 }
3064 }
3065
3066 static void rtw8822c_coex_cfg_gnt_debug(struct rtw_dev *rtwdev)
3067 {
3068 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 2, BIT_BTGP_SPI_EN >> 16, 0);
3069 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 3, BIT_BTGP_JTAG_EN >> 24, 0);
3070 rtw_write8_mask(rtwdev, REG_GPIO_MUXCFG + 2, BIT_FSPI_EN >> 16, 0);
3071 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 1, BIT_LED1DIS >> 8, 0);
3072 rtw_write8_mask(rtwdev, REG_SYS_SDIO_CTRL + 3, BIT_DBG_GNT_WL_BT >> 24, 0);
3073 }
3074
3075 static void rtw8822c_coex_cfg_rfe_type(struct rtw_dev *rtwdev)
3076 {
3077 struct rtw_coex *coex = &rtwdev->coex;
3078 struct rtw_coex_rfe *coex_rfe = &coex->rfe;
3079 struct rtw_efuse *efuse = &rtwdev->efuse;
3080
3081 coex_rfe->rfe_module_type = rtwdev->efuse.rfe_option;
3082 coex_rfe->ant_switch_polarity = 0;
3083 coex_rfe->ant_switch_exist = false;
3084 coex_rfe->ant_switch_with_bt = false;
3085 coex_rfe->ant_switch_diversity = false;
3086
3087 if (efuse->share_ant)
3088 coex_rfe->wlg_at_btg = true;
3089 else
3090 coex_rfe->wlg_at_btg = false;
3091
3092 	/* disable LTE coex on the WiFi side */
3093 rtw_coex_write_indirect_reg(rtwdev, LTE_COEX_CTRL, BIT_LTE_COEX_EN, 0x0);
3094 rtw_coex_write_indirect_reg(rtwdev, LTE_WL_TRX_CTRL, MASKLWORD, 0xffff);
3095 rtw_coex_write_indirect_reg(rtwdev, LTE_BT_TRX_CTRL, MASKLWORD, 0xffff);
3096 }
3097
3098 static void rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev *rtwdev, u8 wl_pwr)
3099 {
3100 struct rtw_coex *coex = &rtwdev->coex;
3101 struct rtw_coex_dm *coex_dm = &coex->dm;
3102
3103 if (wl_pwr == coex_dm->cur_wl_pwr_lvl)
3104 return;
3105
3106 coex_dm->cur_wl_pwr_lvl = wl_pwr;
3107 }
3108
3109 static void rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev *rtwdev, bool low_gain)
3110 {
3111 struct rtw_coex *coex = &rtwdev->coex;
3112 struct rtw_coex_dm *coex_dm = &coex->dm;
3113
3114 if (low_gain == coex_dm->cur_wl_rx_low_gain_en)
3115 return;
3116
3117 coex_dm->cur_wl_rx_low_gain_en = low_gain;
3118
3119 if (coex_dm->cur_wl_rx_low_gain_en) {
3120 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table On!\n");
3121
3122 /* set Rx filter corner RCK offset */
3123 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x22);
3124 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x36);
3125 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x22);
3126 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x36);
3127
3128 } else {
3129 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table Off!\n");
3130
3131 /* set Rx filter corner RCK offset */
3132 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x20);
3133 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x0);
3134 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x20);
3135 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x0);
3136 }
3137 }
3138
3139 static void rtw8822c_bf_enable_bfee_su(struct rtw_dev *rtwdev,
3140 struct rtw_vif *vif,
3141 struct rtw_bfee *bfee)
3142 {
3143 u8 csi_rsc = 0;
3144 u32 tmp6dc;
3145
3146 rtw_bf_enable_bfee_su(rtwdev, vif, bfee);
3147
3148 tmp6dc = rtw_read32(rtwdev, REG_BBPSF_CTRL) |
3149 BIT_WMAC_USE_NDPARATE |
3150 (csi_rsc << 13);
3151 if (vif->net_type == RTW_NET_AP_MODE)
3152 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc | BIT(12));
3153 else
3154 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc & ~BIT(12));
3155
3156 rtw_write32(rtwdev, REG_CSI_RRSR, 0x550);
3157 }
3158
3159 static void rtw8822c_bf_config_bfee_su(struct rtw_dev *rtwdev,
3160 struct rtw_vif *vif,
3161 struct rtw_bfee *bfee, bool enable)
3162 {
3163 if (enable)
3164 rtw8822c_bf_enable_bfee_su(rtwdev, vif, bfee);
3165 else
3166 rtw_bf_remove_bfee_su(rtwdev, bfee);
3167 }
3168
3169 static void rtw8822c_bf_config_bfee_mu(struct rtw_dev *rtwdev,
3170 struct rtw_vif *vif,
3171 struct rtw_bfee *bfee, bool enable)
3172 {
3173 if (enable)
3174 rtw_bf_enable_bfee_mu(rtwdev, vif, bfee);
3175 else
3176 rtw_bf_remove_bfee_mu(rtwdev, bfee);
3177 }
3178
3179 static void rtw8822c_bf_config_bfee(struct rtw_dev *rtwdev, struct rtw_vif *vif,
3180 struct rtw_bfee *bfee, bool enable)
3181 {
3182 if (bfee->role == RTW_BFEE_SU)
3183 rtw8822c_bf_config_bfee_su(rtwdev, vif, bfee, enable);
3184 else if (bfee->role == RTW_BFEE_MU)
3185 rtw8822c_bf_config_bfee_mu(rtwdev, vif, bfee, enable);
3186 else
3187 rtw_warn(rtwdev, "wrong bfee role\n");
3188 }
3189
3190 struct dpk_cfg_pair {
3191 u32 addr;
3192 u32 bitmask;
3193 u32 data;
3194 };
3195
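/* Note on the parser below (derived from the code, not from the original
 * sources): the DPK MAC/BB table is stored as a flat array of u32 triplets
 * (addr, bitmask, data); tbl->size counts u32 entries, so dividing by three
 * yields the number of register writes. The BUILD_BUG_ON guards the
 * assumption that struct dpk_cfg_pair is exactly three u32s.
 */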
3196 void rtw8822c_parse_tbl_dpk(struct rtw_dev *rtwdev,
3197 const struct rtw_table *tbl)
3198 {
3199 const struct dpk_cfg_pair *p = tbl->data;
3200 const struct dpk_cfg_pair *end = p + tbl->size / 3;
3201
3202 BUILD_BUG_ON(sizeof(struct dpk_cfg_pair) != sizeof(u32) * 3);
3203
3204 for (; p < end; p++)
3205 rtw_write32_mask(rtwdev, p->addr, p->bitmask, p->data);
3206 }
3207
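/* Before a calibration step, save register 0x70 and the indirect LTE coex
 * register 0x38, then force the grant control (0x70[26] set, 0x38 byte 1 =
 * 0x77) so that, presumably, GNT_WL stays asserted and BT activity cannot
 * disturb the one-shot; the saved values are restored afterwards.
 */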
3208 static void rtw8822c_dpk_set_gnt_wl(struct rtw_dev *rtwdev, bool is_before_k)
3209 {
3210 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3211
3212 if (is_before_k) {
3213 dpk_info->gnt_control = rtw_read32(rtwdev, 0x70);
3214 dpk_info->gnt_value = rtw_coex_read_indirect_reg(rtwdev, 0x38);
3215 rtw_write32_mask(rtwdev, 0x70, BIT(26), 0x1);
3216 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKBYTE1, 0x77);
3217 } else {
3218 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKDWORD,
3219 dpk_info->gnt_value);
3220 rtw_write32(rtwdev, 0x70, dpk_info->gnt_control);
3221 }
3222 }
3223
3224 static void
3225 rtw8822c_dpk_restore_registers(struct rtw_dev *rtwdev, u32 reg_num,
3226 struct rtw_backup_info *bckp)
3227 {
3228 rtw_restore_reg(rtwdev, bckp, reg_num);
3229 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3230 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0x4);
3231 }
3232
3233 static void
3234 rtw8822c_dpk_backup_registers(struct rtw_dev *rtwdev, u32 *reg,
3235 u32 reg_num, struct rtw_backup_info *bckp)
3236 {
3237 u32 i;
3238
3239 for (i = 0; i < reg_num; i++) {
3240 bckp[i].len = 4;
3241 bckp[i].reg = reg[i];
3242 bckp[i].val = rtw_read32(rtwdev, reg[i]);
3243 }
3244 }
3245
3246 static void rtw8822c_dpk_backup_rf_registers(struct rtw_dev *rtwdev,
3247 u32 *rf_reg,
3248 u32 rf_reg_bak[][2])
3249 {
3250 u32 i;
3251
3252 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3253 rf_reg_bak[i][RF_PATH_A] = rtw_read_rf(rtwdev, RF_PATH_A,
3254 rf_reg[i], RFREG_MASK);
3255 rf_reg_bak[i][RF_PATH_B] = rtw_read_rf(rtwdev, RF_PATH_B,
3256 rf_reg[i], RFREG_MASK);
3257 }
3258 }
3259
3260 static void rtw8822c_dpk_reload_rf_registers(struct rtw_dev *rtwdev,
3261 u32 *rf_reg,
3262 u32 rf_reg_bak[][2])
3263 {
3264 u32 i;
3265
3266 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3267 rtw_write_rf(rtwdev, RF_PATH_A, rf_reg[i], RFREG_MASK,
3268 rf_reg_bak[i][RF_PATH_A]);
3269 rtw_write_rf(rtwdev, RF_PATH_B, rf_reg[i], RFREG_MASK,
3270 rf_reg_bak[i][RF_PATH_B]);
3271 }
3272 }
3273
3274 static void rtw8822c_dpk_information(struct rtw_dev *rtwdev)
3275 {
3276 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3277 u32 reg;
3278 u8 band_shift;
3279
3280 reg = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
3281
3282 band_shift = FIELD_GET(BIT(16), reg);
3283 dpk_info->dpk_band = 1 << band_shift;
3284 dpk_info->dpk_ch = FIELD_GET(0xff, reg);
3285 dpk_info->dpk_bw = FIELD_GET(0x3000, reg);
3286 }
3287
3288 static void rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev *rtwdev, u8 path)
3289 {
3290 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3291 udelay(5);
3292 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84801);
3293 usleep_range(600, 610);
3294 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3295 }
3296
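/* Read back the RX DC offset (I/Q, 12-bit two's complement converted to
 * magnitude) and the correlation index from the report register; return 1
 * when the DC offset exceeds 200 or the correlation index falls outside
 * 40..65, in which case the caller redoes the RXBB DC calibration.
 */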
3297 static u8 rtw8822c_dpk_dc_corr_check(struct rtw_dev *rtwdev, u8 path)
3298 {
3299 u16 dc_i, dc_q;
3300 u8 corr_idx;
3301
3302 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000900f0);
3303 dc_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3304 dc_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(11, 0));
3305
3306 if (dc_i & BIT(11))
3307 dc_i = 0x1000 - dc_i;
3308 if (dc_q & BIT(11))
3309 dc_q = 0x1000 - dc_q;
3310
3311 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3312 corr_idx = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(7, 0));
3313 rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(15, 8));
3314
3315 if (dc_i > 200 || dc_q > 200 || corr_idx < 40 || corr_idx > 65)
3316 return 1;
3317 else
3318 return 0;
3319
3320 }
3321
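/* Pause all TX queues (0x522 = 0xff), then poll both RF paths until neither
 * reports mode 2 in RF reg 0x00[19:16] (presumably the TX state) or 2500
 * polls of ~2us each have elapsed.
 */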
3322 static void rtw8822c_dpk_tx_pause(struct rtw_dev *rtwdev)
3323 {
3324 u8 reg_a, reg_b;
3325 u16 count = 0;
3326
3327 rtw_write8(rtwdev, 0x522, 0xff);
3328 rtw_write32_mask(rtwdev, 0x1e70, 0xf, 0x2);
3329
3330 do {
3331 reg_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A, 0x00, 0xf0000);
3332 reg_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B, 0x00, 0xf0000);
3333 udelay(2);
3334 count++;
3335 } while ((reg_a == 2 || reg_b == 2) && count < 2500);
3336 }
3337
3338 static void rtw8822c_dpk_mac_bb_setting(struct rtw_dev *rtwdev)
3339 {
3340 rtw8822c_dpk_tx_pause(rtwdev);
3341 rtw_load_table(rtwdev, &rtw8822c_dpk_mac_bb_tbl);
3342 }
3343
3344 static void rtw8822c_dpk_afe_setting(struct rtw_dev *rtwdev, bool is_do_dpk)
3345 {
3346 if (is_do_dpk)
3347 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_is_dpk_tbl);
3348 else
3349 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_no_dpk_tbl);
3350 }
3351
3352 static void rtw8822c_dpk_pre_setting(struct rtw_dev *rtwdev)
3353 {
3354 u8 path;
3355
3356 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3357 rtw_write_rf(rtwdev, path, RF_RXAGC_OFFSET, RFREG_MASK, 0x0);
3358 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3359 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G)
3360 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
3361 else
3362 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
3363 rtw_write32_mask(rtwdev, REG_DPD_LUT0, BIT_GLOSS_DB, 0x4);
3364 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x3);
3365 }
3366 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3367 rtw_write32(rtwdev, REG_DPD_CTL11, 0x3b23170b);
3368 rtw_write32(rtwdev, REG_DPD_CTL12, 0x775f5347);
3369 }
3370
3371 static u32 rtw8822c_dpk_rf_setting(struct rtw_dev *rtwdev, u8 path)
3372 {
3373 u32 ori_txbb;
3374
3375 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50017);
3376 ori_txbb = rtw_read_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK);
3377
3378 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
3379 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_PWR_TRIM, 0x1);
3380 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_BB_GAIN, 0x0);
3381 rtw_write_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK, ori_txbb);
3382
3383 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G) {
3384 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x1);
3385 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x0);
3386 } else {
3387 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x0);
3388 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x6);
3389 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
3390 rtw_write_rf(rtwdev, path, RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0);
3391 }
3392
3393 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3394 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
3395 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
3396
3397 if (rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80)
3398 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x2);
3399 else
3400 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
3401
3402 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT(1), 0x1);
3403
3404 usleep_range(100, 110);
3405
3406 return ori_txbb & 0x1f;
3407 }
3408
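/* Build the one-shot command word written to REG_NCTL0: the high byte selects
 * the action (0x14/0x16/0x1a/0x1c plus the path, and plus 2 for 80 MHz where
 * the action is bandwidth dependent), the low byte is the fixed trigger value
 * 0x48.
 */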
3409 static u16 rtw8822c_dpk_get_cmd(struct rtw_dev *rtwdev, u8 action, u8 path)
3410 {
3411 u16 cmd;
3412 u8 bw = rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80 ? 2 : 0;
3413
3414 switch (action) {
3415 case RTW_DPK_GAIN_LOSS:
3416 cmd = 0x14 + path;
3417 break;
3418 case RTW_DPK_DO_DPK:
3419 cmd = 0x16 + path + bw;
3420 break;
3421 case RTW_DPK_DPK_ON:
3422 cmd = 0x1a + path;
3423 break;
3424 case RTW_DPK_DAGC:
3425 cmd = 0x1c + path + bw;
3426 break;
3427 default:
3428 return 0;
3429 }
3430
3431 return (cmd << 8) | 0x48;
3432 }
3433
3434 static u8 rtw8822c_dpk_one_shot(struct rtw_dev *rtwdev, u8 path, u8 action)
3435 {
3436 u16 dpk_cmd;
3437 u8 result = 0;
3438
3439 rtw8822c_dpk_set_gnt_wl(rtwdev, true);
3440
3441 if (action == RTW_DPK_CAL_PWR) {
3442 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x1);
3443 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x0);
3444 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3445 msleep(10);
3446 if (!check_hw_ready(rtwdev, REG_STAT_RPT, BIT(31), 0x1)) {
3447 result = 1;
3448 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3449 }
3450 } else {
3451 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3452 0x8 | (path << 1));
3453 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3454
3455 dpk_cmd = rtw8822c_dpk_get_cmd(rtwdev, action, path);
3456 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd);
3457 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd + 1);
3458 msleep(10);
3459 if (!check_hw_ready(rtwdev, 0x2d9c, 0xff, 0x55)) {
3460 result = 1;
3461 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3462 }
3463 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3464 0x8 | (path << 1));
3465 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3466 }
3467
3468 rtw8822c_dpk_set_gnt_wl(rtwdev, false);
3469
3470 rtw_write8(rtwdev, 0x1b10, 0x0);
3471
3472 return result;
3473 }
3474
3475 static u16 rtw8822c_dpk_dgain_read(struct rtw_dev *rtwdev, u8 path)
3476 {
3477 u16 dgain;
3478
3479 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3480 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, 0x00ff0000, 0x0);
3481
3482 dgain = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3483
3484 return dgain;
3485 }
3486
3487 static u8 rtw8822c_dpk_thermal_read(struct rtw_dev *rtwdev, u8 path)
3488 {
3489 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3490 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x0);
3491 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3492 udelay(15);
3493
3494 return (u8)rtw_read_rf(rtwdev, path, RF_T_METER, 0x0007e);
3495 }
3496
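/* Sample one I/Q pair from the PA loopback report, convert both values from
 * 16-bit two's complement to magnitude and return the power I^2 + Q^2, which
 * the gain-loss check below converts into dB.
 */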
3497 static u32 rtw8822c_dpk_pas_read(struct rtw_dev *rtwdev, u8 path)
3498 {
3499 u32 i_val, q_val;
3500
3501 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3502 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3503 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060001);
3504 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3505 rtw_write32(rtwdev, 0x1b4c, 0x00080000);
3506
3507 q_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD);
3508 i_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD);
3509
3510 if (i_val & BIT(15))
3511 i_val = 0x10000 - i_val;
3512 if (q_val & BIT(15))
3513 q_val = 0x10000 - q_val;
3514
3515 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3516
3517 return i_val * i_val + q_val * q_val;
3518 }
3519
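/* Integer approximation of 100 * log2(val): __fls() supplies the integer
 * part and table_fraction[] the fractional correction, indexed in steps of
 * 0.05 of the normalized mantissa.
 */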
3520 static u32 rtw8822c_psd_log2base(u32 val)
3521 {
3522 u32 tmp, val_integerd_b, tindex;
3523 u32 result, val_fractiond_b;
3524 u32 table_fraction[21] = {0, 432, 332, 274, 232, 200, 174,
3525 151, 132, 115, 100, 86, 74, 62, 51,
3526 42, 32, 23, 15, 7, 0};
3527
3528 if (val == 0)
3529 return 0;
3530
3531 val_integerd_b = __fls(val) + 1;
3532
3533 tmp = (val * 100) / (1 << val_integerd_b);
3534 tindex = tmp / 5;
3535
3536 if (tindex >= ARRAY_SIZE(table_fraction))
3537 tindex = ARRAY_SIZE(table_fraction) - 1;
3538
3539 val_fractiond_b = table_fraction[tindex];
3540
3541 result = val_integerd_b * 100 - val_fractiond_b;
3542
3543 return result;
3544 }
3545
3546 static u8 rtw8822c_dpk_gainloss_result(struct rtw_dev *rtwdev, u8 path)
3547 {
3548 u8 result;
3549
3550 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3551 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x1);
3552 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060000);
3553
3554 result = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, 0x000000f0);
3555
3556 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3557
3558 return result;
3559 }
3560
3561 static u8 rtw8822c_dpk_agc_gain_chk(struct rtw_dev *rtwdev, u8 path,
3562 u8 limited_pga)
3563 {
3564 u8 result = 0;
3565 u16 dgain;
3566
3567 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3568 dgain = rtw8822c_dpk_dgain_read(rtwdev, path);
3569
3570 if (dgain > 1535 && !limited_pga)
3571 return RTW_DPK_GAIN_LESS;
3572 else if (dgain < 768 && !limited_pga)
3573 return RTW_DPK_GAIN_LARGE;
3574 else
3575 return result;
3576 }
3577
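/* With rtw8822c_psd_log2base() returning 100 * log2(), the expression
 * 3 * log2base(loss >> 13) - 3870 is roughly the gain loss in units of
 * 0.01 dB, so the thresholds 1000 and 250 correspond to about 10 dB and
 * 2.5 dB respectively (interpretation inferred from the scaling, not
 * documented in the original sources).
 */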
3578 static u8 rtw8822c_dpk_agc_loss_chk(struct rtw_dev *rtwdev, u8 path)
3579 {
3580 u32 loss, loss_db;
3581
3582 loss = rtw8822c_dpk_pas_read(rtwdev, path);
3583 if (loss < 0x4000000)
3584 return RTW_DPK_GL_LESS;
3585 loss_db = 3 * rtw8822c_psd_log2base(loss >> 13) - 3870;
3586
3587 if (loss_db > 1000)
3588 return RTW_DPK_GL_LARGE;
3589 else if (loss_db < 250)
3590 return RTW_DPK_GL_LESS;
3591 else
3592 return RTW_DPK_AGC_OUT;
3593 }
3594
3595 struct rtw8822c_dpk_data {
3596 u8 txbb;
3597 u8 pga;
3598 u8 limited_pga;
3599 u8 agc_cnt;
3600 bool loss_only;
3601 bool gain_only;
3602 u8 path;
3603 };
3604
3605 static u8 rtw8822c_gain_check_state(struct rtw_dev *rtwdev,
3606 struct rtw8822c_dpk_data *data)
3607 {
3608 u8 state;
3609
3610 data->txbb = (u8)rtw_read_rf(rtwdev, data->path, RF_TX_GAIN,
3611 BIT_GAIN_TXBB);
3612 data->pga = (u8)rtw_read_rf(rtwdev, data->path, RF_MODE_TRXAGC,
3613 BIT_RXAGC);
3614
3615 if (data->loss_only) {
3616 state = RTW_DPK_LOSS_CHECK;
3617 goto check_end;
3618 }
3619
3620 state = rtw8822c_dpk_agc_gain_chk(rtwdev, data->path,
3621 data->limited_pga);
3622 if (state == RTW_DPK_GAIN_CHECK && data->gain_only)
3623 state = RTW_DPK_AGC_OUT;
3624 else if (state == RTW_DPK_GAIN_CHECK)
3625 state = RTW_DPK_LOSS_CHECK;
3626
3627 check_end:
3628 data->agc_cnt++;
3629 if (data->agc_cnt >= 6)
3630 state = RTW_DPK_AGC_OUT;
3631
3632 return state;
3633 }
3634
3635 static u8 rtw8822c_gain_large_state(struct rtw_dev *rtwdev,
3636 struct rtw8822c_dpk_data *data)
3637 {
3638 u8 pga = data->pga;
3639
3640 if (pga > 0xe)
3641 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3642 else if (pga > 0xb && pga < 0xf)
3643 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0);
3644 else if (pga < 0xc)
3645 data->limited_pga = 1;
3646
3647 return RTW_DPK_GAIN_CHECK;
3648 }
3649
3650 static u8 rtw8822c_gain_less_state(struct rtw_dev *rtwdev,
3651 struct rtw8822c_dpk_data *data)
3652 {
3653 u8 pga = data->pga;
3654
3655 if (pga < 0xc)
3656 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3657 else if (pga > 0xb && pga < 0xf)
3658 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3659 else if (pga > 0xe)
3660 data->limited_pga = 1;
3661
3662 return RTW_DPK_GAIN_CHECK;
3663 }
3664
3665 static u8 rtw8822c_gl_state(struct rtw_dev *rtwdev,
3666 struct rtw8822c_dpk_data *data, u8 is_large)
3667 {
3668 u8 txbb_bound[] = {0x1f, 0};
3669
3670 if (data->txbb == txbb_bound[is_large])
3671 return RTW_DPK_AGC_OUT;
3672
3673 if (is_large == 1)
3674 data->txbb -= 2;
3675 else
3676 data->txbb += 3;
3677
3678 rtw_write_rf(rtwdev, data->path, RF_TX_GAIN, BIT_GAIN_TXBB, data->txbb);
3679 data->limited_pga = 0;
3680
3681 return RTW_DPK_GAIN_CHECK;
3682 }
3683
3684 static u8 rtw8822c_gl_large_state(struct rtw_dev *rtwdev,
3685 struct rtw8822c_dpk_data *data)
3686 {
3687 return rtw8822c_gl_state(rtwdev, data, 1);
3688 }
3689
3690 static u8 rtw8822c_gl_less_state(struct rtw_dev *rtwdev,
3691 struct rtw8822c_dpk_data *data)
3692 {
3693 return rtw8822c_gl_state(rtwdev, data, 0);
3694 }
3695
3696 static u8 rtw8822c_loss_check_state(struct rtw_dev *rtwdev,
3697 struct rtw8822c_dpk_data *data)
3698 {
3699 u8 path = data->path;
3700 u8 state;
3701
3702 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_GAIN_LOSS);
3703 state = rtw8822c_dpk_agc_loss_chk(rtwdev, path);
3704
3705 return state;
3706 }
3707
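/* Simple state machine for the PA-sample AGC search: each handler in
 * dpk_state[] is indexed by the RTW_DPK_* state it implements and returns
 * the next state; rtw8822c_dpk_pas_agc() starts from RTW_DPK_GAIN_CHECK and
 * keeps dispatching until a handler returns RTW_DPK_AGC_OUT, then reports
 * the resulting TXBB gain.
 */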
3708 static u8 (*dpk_state[])(struct rtw_dev *rtwdev,
3709 struct rtw8822c_dpk_data *data) = {
3710 rtw8822c_gain_check_state, rtw8822c_gain_large_state,
3711 rtw8822c_gain_less_state, rtw8822c_gl_large_state,
3712 rtw8822c_gl_less_state, rtw8822c_loss_check_state };
3713
3714 static u8 rtw8822c_dpk_pas_agc(struct rtw_dev *rtwdev, u8 path,
3715 bool gain_only, bool loss_only)
3716 {
3717 struct rtw8822c_dpk_data data = {0};
3718 u8 (*func)(struct rtw_dev *rtwdev, struct rtw8822c_dpk_data *data);
3719 u8 state = RTW_DPK_GAIN_CHECK;
3720
3721 data.loss_only = loss_only;
3722 data.gain_only = gain_only;
3723 data.path = path;
3724
3725 for (;;) {
3726 func = dpk_state[state];
3727 state = func(rtwdev, &data);
3728 if (state == RTW_DPK_AGC_OUT)
3729 break;
3730 }
3731
3732 return data.txbb;
3733 }
3734
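/* A coefficient pinned at a 13-bit extreme (0x1000 or 0x0fff) is treated as
 * a saturated, and therefore invalid, DPD result; one such value causes the
 * whole coefficient set to be rejected.
 */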
3735 static bool rtw8822c_dpk_coef_iq_check(struct rtw_dev *rtwdev,
3736 u16 coef_i, u16 coef_q)
3737 {
3738 if (coef_i == 0x1000 || coef_i == 0x0fff ||
3739 coef_q == 0x1000 || coef_q == 0x0fff)
3740 return true;
3741
3742 return false;
3743 }
3744
3745 static u32 rtw8822c_dpk_coef_transfer(struct rtw_dev *rtwdev)
3746 {
3747 u32 reg = 0;
3748 u16 coef_i = 0, coef_q = 0;
3749
3750 reg = rtw_read32(rtwdev, REG_STAT_RPT);
3751
3752 coef_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD) & 0x1fff;
3753 coef_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD) & 0x1fff;
3754
3755 coef_q = ((0x2000 - coef_q) & 0x1fff) - 1;
3756
3757 reg = (coef_i << 16) | coef_q;
3758
3759 return reg;
3760 }
3761
3762 static const u32 rtw8822c_dpk_get_coef_tbl[] = {
3763 0x000400f0, 0x040400f0, 0x080400f0, 0x010400f0, 0x050400f0,
3764 0x090400f0, 0x020400f0, 0x060400f0, 0x0a0400f0, 0x030400f0,
3765 0x070400f0, 0x0b0400f0, 0x0c0400f0, 0x100400f0, 0x0d0400f0,
3766 0x110400f0, 0x0e0400f0, 0x120400f0, 0x0f0400f0, 0x130400f0,
3767 };
3768
3769 static void rtw8822c_dpk_coef_tbl_apply(struct rtw_dev *rtwdev, u8 path)
3770 {
3771 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3772 int i;
3773
3774 for (i = 0; i < 20; i++) {
3775 rtw_write32(rtwdev, REG_RXSRAM_CTL,
3776 rtw8822c_dpk_get_coef_tbl[i]);
3777 dpk_info->coef[path][i] = rtw8822c_dpk_coef_transfer(rtwdev);
3778 }
3779 }
3780
3781 static void rtw8822c_dpk_get_coef(struct rtw_dev *rtwdev, u8 path)
3782 {
3783 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3784
3785 if (path == RF_PATH_A) {
3786 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x0);
3787 rtw_write32(rtwdev, REG_DPD_CTL0_S0, 0x30000080);
3788 } else if (path == RF_PATH_B) {
3789 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x1);
3790 rtw_write32(rtwdev, REG_DPD_CTL0_S1, 0x30000080);
3791 }
3792
3793 rtw8822c_dpk_coef_tbl_apply(rtwdev, path);
3794 }
3795
3796 static u8 rtw8822c_dpk_coef_read(struct rtw_dev *rtwdev, u8 path)
3797 {
3798 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3799 u8 addr, result = 1;
3800 u16 coef_i, coef_q;
3801
3802 for (addr = 0; addr < 20; addr++) {
3803 coef_i = FIELD_GET(0x1fff0000, dpk_info->coef[path][addr]);
3804 coef_q = FIELD_GET(0x1fff, dpk_info->coef[path][addr]);
3805
3806 if (rtw8822c_dpk_coef_iq_check(rtwdev, coef_i, coef_q)) {
3807 result = 0;
3808 break;
3809 }
3810 }
3811 return result;
3812 }
3813
3814 static void rtw8822c_dpk_coef_write(struct rtw_dev *rtwdev, u8 path, u8 result)
3815 {
3816 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3817 u16 reg[DPK_RF_PATH_NUM] = {0x1b0c, 0x1b64};
3818 u32 coef;
3819 u8 addr;
3820
3821 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3822 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3823
3824 for (addr = 0; addr < 20; addr++) {
3825 if (result == 0) {
3826 if (addr == 3)
3827 coef = 0x04001fff;
3828 else
3829 coef = 0x00001fff;
3830 } else {
3831 coef = dpk_info->coef[path][addr];
3832 }
3833 rtw_write32(rtwdev, reg[path] + addr * 4, coef);
3834 }
3835 }
3836
3837 static void rtw8822c_dpk_fill_result(struct rtw_dev *rtwdev, u32 dpk_txagc,
3838 u8 path, u8 result)
3839 {
3840 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3841
3842 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3843
3844 if (result)
3845 rtw_write8(rtwdev, REG_DPD_AGC, (u8)(dpk_txagc - 6));
3846 else
3847 rtw_write8(rtwdev, REG_DPD_AGC, 0x00);
3848
3849 dpk_info->result[path] = result;
3850 dpk_info->dpk_txagc[path] = rtw_read8(rtwdev, REG_DPD_AGC);
3851
3852 rtw8822c_dpk_coef_write(rtwdev, path, result);
3853 }
3854
3855 static u32 rtw8822c_dpk_gainloss(struct rtw_dev *rtwdev, u8 path)
3856 {
3857 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3858 u8 tx_agc, tx_bb, ori_txbb, ori_txagc, tx_agc_search, t1, t2;
3859
3860 ori_txbb = rtw8822c_dpk_rf_setting(rtwdev, path);
3861 ori_txagc = (u8)rtw_read_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_TXAGC);
3862
3863 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3864 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3865 rtw8822c_dpk_dgain_read(rtwdev, path);
3866
3867 if (rtw8822c_dpk_dc_corr_check(rtwdev, path)) {
3868 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3869 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3870 rtw8822c_dpk_dc_corr_check(rtwdev, path);
3871 }
3872
3873 t1 = rtw8822c_dpk_thermal_read(rtwdev, path);
3874 tx_bb = rtw8822c_dpk_pas_agc(rtwdev, path, false, true);
3875 tx_agc_search = rtw8822c_dpk_gainloss_result(rtwdev, path);
3876
3877 if (tx_bb < tx_agc_search)
3878 tx_bb = 0;
3879 else
3880 tx_bb = tx_bb - tx_agc_search;
3881
3882 rtw_write_rf(rtwdev, path, RF_TX_GAIN, BIT_GAIN_TXBB, tx_bb);
3883
3884 tx_agc = ori_txagc - (ori_txbb - tx_bb);
3885
3886 t2 = rtw8822c_dpk_thermal_read(rtwdev, path);
3887
3888 dpk_info->thermal_dpk_delta[path] = abs(t2 - t1);
3889
3890 return tx_agc;
3891 }
3892
3893 static u8 rtw8822c_dpk_by_path(struct rtw_dev *rtwdev, u32 tx_agc, u8 path)
3894 {
3895 u8 result;
3896
3897 result = rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DO_DPK);
3898
3899 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3900
3901 result = result | (u8)rtw_read32_mask(rtwdev, REG_DPD_CTL1_S0, BIT(26));
3902
3903 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x33e14);
3904
3905 rtw8822c_dpk_get_coef(rtwdev, path);
3906
3907 return result;
3908 }
3909
3910 static void rtw8822c_dpk_cal_gs(struct rtw_dev *rtwdev, u8 path)
3911 {
3912 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3913 u32 tmp_gs = 0;
3914
3915 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3916 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_BYPASS_DPD, 0x0);
3917 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3918 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3919 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x1);
3920 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3921 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0xf);
3922
3923 if (path == RF_PATH_A) {
3924 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
3925 0x1066680);
3926 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN, 0x1);
3927 } else {
3928 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
3929 0x1066680);
3930 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN, 0x1);
3931 }
3932
3933 if (dpk_info->dpk_bw == DPK_CHANNEL_WIDTH_80) {
3934 rtw_write32(rtwdev, REG_DPD_CTL16, 0x80001310);
3935 rtw_write32(rtwdev, REG_DPD_CTL16, 0x00001310);
3936 rtw_write32(rtwdev, REG_DPD_CTL16, 0x810000db);
3937 rtw_write32(rtwdev, REG_DPD_CTL16, 0x010000db);
3938 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3939 rtw_write32(rtwdev, REG_DPD_CTL15,
3940 0x05020000 | (BIT(path) << 28));
3941 } else {
3942 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8200190c);
3943 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0200190c);
3944 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8301ee14);
3945 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0301ee14);
3946 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3947 rtw_write32(rtwdev, REG_DPD_CTL15,
3948 0x05020008 | (BIT(path) << 28));
3949 }
3950
3951 rtw_write32_mask(rtwdev, REG_DPD_CTL0, MASKBYTE3, 0x8 | path);
3952
3953 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_CAL_PWR);
3954
3955 rtw_write32_mask(rtwdev, REG_DPD_CTL15, MASKBYTE3, 0x0);
3956 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3957 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3958 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x0);
3959 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3960
3961 if (path == RF_PATH_A)
3962 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, 0x5b);
3963 else
3964 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, 0x5b);
3965
3966 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3967
3968 tmp_gs = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, BIT_RPT_DGAIN);
3969 tmp_gs = (tmp_gs * 910) >> 10;
3970 tmp_gs = DIV_ROUND_CLOSEST(tmp_gs, 10);
3971
3972 if (path == RF_PATH_A)
3973 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, tmp_gs);
3974 else
3975 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, tmp_gs);
3976
3977 dpk_info->dpk_gs[path] = tmp_gs;
3978 }
3979
3980 static void rtw8822c_dpk_cal_coef1(struct rtw_dev *rtwdev)
3981 {
3982 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3983 u32 offset[DPK_RF_PATH_NUM] = {0, 0x58};
3984 u32 i_scaling;
3985 u8 path;
3986
3987 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3988 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3989 rtw_write32(rtwdev, REG_NCTL0, 0x00001148);
3990 rtw_write32(rtwdev, REG_NCTL0, 0x00001149);
3991
3992 check_hw_ready(rtwdev, 0x2d9c, MASKBYTE0, 0x55);
3993
3994 rtw_write8(rtwdev, 0x1b10, 0x0);
3995 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3996
3997 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3998 i_scaling = 0x16c00 / dpk_info->dpk_gs[path];
3999
4000 rtw_write32_mask(rtwdev, 0x1b18 + offset[path], MASKHWORD,
4001 i_scaling);
4002 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4003 GENMASK(31, 28), 0x9);
4004 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4005 GENMASK(31, 28), 0x1);
4006 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4007 GENMASK(31, 28), 0x0);
4008 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0 + offset[path],
4009 BIT(14), 0x0);
4010 }
4011 }
4012
4013 static void rtw8822c_dpk_on(struct rtw_dev *rtwdev, u8 path)
4014 {
4015 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4016
4017 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4018
4019 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
4020 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
4021
4022 if (test_bit(path, dpk_info->dpk_path_ok))
4023 rtw8822c_dpk_cal_gs(rtwdev, path);
4024 }
4025
4026 static bool rtw8822c_dpk_check_pass(struct rtw_dev *rtwdev, bool is_fail,
4027 u32 dpk_txagc, u8 path)
4028 {
4029 bool result;
4030
4031 if (!is_fail) {
4032 if (rtw8822c_dpk_coef_read(rtwdev, path))
4033 result = true;
4034 else
4035 result = false;
4036 } else {
4037 result = false;
4038 }
4039
4040 rtw8822c_dpk_fill_result(rtwdev, dpk_txagc, path, result);
4041
4042 return result;
4043 }
4044
4045 static void rtw8822c_dpk_result_reset(struct rtw_dev *rtwdev)
4046 {
4047 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4048 u8 path;
4049
4050 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4051 clear_bit(path, dpk_info->dpk_path_ok);
4052 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4053 0x8 | (path << 1));
4054 rtw_write32_mask(rtwdev, 0x1b58, 0x0000007f, 0x0);
4055
4056 dpk_info->dpk_txagc[path] = 0;
4057 dpk_info->result[path] = 0;
4058 dpk_info->dpk_gs[path] = 0x5b;
4059 dpk_info->pre_pwsf[path] = 0;
4060 dpk_info->thermal_dpk[path] = rtw8822c_dpk_thermal_read(rtwdev,
4061 path);
4062 }
4063 }
4064
4065 static void rtw8822c_dpk_calibrate(struct rtw_dev *rtwdev, u8 path)
4066 {
4067 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4068 u32 dpk_txagc;
4069 u8 dpk_fail;
4070
4071 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk start\n", path);
4072
4073 dpk_txagc = rtw8822c_dpk_gainloss(rtwdev, path);
4074
4075 dpk_fail = rtw8822c_dpk_by_path(rtwdev, dpk_txagc, path);
4076
4077 if (!rtw8822c_dpk_check_pass(rtwdev, dpk_fail, dpk_txagc, path))
4078 rtw_err(rtwdev, "failed to do dpk calibration\n");
4079
4080 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk finish\n", path);
4081
4082 if (dpk_info->result[path])
4083 set_bit(path, dpk_info->dpk_path_ok);
4084 }
4085
4086 static void rtw8822c_dpk_path_select(struct rtw_dev *rtwdev)
4087 {
4088 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_A);
4089 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_B);
4090 rtw8822c_dpk_on(rtwdev, RF_PATH_A);
4091 rtw8822c_dpk_on(rtwdev, RF_PATH_B);
4092 rtw8822c_dpk_cal_coef1(rtwdev);
4093 }
4094
4095 static void rtw8822c_dpk_enable_disable(struct rtw_dev *rtwdev)
4096 {
4097 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4098 u32 mask = BIT(15) | BIT(14);
4099
4100 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4101
4102 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN,
4103 dpk_info->is_dpk_pwr_on);
4104 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN,
4105 dpk_info->is_dpk_pwr_on);
4106
4107 if (test_bit(RF_PATH_A, dpk_info->dpk_path_ok)) {
4108 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, mask, 0x0);
4109 rtw_write8(rtwdev, REG_DPD_CTL0_S0, dpk_info->dpk_gs[RF_PATH_A]);
4110 }
4111 if (test_bit(RF_PATH_B, dpk_info->dpk_path_ok)) {
4112 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, mask, 0x0);
4113 rtw_write8(rtwdev, REG_DPD_CTL0_S1, dpk_info->dpk_gs[RF_PATH_B]);
4114 }
4115 }
4116
4117 static void rtw8822c_dpk_reload_data(struct rtw_dev *rtwdev)
4118 {
4119 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4120 u8 path;
4121
4122 if (!test_bit(RF_PATH_A, dpk_info->dpk_path_ok) &&
4123 !test_bit(RF_PATH_B, dpk_info->dpk_path_ok) &&
4124 dpk_info->dpk_ch == 0)
4125 return;
4126
4127 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4128 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4129 0x8 | (path << 1));
4130 if (dpk_info->dpk_band == RTW_BAND_2G)
4131 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
4132 else
4133 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
4134
4135 rtw_write8(rtwdev, REG_DPD_AGC, dpk_info->dpk_txagc[path]);
4136
4137 rtw8822c_dpk_coef_write(rtwdev, path,
4138 test_bit(path, dpk_info->dpk_path_ok));
4139
4140 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4141
4142 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4143
4144 if (path == RF_PATH_A)
4145 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
4146 dpk_info->dpk_gs[path]);
4147 else
4148 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
4149 dpk_info->dpk_gs[path]);
4150 }
4151 rtw8822c_dpk_cal_coef1(rtwdev);
4152 }
4153
4154 static bool rtw8822c_dpk_reload(struct rtw_dev *rtwdev)
4155 {
4156 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4157 u8 channel;
4158
4159 dpk_info->is_reload = false;
4160
4161 channel = (u8)(rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK) & 0xff);
4162
4163 if (channel == dpk_info->dpk_ch) {
4164 rtw_dbg(rtwdev, RTW_DBG_RFK,
4165 "[DPK] DPK reload for CH%d!!\n", dpk_info->dpk_ch);
4166 rtw8822c_dpk_reload_data(rtwdev);
4167 dpk_info->is_reload = true;
4168 }
4169
4170 return dpk_info->is_reload;
4171 }
4172
4173 static void rtw8822c_do_dpk(struct rtw_dev *rtwdev)
4174 {
4175 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4176 struct rtw_backup_info bckp[DPK_BB_REG_NUM];
4177 u32 rf_reg_backup[DPK_RF_REG_NUM][DPK_RF_PATH_NUM];
4178 u32 bb_reg[DPK_BB_REG_NUM] = {
4179 0x520, 0x820, 0x824, 0x1c3c, 0x1d58, 0x1864,
4180 0x4164, 0x180c, 0x410c, 0x186c, 0x416c,
4181 0x1a14, 0x1e70, 0x80c, 0x1d70, 0x1e7c, 0x18a4, 0x41a4};
4182 u32 rf_reg[DPK_RF_REG_NUM] = {
4183 0x0, 0x1a, 0x55, 0x63, 0x87, 0x8f, 0xde};
4184 u8 path;
4185
4186 if (!dpk_info->is_dpk_pwr_on) {
4187 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] Skip DPK due to DPD PWR off\n");
4188 return;
4189 } else if (rtw8822c_dpk_reload(rtwdev)) {
4190 return;
4191 }
4192
4193 for (path = RF_PATH_A; path < DPK_RF_PATH_NUM; path++)
4194 ewma_thermal_init(&dpk_info->avg_thermal[path]);
4195
4196 rtw8822c_dpk_information(rtwdev);
4197
4198 rtw8822c_dpk_backup_registers(rtwdev, bb_reg, DPK_BB_REG_NUM, bckp);
4199 rtw8822c_dpk_backup_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4200
4201 rtw8822c_dpk_mac_bb_setting(rtwdev);
4202 rtw8822c_dpk_afe_setting(rtwdev, true);
4203 rtw8822c_dpk_pre_setting(rtwdev);
4204 rtw8822c_dpk_result_reset(rtwdev);
4205 rtw8822c_dpk_path_select(rtwdev);
4206 rtw8822c_dpk_afe_setting(rtwdev, false);
4207 rtw8822c_dpk_enable_disable(rtwdev);
4208
4209 rtw8822c_dpk_reload_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4210 for (path = 0; path < rtwdev->hal.rf_path_num; path++)
4211 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
4212 rtw8822c_dpk_restore_registers(rtwdev, DPK_BB_REG_NUM, bckp);
4213 }
4214
4215 static void rtw8822c_phy_calibration(struct rtw_dev *rtwdev)
4216 {
4217 rtw8822c_rfk_power_save(rtwdev, false);
4218 rtw8822c_do_gapk(rtwdev);
4219 rtw8822c_do_iqk(rtwdev);
4220 rtw8822c_do_dpk(rtwdev);
4221 rtw8822c_rfk_power_save(rtwdev, true);
4222 }
4223
4224 static void rtw8822c_dpk_track(struct rtw_dev *rtwdev)
4225 {
4226 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4227 u8 path;
4228 u8 thermal_value[DPK_RF_PATH_NUM] = {0};
4229 s8 offset[DPK_RF_PATH_NUM], delta_dpk[DPK_RF_PATH_NUM];
4230
4231 if (dpk_info->thermal_dpk[0] == 0 && dpk_info->thermal_dpk[1] == 0)
4232 return;
4233
4234 for (path = 0; path < DPK_RF_PATH_NUM; path++) {
4235 thermal_value[path] = rtw8822c_dpk_thermal_read(rtwdev, path);
4236 ewma_thermal_add(&dpk_info->avg_thermal[path],
4237 thermal_value[path]);
4238 thermal_value[path] =
4239 ewma_thermal_read(&dpk_info->avg_thermal[path]);
4240 delta_dpk[path] = dpk_info->thermal_dpk[path] -
4241 thermal_value[path];
4242 offset[path] = delta_dpk[path] -
4243 dpk_info->thermal_dpk_delta[path];
4244 offset[path] &= 0x7f;
4245
4246 if (offset[path] != dpk_info->pre_pwsf[path]) {
4247 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4248 0x8 | (path << 1));
4249 rtw_write32_mask(rtwdev, 0x1b58, GENMASK(6, 0),
4250 offset[path]);
4251 dpk_info->pre_pwsf[path] = offset[path];
4252 }
4253 }
4254 }
4255
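/* XCAP_EXTEND() duplicates the 7-bit crystal cap code into bits [13:7] so
 * the same value is written to both halves of the XTAL buffer (presumably
 * the XI and XO branches) through BIT_XCAP_0.
 */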
4256 #define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
4257 static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
4258 {
4259 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4260 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4261 u32 val = 0;
4262
4263 val = XCAP_EXTEND(crystal_cap);
4264 cfo->crystal_cap = crystal_cap;
4265 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
4266 }
4267
4268 static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
4269 {
4270 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4271 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4272
4273 if (cfo->crystal_cap == crystal_cap)
4274 return;
4275
4276 rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
4277 }
4278
4279 static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
4280 {
4281 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4282 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4283
4284 cfo->is_adjust = true;
4285
4286 if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
4287 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
4288 else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
4289 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
4290 }
4291
4292 static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
4293 {
4294 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4295 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4296
4297 cfo->crystal_cap = rtwdev->efuse.crystal_cap;
4298 cfo->is_adjust = true;
4299 }
4300
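/* REPORT_TO_KHZ() multiplies the accumulated CFO report by 2.5
 * ((v << 1) + (v >> 1)); judging by its name this converts the hardware
 * report units into kHz before averaging over the received packets.
 */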
4301 #define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
4302 static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
4303 {
4304 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4305 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4306 s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
4307 u8 i;
4308
4309 for (i = 0; i < path_num; i++) {
4310 cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
4311
4312 if (cfo->cfo_cnt[i])
4313 cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
4314 else
4315 cfo_avg = 0;
4316
4317 cfo_path_sum += cfo_avg;
4318 }
4319
4320 for (i = 0; i < path_num; i++) {
4321 cfo->cfo_tail[i] = 0;
4322 cfo->cfo_cnt[i] = 0;
4323 }
4324
4325 return cfo_path_sum / path_num;
4326 }
4327
4328 static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
4329 {
4330 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4331 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4332
4333 if (!cfo->is_adjust) {
4334 if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
4335 cfo->is_adjust = true;
4336 } else {
4337 if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
4338 cfo->is_adjust = false;
4339 }
4340
4341 if (!rtw_coex_disabled(rtwdev)) {
4342 cfo->is_adjust = false;
4343 rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
4344 }
4345 }
4346
4347 static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
4348 {
4349 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4350 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4351 u8 path_num = rtwdev->hal.rf_path_num;
4352 s8 crystal_cap = cfo->crystal_cap;
4353 s32 cfo_avg = 0;
4354
4355 if (rtwdev->sta_cnt != 1) {
4356 rtw8822c_cfo_tracking_reset(rtwdev);
4357 return;
4358 }
4359
4360 if (cfo->packet_count == cfo->packet_count_pre)
4361 return;
4362
4363 cfo->packet_count_pre = cfo->packet_count;
4364 cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
4365 rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
4366
4367 if (cfo->is_adjust) {
4368 if (cfo_avg > CFO_TRK_ADJ_TH)
4369 crystal_cap++;
4370 else if (cfo_avg < -CFO_TRK_ADJ_TH)
4371 crystal_cap--;
4372
4373 crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
4374 rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
4375 }
4376 }
4377
4378 static const struct rtw_phy_cck_pd_reg
4379 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
4380 {
4381 {0x1ac8, 0x00ff, 0x1ad0, 0x01f},
4382 {0x1ac8, 0xff00, 0x1ad0, 0x3e0}
4383 },
4384 {
4385 {0x1acc, 0x00ff, 0x1ad0, 0x01F00000},
4386 {0x1acc, 0xff00, 0x1ad0, 0x3E000000}
4387 },
4388 };
4389
4390 #define RTW_CCK_PD_MAX 255
4391 #define RTW_CCK_CS_MAX 31
4392 #define RTW_CCK_CS_ERR1 27
4393 #define RTW_CCK_CS_ERR2 29
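/* RTW_CCK_CS_ERR1/2 are CS ratio codes the hardware apparently cannot use;
 * when the adjusted ratio lands on one of them it is bumped by one, and both
 * the PD threshold and the CS ratio are clamped to their maxima.
 */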
4394 static void
4395 rtw8822c_phy_cck_pd_set_reg(struct rtw_dev *rtwdev,
4396 s8 pd_diff, s8 cs_diff, u8 bw, u8 nrx)
4397 {
4398 u32 pd, cs;
4399
4400 if (WARN_ON(bw > RTW_CHANNEL_WIDTH_40 || nrx >= RTW_RF_PATH_MAX))
4401 return;
4402
4403 pd = rtw_read32_mask(rtwdev,
4404 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4405 rtw8822c_cck_pd_reg[bw][nrx].mask_pd);
4406 cs = rtw_read32_mask(rtwdev,
4407 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4408 rtw8822c_cck_pd_reg[bw][nrx].mask_cs);
4409 pd += pd_diff;
4410 cs += cs_diff;
4411 if (pd > RTW_CCK_PD_MAX)
4412 pd = RTW_CCK_PD_MAX;
4413 if (cs == RTW_CCK_CS_ERR1 || cs == RTW_CCK_CS_ERR2)
4414 cs++;
4415 else if (cs > RTW_CCK_CS_MAX)
4416 cs = RTW_CCK_CS_MAX;
4417 rtw_write32_mask(rtwdev,
4418 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4419 rtw8822c_cck_pd_reg[bw][nrx].mask_pd,
4420 pd);
4421 rtw_write32_mask(rtwdev,
4422 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4423 rtw8822c_cck_pd_reg[bw][nrx].mask_cs,
4424 cs);
4425
4426 rtw_dbg(rtwdev, RTW_DBG_PHY,
4427 "is_linked=%d, bw=%d, nrx=%d, cs_ratio=0x%x, pd_th=0x%x\n",
4428 rtw_is_assoc(rtwdev), bw, nrx, cs, pd);
4429 }
4430
4431 static void rtw8822c_phy_cck_pd_set(struct rtw_dev *rtwdev, u8 new_lvl)
4432 {
4433 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4434 s8 pd_lvl[CCK_PD_LV_MAX] = {0, 2, 4, 6, 8};
4435 s8 cs_lvl[CCK_PD_LV_MAX] = {0, 2, 2, 2, 4};
4436 u8 cur_lvl;
4437 u8 nrx, bw;
4438
4439 nrx = (u8)rtw_read32_mask(rtwdev, 0x1a2c, 0x60000);
4440 bw = (u8)rtw_read32_mask(rtwdev, 0x9b0, 0xc);
4441
4442 rtw_dbg(rtwdev, RTW_DBG_PHY, "lv: (%d) -> (%d) bw=%d nr=%d cck_fa_avg=%d\n",
4443 dm_info->cck_pd_lv[bw][nrx], new_lvl, bw, nrx,
4444 dm_info->cck_fa_avg);
4445
4446 if (dm_info->cck_pd_lv[bw][nrx] == new_lvl)
4447 return;
4448
4449 cur_lvl = dm_info->cck_pd_lv[bw][nrx];
4450
4451 /* update cck pd info */
4452 dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
4453
4454 rtw8822c_phy_cck_pd_set_reg(rtwdev,
4455 pd_lvl[new_lvl] - pd_lvl[cur_lvl],
4456 cs_lvl[new_lvl] - cs_lvl[cur_lvl],
4457 bw, nrx);
4458 dm_info->cck_pd_lv[bw][nrx] = new_lvl;
4459 }
4460
4461 #define PWR_TRACK_MASK 0x7f
4462 static void rtw8822c_pwrtrack_set(struct rtw_dev *rtwdev, u8 rf_path)
4463 {
4464 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4465
4466 switch (rf_path) {
4467 case RF_PATH_A:
4468 rtw_write32_mask(rtwdev, 0x18a0, PWR_TRACK_MASK,
4469 dm_info->delta_power_index[rf_path]);
4470 break;
4471 case RF_PATH_B:
4472 rtw_write32_mask(rtwdev, 0x41a0, PWR_TRACK_MASK,
4473 dm_info->delta_power_index[rf_path]);
4474 break;
4475 default:
4476 break;
4477 }
4478 }
4479
4480 static void rtw8822c_pwr_track_stats(struct rtw_dev *rtwdev, u8 path)
4481 {
4482 u8 thermal_value;
4483
4484 if (rtwdev->efuse.thermal_meter[path] == 0xff)
4485 return;
4486
4487 thermal_value = rtw_read_rf(rtwdev, path, RF_T_METER, 0x7e);
4488 rtw_phy_pwrtrack_avg(rtwdev, thermal_value, path);
4489 }
4490
4491 static void rtw8822c_pwr_track_path(struct rtw_dev *rtwdev,
4492 struct rtw_swing_table *swing_table,
4493 u8 path)
4494 {
4495 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4496 u8 delta;
4497
4498 delta = rtw_phy_pwrtrack_get_delta(rtwdev, path);
4499 dm_info->delta_power_index[path] =
4500 rtw_phy_pwrtrack_get_pwridx(rtwdev, swing_table, path, path,
4501 delta);
4502 rtw8822c_pwrtrack_set(rtwdev, path);
4503 }
4504
4505 static void __rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4506 {
4507 struct rtw_swing_table swing_table;
4508 u8 i;
4509
4510 rtw_phy_config_swing_table(rtwdev, &swing_table);
4511
4512 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4513 rtw8822c_pwr_track_stats(rtwdev, i);
4514 if (rtw_phy_pwrtrack_need_lck(rtwdev))
4515 rtw8822c_do_lck(rtwdev);
4516 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4517 rtw8822c_pwr_track_path(rtwdev, &swing_table, i);
4518 }
4519
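/* Power tracking runs in two passes: the first invocation only kicks the
 * thermal meters on both paths (toggling RF_T_METER bit 19) and sets
 * pwr_trk_triggered; the next invocation reads the averaged thermal values
 * and applies the power index deltas.
 */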
4520 static void rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4521 {
4522 struct rtw_efuse *efuse = &rtwdev->efuse;
4523 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4524
4525 if (efuse->power_track_type != 0)
4526 return;
4527
4528 if (!dm_info->pwr_trk_triggered) {
4529 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4530 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x00);
4531 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4532
4533 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4534 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x00);
4535 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4536
4537 dm_info->pwr_trk_triggered = true;
4538 return;
4539 }
4540
4541 __rtw8822c_pwr_track(rtwdev);
4542 dm_info->pwr_trk_triggered = false;
4543 }
4544
4545 static void rtw8822c_adaptivity_init(struct rtw_dev *rtwdev)
4546 {
4547 rtw_phy_set_edcca_th(rtwdev, RTW8822C_EDCCA_MAX, RTW8822C_EDCCA_MAX);
4548
4549 /* mac edcca state setting */
4550 rtw_write32_clr(rtwdev, REG_TX_PTCL_CTRL, BIT_DIS_EDCCA);
4551 rtw_write32_set(rtwdev, REG_RD_CTRL, BIT_EDCCA_MSK_CNTDOWN_EN);
4552
4553 	/* edcca decision option */
4554 rtw_write32_clr(rtwdev, REG_EDCCA_DECISION, BIT_EDCCA_OPTION);
4555 }
4556
4557 static void rtw8822c_adaptivity(struct rtw_dev *rtwdev)
4558 {
4559 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4560 s8 l2h, h2l;
4561 u8 igi;
4562
4563 igi = dm_info->igi_history[0];
4564 if (dm_info->edcca_mode == RTW_EDCCA_NORMAL) {
4565 l2h = max_t(s8, igi + EDCCA_IGI_L2H_DIFF, EDCCA_TH_L2H_LB);
4566 h2l = l2h - EDCCA_L2H_H2L_DIFF_NORMAL;
4567 } else {
4568 if (igi < dm_info->l2h_th_ini - EDCCA_ADC_BACKOFF)
4569 l2h = igi + EDCCA_ADC_BACKOFF;
4570 else
4571 l2h = dm_info->l2h_th_ini;
4572 h2l = l2h - EDCCA_L2H_H2L_DIFF;
4573 }
4574
4575 rtw_phy_set_edcca_th(rtwdev, l2h, h2l);
4576 }
4577
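/* The TX descriptor checksum is computed over 16-bit words covering the
 * descriptor plus the packet offset area (pkt_offset appears to be in 8-byte
 * units), hence (pkt_offset * 8 + tx_pkt_desc_sz) / 2 words are handed to the
 * common helper.
 */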
4578 static void rtw8822c_fill_txdesc_checksum(struct rtw_dev *rtwdev,
4579 struct rtw_tx_pkt_info *pkt_info,
4580 u8 *txdesc)
4581 {
4582 const struct rtw_chip_info *chip = rtwdev->chip;
4583 size_t words;
4584
4585 words = (pkt_info->pkt_offset * 8 + chip->tx_pkt_desc_sz) / 2;
4586
4587 fill_txdesc_checksum_common(txdesc, words);
4588 }
4589
4590 static const struct rtw_pwr_seq_cmd trans_carddis_to_cardemu_8822c[] = {
4591 {0x0086,
4592 RTW_PWR_CUT_ALL_MSK,
4593 RTW_PWR_INTF_SDIO_MSK,
4594 RTW_PWR_ADDR_SDIO,
4595 RTW_PWR_CMD_WRITE, BIT(0), 0},
4596 {0x0086,
4597 RTW_PWR_CUT_ALL_MSK,
4598 RTW_PWR_INTF_SDIO_MSK,
4599 RTW_PWR_ADDR_SDIO,
4600 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4601 {0x002E,
4602 RTW_PWR_CUT_ALL_MSK,
4603 RTW_PWR_INTF_ALL_MSK,
4604 RTW_PWR_ADDR_MAC,
4605 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4606 {0x002D,
4607 RTW_PWR_CUT_ALL_MSK,
4608 RTW_PWR_INTF_ALL_MSK,
4609 RTW_PWR_ADDR_MAC,
4610 RTW_PWR_CMD_WRITE, BIT(0), 0},
4611 {0x007F,
4612 RTW_PWR_CUT_ALL_MSK,
4613 RTW_PWR_INTF_ALL_MSK,
4614 RTW_PWR_ADDR_MAC,
4615 RTW_PWR_CMD_WRITE, BIT(7), 0},
4616 {0x004A,
4617 RTW_PWR_CUT_ALL_MSK,
4618 RTW_PWR_INTF_USB_MSK,
4619 RTW_PWR_ADDR_MAC,
4620 RTW_PWR_CMD_WRITE, BIT(0), 0},
4621 {0x0005,
4622 RTW_PWR_CUT_ALL_MSK,
4623 RTW_PWR_INTF_ALL_MSK,
4624 RTW_PWR_ADDR_MAC,
4625 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4) | BIT(7), 0},
4626 {0xFFFF,
4627 RTW_PWR_CUT_ALL_MSK,
4628 RTW_PWR_INTF_ALL_MSK,
4629 0,
4630 RTW_PWR_CMD_END, 0, 0},
4631 };
4632
4633 static const struct rtw_pwr_seq_cmd trans_cardemu_to_act_8822c[] = {
4634 {0x0000,
4635 RTW_PWR_CUT_ALL_MSK,
4636 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4637 RTW_PWR_ADDR_MAC,
4638 RTW_PWR_CMD_WRITE, BIT(5), 0},
4639 {0x0005,
4640 RTW_PWR_CUT_ALL_MSK,
4641 RTW_PWR_INTF_ALL_MSK,
4642 RTW_PWR_ADDR_MAC,
4643 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3) | BIT(2)), 0},
4644 {0x0075,
4645 RTW_PWR_CUT_ALL_MSK,
4646 RTW_PWR_INTF_PCI_MSK,
4647 RTW_PWR_ADDR_MAC,
4648 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4649 {0x0006,
4650 RTW_PWR_CUT_ALL_MSK,
4651 RTW_PWR_INTF_ALL_MSK,
4652 RTW_PWR_ADDR_MAC,
4653 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4654 {0x0075,
4655 RTW_PWR_CUT_ALL_MSK,
4656 RTW_PWR_INTF_PCI_MSK,
4657 RTW_PWR_ADDR_MAC,
4658 RTW_PWR_CMD_WRITE, BIT(0), 0},
4659 {0xFF1A,
4660 RTW_PWR_CUT_ALL_MSK,
4661 RTW_PWR_INTF_USB_MSK,
4662 RTW_PWR_ADDR_MAC,
4663 RTW_PWR_CMD_WRITE, 0xFF, 0},
4664 {0x002E,
4665 RTW_PWR_CUT_ALL_MSK,
4666 RTW_PWR_INTF_ALL_MSK,
4667 RTW_PWR_ADDR_MAC,
4668 RTW_PWR_CMD_WRITE, BIT(3), 0},
4669 {0x0006,
4670 RTW_PWR_CUT_ALL_MSK,
4671 RTW_PWR_INTF_ALL_MSK,
4672 RTW_PWR_ADDR_MAC,
4673 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4674 {0x0005,
4675 RTW_PWR_CUT_ALL_MSK,
4676 RTW_PWR_INTF_ALL_MSK,
4677 RTW_PWR_ADDR_MAC,
4678 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3)), 0},
4679 {0x1018,
4680 RTW_PWR_CUT_ALL_MSK,
4681 RTW_PWR_INTF_ALL_MSK,
4682 RTW_PWR_ADDR_MAC,
4683 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4684 {0x0005,
4685 RTW_PWR_CUT_ALL_MSK,
4686 RTW_PWR_INTF_ALL_MSK,
4687 RTW_PWR_ADDR_MAC,
4688 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4689 {0x0005,
4690 RTW_PWR_CUT_ALL_MSK,
4691 RTW_PWR_INTF_ALL_MSK,
4692 RTW_PWR_ADDR_MAC,
4693 RTW_PWR_CMD_POLLING, BIT(0), 0},
4694 {0x0074,
4695 RTW_PWR_CUT_ALL_MSK,
4696 RTW_PWR_INTF_PCI_MSK,
4697 RTW_PWR_ADDR_MAC,
4698 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4699 {0x0071,
4700 RTW_PWR_CUT_ALL_MSK,
4701 RTW_PWR_INTF_PCI_MSK,
4702 RTW_PWR_ADDR_MAC,
4703 RTW_PWR_CMD_WRITE, BIT(4), 0},
4704 {0x0062,
4705 RTW_PWR_CUT_ALL_MSK,
4706 RTW_PWR_INTF_PCI_MSK,
4707 RTW_PWR_ADDR_MAC,
4708 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)),
4709 (BIT(7) | BIT(6) | BIT(5))},
4710 {0x0061,
4711 RTW_PWR_CUT_ALL_MSK,
4712 RTW_PWR_INTF_PCI_MSK,
4713 RTW_PWR_ADDR_MAC,
4714 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)), 0},
4715 {0x001F,
4716 RTW_PWR_CUT_ALL_MSK,
4717 RTW_PWR_INTF_ALL_MSK,
4718 RTW_PWR_ADDR_MAC,
4719 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4720 {0x00EF,
4721 RTW_PWR_CUT_ALL_MSK,
4722 RTW_PWR_INTF_ALL_MSK,
4723 RTW_PWR_ADDR_MAC,
4724 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4725 {0x1045,
4726 RTW_PWR_CUT_ALL_MSK,
4727 RTW_PWR_INTF_ALL_MSK,
4728 RTW_PWR_ADDR_MAC,
4729 RTW_PWR_CMD_WRITE, BIT(4), BIT(4)},
4730 {0x0010,
4731 RTW_PWR_CUT_ALL_MSK,
4732 RTW_PWR_INTF_ALL_MSK,
4733 RTW_PWR_ADDR_MAC,
4734 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4735 {0x1064,
4736 RTW_PWR_CUT_ALL_MSK,
4737 RTW_PWR_INTF_ALL_MSK,
4738 RTW_PWR_ADDR_MAC,
4739 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4740 {0xFFFF,
4741 RTW_PWR_CUT_ALL_MSK,
4742 RTW_PWR_INTF_ALL_MSK,
4743 0,
4744 RTW_PWR_CMD_END, 0, 0},
4745 };
4746
4747 static const struct rtw_pwr_seq_cmd trans_act_to_cardemu_8822c[] = {
4748 {0x0093,
4749 RTW_PWR_CUT_ALL_MSK,
4750 RTW_PWR_INTF_ALL_MSK,
4751 RTW_PWR_ADDR_MAC,
4752 RTW_PWR_CMD_WRITE, BIT(3), 0},
4753 {0x001F,
4754 RTW_PWR_CUT_ALL_MSK,
4755 RTW_PWR_INTF_ALL_MSK,
4756 RTW_PWR_ADDR_MAC,
4757 RTW_PWR_CMD_WRITE, 0xFF, 0},
4758 {0x00EF,
4759 RTW_PWR_CUT_ALL_MSK,
4760 RTW_PWR_INTF_ALL_MSK,
4761 RTW_PWR_ADDR_MAC,
4762 RTW_PWR_CMD_WRITE, 0xFF, 0},
4763 {0x1045,
4764 RTW_PWR_CUT_ALL_MSK,
4765 RTW_PWR_INTF_ALL_MSK,
4766 RTW_PWR_ADDR_MAC,
4767 RTW_PWR_CMD_WRITE, BIT(4), 0},
4768 {0xFF1A,
4769 RTW_PWR_CUT_ALL_MSK,
4770 RTW_PWR_INTF_USB_MSK,
4771 RTW_PWR_ADDR_MAC,
4772 RTW_PWR_CMD_WRITE, 0xFF, 0x30},
4773 {0x0049,
4774 RTW_PWR_CUT_ALL_MSK,
4775 RTW_PWR_INTF_ALL_MSK,
4776 RTW_PWR_ADDR_MAC,
4777 RTW_PWR_CMD_WRITE, BIT(1), 0},
4778 {0x0006,
4779 RTW_PWR_CUT_ALL_MSK,
4780 RTW_PWR_INTF_ALL_MSK,
4781 RTW_PWR_ADDR_MAC,
4782 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4783 {0x0002,
4784 RTW_PWR_CUT_ALL_MSK,
4785 RTW_PWR_INTF_ALL_MSK,
4786 RTW_PWR_ADDR_MAC,
4787 RTW_PWR_CMD_WRITE, BIT(1), 0},
4788 {0x0005,
4789 RTW_PWR_CUT_ALL_MSK,
4790 RTW_PWR_INTF_ALL_MSK,
4791 RTW_PWR_ADDR_MAC,
4792 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4793 {0x0005,
4794 RTW_PWR_CUT_ALL_MSK,
4795 RTW_PWR_INTF_ALL_MSK,
4796 RTW_PWR_ADDR_MAC,
4797 RTW_PWR_CMD_POLLING, BIT(1), 0},
4798 {0x0000,
4799 RTW_PWR_CUT_ALL_MSK,
4800 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4801 RTW_PWR_ADDR_MAC,
4802 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4803 {0xFFFF,
4804 RTW_PWR_CUT_ALL_MSK,
4805 RTW_PWR_INTF_ALL_MSK,
4806 0,
4807 RTW_PWR_CMD_END, 0, 0},
4808 };
4809
4810 static const struct rtw_pwr_seq_cmd trans_cardemu_to_carddis_8822c[] = {
4811 {0x0005,
4812 RTW_PWR_CUT_ALL_MSK,
4813 RTW_PWR_INTF_SDIO_MSK,
4814 RTW_PWR_ADDR_MAC,
4815 RTW_PWR_CMD_WRITE, BIT(7), BIT(7)},
4816 {0x0007,
4817 RTW_PWR_CUT_ALL_MSK,
4818 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4819 RTW_PWR_ADDR_MAC,
4820 RTW_PWR_CMD_WRITE, 0xFF, 0x00},
4821 {0x0067,
4822 RTW_PWR_CUT_ALL_MSK,
4823 RTW_PWR_INTF_ALL_MSK,
4824 RTW_PWR_ADDR_MAC,
4825 RTW_PWR_CMD_WRITE, BIT(5), 0},
4826 {0x004A,
4827 RTW_PWR_CUT_ALL_MSK,
4828 RTW_PWR_INTF_USB_MSK,
4829 RTW_PWR_ADDR_MAC,
4830 RTW_PWR_CMD_WRITE, BIT(0), 0},
4831 {0x0081,
4832 RTW_PWR_CUT_ALL_MSK,
4833 RTW_PWR_INTF_ALL_MSK,
4834 RTW_PWR_ADDR_MAC,
4835 RTW_PWR_CMD_WRITE, BIT(7) | BIT(6), 0},
4836 {0x0090,
4837 RTW_PWR_CUT_ALL_MSK,
4838 RTW_PWR_INTF_ALL_MSK,
4839 RTW_PWR_ADDR_MAC,
4840 RTW_PWR_CMD_WRITE, BIT(1), 0},
4841 {0x0092,
4842 RTW_PWR_CUT_ALL_MSK,
4843 RTW_PWR_INTF_PCI_MSK,
4844 RTW_PWR_ADDR_MAC,
4845 RTW_PWR_CMD_WRITE, 0xFF, 0x20},
4846 {0x0093,
4847 RTW_PWR_CUT_ALL_MSK,
4848 RTW_PWR_INTF_PCI_MSK,
4849 RTW_PWR_ADDR_MAC,
4850 RTW_PWR_CMD_WRITE, 0xFF, 0x04},
4851 {0x0005,
4852 RTW_PWR_CUT_ALL_MSK,
4853 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4854 RTW_PWR_ADDR_MAC,
4855 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4), BIT(3)},
4856 {0x0005,
4857 RTW_PWR_CUT_ALL_MSK,
4858 RTW_PWR_INTF_PCI_MSK,
4859 RTW_PWR_ADDR_MAC,
4860 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4861 {0x0086,
4862 RTW_PWR_CUT_ALL_MSK,
4863 RTW_PWR_INTF_SDIO_MSK,
4864 RTW_PWR_ADDR_SDIO,
4865 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4866 {0xFFFF,
4867 RTW_PWR_CUT_ALL_MSK,
4868 RTW_PWR_INTF_ALL_MSK,
4869 0,
4870 RTW_PWR_CMD_END, 0, 0},
4871 };
4872
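/* Power-on/off flows: NULL-terminated lists of the sub-sequences above,
 * executed in order by the MAC power-switch code.
 */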
4873 static const struct rtw_pwr_seq_cmd *card_enable_flow_8822c[] = {
4874 trans_carddis_to_cardemu_8822c,
4875 trans_cardemu_to_act_8822c,
4876 NULL
4877 };
4878
4879 static const struct rtw_pwr_seq_cmd *card_disable_flow_8822c[] = {
4880 trans_act_to_cardemu_8822c,
4881 trans_cardemu_to_carddis_8822c,
4882 NULL
4883 };
4884
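/* Interface PHY parameter tables: for the 8822C these hold only the 0xFFFF
 * end marker, i.e. no USB/PCIe PHY registers are overridden here.
 */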
4885 static const struct rtw_intf_phy_para usb2_param_8822c[] = {
4886 {0xFFFF, 0x00,
4887 RTW_IP_SEL_PHY,
4888 RTW_INTF_PHY_CUT_ALL,
4889 RTW_INTF_PHY_PLATFORM_ALL},
4890 };
4891
4892 static const struct rtw_intf_phy_para usb3_param_8822c[] = {
4893 {0xFFFF, 0x0000,
4894 RTW_IP_SEL_PHY,
4895 RTW_INTF_PHY_CUT_ALL,
4896 RTW_INTF_PHY_PLATFORM_ALL},
4897 };
4898
4899 static const struct rtw_intf_phy_para pcie_gen1_param_8822c[] = {
4900 {0xFFFF, 0x0000,
4901 RTW_IP_SEL_PHY,
4902 RTW_INTF_PHY_CUT_ALL,
4903 RTW_INTF_PHY_PLATFORM_ALL},
4904 };
4905
4906 static const struct rtw_intf_phy_para pcie_gen2_param_8822c[] = {
4907 {0xFFFF, 0x0000,
4908 RTW_IP_SEL_PHY,
4909 RTW_INTF_PHY_CUT_ALL,
4910 RTW_INTF_PHY_PLATFORM_ALL},
4911 };
4912
4913 static const struct rtw_intf_phy_para_table phy_para_table_8822c = {
4914 .usb2_para = usb2_param_8822c,
4915 .usb3_para = usb3_param_8822c,
4916 .gen1_para = pcie_gen1_param_8822c,
4917 .gen2_para = pcie_gen2_param_8822c,
4918 .n_usb2_para = ARRAY_SIZE(usb2_param_8822c),
4919 	.n_usb3_para = ARRAY_SIZE(usb3_param_8822c),
4920 .n_gen1_para = ARRAY_SIZE(pcie_gen1_param_8822c),
4921 .n_gen2_para = ARRAY_SIZE(pcie_gen2_param_8822c),
4922 };
4923
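/* Per-RFE-option table selection: every option uses the type-0 BB
 * power-by-rate table, and option 5 additionally selects its own (type-5)
 * TX power limit table.
 */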
4924 static const struct rtw_rfe_def rtw8822c_rfe_defs[] = {
4925 [0] = RTW_DEF_RFE(8822c, 0, 0),
4926 [1] = RTW_DEF_RFE(8822c, 0, 0),
4927 [2] = RTW_DEF_RFE(8822c, 0, 0),
4928 [3] = RTW_DEF_RFE(8822c, 0, 0),
4929 [4] = RTW_DEF_RFE(8822c, 0, 0),
4930 [5] = RTW_DEF_RFE(8822c, 0, 5),
4931 [6] = RTW_DEF_RFE(8822c, 0, 0),
4932 };
4933
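/* Initial gain (DIG) control fields, one per RF path, packed into 0x1d70. */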
4934 static const struct rtw_hw_reg rtw8822c_dig[] = {
4935 [0] = { .addr = 0x1d70, .mask = 0x7f },
4936 [1] = { .addr = 0x1d70, .mask = 0x7f00 },
4937 };
4938
4939 static const struct rtw_ltecoex_addr rtw8822c_ltecoex_addr = {
4940 .ctrl = LTECOEX_ACCESS_CTRL,
4941 .wdata = LTECOEX_WRITE_DATA,
4942 .rdata = LTECOEX_READ_DATA,
4943 };
4944
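/* TX buffer page allocation {high, normal, low, extra, gap queue pages};
 * the row is chosen according to the host interface and, for USB, the
 * number of bulk-out endpoints.
 */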
4945 static const struct rtw_page_table page_table_8822c[] = {
4946 {64, 64, 64, 64, 1},
4947 {64, 64, 64, 64, 1},
4948 {64, 64, 0, 0, 1},
4949 {64, 64, 64, 0, 1},
4950 {64, 64, 64, 64, 1},
4951 };
4952
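/* Mapping of the {BK, BE, VI, VO, MGMT, HIGH} queues to DMA priority rings;
 * rows are selected the same way as page_table_8822c above.
 */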
4953 static const struct rtw_rqpn rqpn_table_8822c[] = {
4954 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4955 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4956 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4957 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4958 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4959 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4960 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4961 RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_HIGH,
4962 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4963 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4964 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4965 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4966 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4967 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4968 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4969 };
4970
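/* Per-priority-queue FIFO page bookkeeping registers: .rsvd and .avail hold
 * the reserved and currently available page counts; wsize requests 16-bit
 * accesses when they are read back.
 */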
4971 static struct rtw_prioq_addrs prioq_addrs_8822c = {
4972 .prio[RTW_DMA_MAPPING_EXTRA] = {
4973 .rsvd = REG_FIFOPAGE_INFO_4, .avail = REG_FIFOPAGE_INFO_4 + 2,
4974 },
4975 .prio[RTW_DMA_MAPPING_LOW] = {
4976 .rsvd = REG_FIFOPAGE_INFO_2, .avail = REG_FIFOPAGE_INFO_2 + 2,
4977 },
4978 .prio[RTW_DMA_MAPPING_NORMAL] = {
4979 .rsvd = REG_FIFOPAGE_INFO_3, .avail = REG_FIFOPAGE_INFO_3 + 2,
4980 },
4981 .prio[RTW_DMA_MAPPING_HIGH] = {
4982 .rsvd = REG_FIFOPAGE_INFO_1, .avail = REG_FIFOPAGE_INFO_1 + 2,
4983 },
4984 .wsize = true,
4985 };
4986
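/* Chip-specific callbacks plugged into the rtw88 core; .coex_set_ant_switch
 * is deliberately left NULL for this chip.
 */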
4987 static struct rtw_chip_ops rtw8822c_ops = {
4988 .phy_set_param = rtw8822c_phy_set_param,
4989 .read_efuse = rtw8822c_read_efuse,
4990 .query_rx_desc = rtw8822c_query_rx_desc,
4991 .set_channel = rtw8822c_set_channel,
4992 .mac_init = rtw8822c_mac_init,
4993 .dump_fw_crash = rtw8822c_dump_fw_crash,
4994 .read_rf = rtw_phy_read_rf,
4995 .write_rf = rtw_phy_write_rf_reg_mix,
4996 .set_tx_power_index = rtw8822c_set_tx_power_index,
4997 .set_antenna = rtw8822c_set_antenna,
4998 .cfg_ldo25 = rtw8822c_cfg_ldo25,
4999 .false_alarm_statistics = rtw8822c_false_alarm_statistics,
5000 .dpk_track = rtw8822c_dpk_track,
5001 .phy_calibration = rtw8822c_phy_calibration,
5002 .cck_pd_set = rtw8822c_phy_cck_pd_set,
5003 .pwr_track = rtw8822c_pwr_track,
5004 .config_bfee = rtw8822c_bf_config_bfee,
5005 .set_gid_table = rtw_bf_set_gid_table,
5006 .cfg_csi_rate = rtw_bf_cfg_csi_rate,
5007 .adaptivity_init = rtw8822c_adaptivity_init,
5008 .adaptivity = rtw8822c_adaptivity,
5009 .cfo_init = rtw8822c_cfo_init,
5010 .cfo_track = rtw8822c_cfo_track,
5011 .config_tx_path = rtw8822c_config_tx_path,
5012 .config_txrx_mode = rtw8822c_config_trx_mode,
5013 .fill_txdesc_checksum = rtw8822c_fill_txdesc_checksum,
5014
5015 .coex_set_init = rtw8822c_coex_cfg_init,
5016 .coex_set_ant_switch = NULL,
5017 .coex_set_gnt_fix = rtw8822c_coex_cfg_gnt_fix,
5018 .coex_set_gnt_debug = rtw8822c_coex_cfg_gnt_debug,
5019 .coex_set_rfe_type = rtw8822c_coex_cfg_rfe_type,
5020 .coex_set_wl_tx_power = rtw8822c_coex_cfg_wl_tx_power,
5021 .coex_set_wl_rx_gain = rtw8822c_coex_cfg_wl_rx_gain,
5022 };
5023
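/* Coex slot tables: each entry pairs the BT and WL arbitration words that
 * are programmed for the table case chosen by the coex mechanism.
 */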
5024 /* Shared-Antenna Coex Table */
5025 static const struct coex_table_para table_sant_8822c[] = {
5026 {0xffffffff, 0xffffffff}, /* case-0 */
5027 {0x55555555, 0x55555555},
5028 {0x66555555, 0x66555555},
5029 {0xaaaaaaaa, 0xaaaaaaaa},
5030 {0x5a5a5a5a, 0x5a5a5a5a},
5031 {0xfafafafa, 0xfafafafa}, /* case-5 */
5032 {0x6a5a5555, 0xaaaaaaaa},
5033 {0x6a5a56aa, 0x6a5a56aa},
5034 {0x6a5a5a5a, 0x6a5a5a5a},
5035 {0x66555555, 0x5a5a5a5a},
5036 {0x66555555, 0x6a5a5a5a}, /* case-10 */
5037 {0x66555555, 0x6a5a5aaa},
5038 {0x66555555, 0x5a5a5aaa},
5039 {0x66555555, 0x6aaa5aaa},
5040 {0x66555555, 0xaaaa5aaa},
5041 {0x66555555, 0xaaaaaaaa}, /* case-15 */
5042 {0xffff55ff, 0xfafafafa},
5043 {0xffff55ff, 0x6afa5afa},
5044 {0xaaffffaa, 0xfafafafa},
5045 {0xaa5555aa, 0x5a5a5a5a},
5046 {0xaa5555aa, 0x6a5a5a5a}, /* case-20 */
5047 {0xaa5555aa, 0xaaaaaaaa},
5048 {0xffffffff, 0x5a5a5a5a},
5049 {0xffffffff, 0x5a5a5a5a},
5050 {0xffffffff, 0x55555555},
5051 {0xffffffff, 0x5a5a5aaa}, /* case-25 */
5052 {0x55555555, 0x5a5a5a5a},
5053 {0x55555555, 0xaaaaaaaa},
5054 {0x55555555, 0x6a5a6a5a},
5055 {0x66556655, 0x66556655},
5056 	{0x66556aaa, 0x6a5a6aaa}, /* case-30 */
5057 {0xffffffff, 0x5aaa5aaa},
5058 {0x56555555, 0x5a5a5aaa},
5059 {0xdaffdaff, 0xdaffdaff},
5060 {0xddffddff, 0xddffddff},
5061 };
5062
5063 /* Non-Shared-Antenna Coex Table */
5064 static const struct coex_table_para table_nsant_8822c[] = {
5065 {0xffffffff, 0xffffffff}, /* case-100 */
5066 {0x55555555, 0x55555555},
5067 {0x66555555, 0x66555555},
5068 {0xaaaaaaaa, 0xaaaaaaaa},
5069 {0x5a5a5a5a, 0x5a5a5a5a},
5070 {0xfafafafa, 0xfafafafa}, /* case-105 */
5071 {0x5afa5afa, 0x5afa5afa},
5072 {0x55555555, 0xfafafafa},
5073 {0x66555555, 0xfafafafa},
5074 {0x66555555, 0x5a5a5a5a},
5075 {0x66555555, 0x6a5a5a5a}, /* case-110 */
5076 {0x66555555, 0xaaaaaaaa},
5077 {0xffff55ff, 0xfafafafa},
5078 {0xffff55ff, 0x5afa5afa},
5079 {0xffff55ff, 0xaaaaaaaa},
5080 {0xffff55ff, 0xffff55ff}, /* case-115 */
5081 {0xaaffffaa, 0x5afa5afa},
5082 {0xaaffffaa, 0xaaaaaaaa},
5083 {0xffffffff, 0xfafafafa},
5084 {0xffffffff, 0x5afa5afa},
5085 {0xffffffff, 0xaaaaaaaa}, /* case-120 */
5086 {0x55ff55ff, 0x5afa5afa},
5087 {0x55ff55ff, 0xaaaaaaaa},
5088 {0x55ff55ff, 0x55ff55ff}
5089 };
5090
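/* PS-TDMA parameter sets: each entry is the 5-byte tuple handed to the
 * firmware (via H2C) to time-share the antenna between WL and BT.
 */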
5091 /* Shared-Antenna TDMA */
5092 static const struct coex_tdma_para tdma_sant_8822c[] = {
5093 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-0 */
5094 { {0x61, 0x45, 0x03, 0x11, 0x11} }, /* case-1 */
5095 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5096 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5097 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5098 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-5 */
5099 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5100 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5101 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5102 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5103 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-10 */
5104 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5105 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5106 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5107 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5108 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-15 */
5109 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5110 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5111 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5112 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5113 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-20 */
5114 { {0x51, 0x4a, 0x03, 0x10, 0x50} },
5115 { {0x51, 0x0c, 0x03, 0x10, 0x54} },
5116 { {0x55, 0x08, 0x03, 0x10, 0x54} },
5117 { {0x65, 0x10, 0x03, 0x11, 0x10} },
5118 { {0x51, 0x10, 0x03, 0x10, 0x51} }, /* case-25 */
5119 { {0x51, 0x08, 0x03, 0x10, 0x50} },
5120 { {0x61, 0x08, 0x03, 0x11, 0x11} }
5121 };
5122
5123 /* Non-Shared-Antenna TDMA */
5124 static const struct coex_tdma_para tdma_nsant_8822c[] = {
5125 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-100 */
5126 { {0x61, 0x45, 0x03, 0x11, 0x11} },
5127 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5128 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5129 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5130 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-105 */
5131 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5132 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5133 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5134 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5135 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-110 */
5136 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5137 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5138 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5139 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5140 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-115 */
5141 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5142 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5143 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5144 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5145 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-120 */
5146 { {0x51, 0x08, 0x03, 0x10, 0x50} }
5147 };
5148
5149 /* RSSI in percent (dBm = percent - 100) */
5150 static const u8 wl_rssi_step_8822c[] = {60, 50, 44, 30};
5151 static const u8 bt_rssi_step_8822c[] = {8, 15, 20, 25};
5152 static const struct coex_5g_afh_map afh_5g_8822c[] = { {0, 0, 0} };
5153
5154 /* wl_tx_dec_power, bt_tx_dec_power, wl_rx_gain, bt_rx_lna_constrain */
5155 static const struct coex_rf_para rf_para_tx_8822c[] = {
5156 {0, 0, false, 7}, /* for normal */
5157 {0, 16, false, 7}, /* for WL-CPT */
5158 {8, 17, true, 4},
5159 {7, 18, true, 4},
5160 {6, 19, true, 4},
5161 {5, 20, true, 4},
5162 	{0, 21, true, 4} /* for gaming HID */
5163 };
5164
5165 static const struct coex_rf_para rf_para_rx_8822c[] = {
5166 {0, 0, false, 7}, /* for normal */
5167 {0, 16, false, 7}, /* for WL-CPT */
5168 {3, 24, true, 5},
5169 {2, 26, true, 5},
5170 {1, 27, true, 5},
5171 {0, 28, true, 5},
5172 	{0, 28, true, 5} /* for gaming HID */
5173 };
5174
5175 static_assert(ARRAY_SIZE(rf_para_tx_8822c) == ARRAY_SIZE(rf_para_rx_8822c));
5176
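/* Thermal power-tracking tables: each table is indexed by the thermal-meter
 * delta from the calibration value and gives the corresponding TX power
 * index adjustment. The _n/_p variants cover the two signs of the delta,
 * a/b denote RF paths A/B, and the 5G tables carry one row per 5G sub-band
 * group.
 */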
5177 static const u8
5178 rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5179 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5180 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5181 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5182 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5183 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5184 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5185 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5186 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5187 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5188 };
5189
5190 static const u8
5191 rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5192 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5193 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5194 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5195 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5196 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5197 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5198 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5199 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5200 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5201 };
5202
5203 static const u8
5204 rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5205 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5206 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5207 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5208 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5209 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5210 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5211 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5212 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5213 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5214 };
5215
5216 static const u8
5217 rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5218 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5219 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5220 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5221 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5222 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5223 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5224 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5225 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5226 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5227 };
5228
5229 static const u8 rtw8822c_pwrtrk_2gb_n[RTW_PWR_TRK_TBL_SZ] = {
5230 0, 1, 2, 3, 4, 4, 5, 6, 7, 8,
5231 9, 9, 10, 11, 12, 13, 14, 15, 15, 16,
5232 17, 18, 19, 20, 20, 21, 22, 23, 24, 25
5233 };
5234
5235 static const u8 rtw8822c_pwrtrk_2gb_p[RTW_PWR_TRK_TBL_SZ] = {
5236 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5237 10, 11, 12, 13, 14, 14, 15, 16, 17, 18,
5238 19, 20, 21, 22, 23, 24, 25, 26, 27, 28
5239 };
5240
5241 static const u8 rtw8822c_pwrtrk_2ga_n[RTW_PWR_TRK_TBL_SZ] = {
5242 0, 1, 2, 2, 3, 4, 4, 5, 6, 6,
5243 7, 8, 8, 9, 9, 10, 11, 11, 12, 13,
5244 13, 14, 15, 15, 16, 17, 17, 18, 19, 19
5245 };
5246
5247 static const u8 rtw8822c_pwrtrk_2ga_p[RTW_PWR_TRK_TBL_SZ] = {
5248 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5249 10, 11, 11, 12, 13, 14, 15, 16, 17, 18,
5250 19, 20, 21, 22, 23, 24, 25, 25, 26, 27
5251 };
5252
5253 static const u8 rtw8822c_pwrtrk_2g_cck_b_n[RTW_PWR_TRK_TBL_SZ] = {
5254 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5255 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5256 17, 18, 19, 20, 21, 22, 23, 23, 24, 25
5257 };
5258
5259 static const u8 rtw8822c_pwrtrk_2g_cck_b_p[RTW_PWR_TRK_TBL_SZ] = {
5260 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5261 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
5262 20, 21, 22, 23, 24, 25, 26, 27, 28, 29
5263 };
5264
5265 static const u8 rtw8822c_pwrtrk_2g_cck_a_n[RTW_PWR_TRK_TBL_SZ] = {
5266 0, 1, 2, 3, 3, 4, 5, 6, 6, 7,
5267 8, 9, 9, 10, 11, 12, 12, 13, 14, 15,
5268 15, 16, 17, 18, 18, 19, 20, 21, 21, 22
5269 };
5270
5271 static const u8 rtw8822c_pwrtrk_2g_cck_a_p[RTW_PWR_TRK_TBL_SZ] = {
5272 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5273 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5274 18, 18, 19, 20, 21, 22, 23, 24, 24, 25
5275 };
5276
5277 static const struct rtw_pwr_track_tbl rtw8822c_rtw_pwr_track_tbl = {
5278 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_1],
5279 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_2],
5280 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_3],
5281 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_1],
5282 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_2],
5283 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_3],
5284 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_1],
5285 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_2],
5286 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_3],
5287 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_1],
5288 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_2],
5289 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_3],
5290 .pwrtrk_2gb_n = rtw8822c_pwrtrk_2gb_n,
5291 .pwrtrk_2gb_p = rtw8822c_pwrtrk_2gb_p,
5292 .pwrtrk_2ga_n = rtw8822c_pwrtrk_2ga_n,
5293 .pwrtrk_2ga_p = rtw8822c_pwrtrk_2ga_p,
5294 .pwrtrk_2g_cckb_n = rtw8822c_pwrtrk_2g_cck_b_n,
5295 .pwrtrk_2g_cckb_p = rtw8822c_pwrtrk_2g_cck_b_p,
5296 .pwrtrk_2g_ccka_n = rtw8822c_pwrtrk_2g_cck_a_n,
5297 .pwrtrk_2g_ccka_p = rtw8822c_pwrtrk_2g_cck_a_p,
5298 };
5299
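/* EDCCA (adaptivity) thresholds: the L2H and H2L energy levels live in
 * bytes 2 and 3 of 0x84c; the 0x80 offset is added when the signed
 * thresholds are programmed into the register.
 */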
5300 static struct rtw_hw_reg_offset rtw8822c_edcca_th[] = {
5301 [EDCCA_TH_L2H_IDX] = {
5302 {.addr = 0x84c, .mask = MASKBYTE2}, .offset = 0x80
5303 },
5304 [EDCCA_TH_H2L_IDX] = {
5305 {.addr = 0x84c, .mask = MASKBYTE3}, .offset = 0x80
5306 },
5307 };
5308
5309 #ifdef CONFIG_PM
5310 static const struct wiphy_wowlan_support rtw_wowlan_stub_8822c = {
5311 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_GTK_REKEY_FAILURE |
5312 WIPHY_WOWLAN_DISCONNECT | WIPHY_WOWLAN_SUPPORTS_GTK_REKEY |
5313 WIPHY_WOWLAN_NET_DETECT,
5314 .n_patterns = RTW_MAX_PATTERN_NUM,
5315 .pattern_max_len = RTW_MAX_PATTERN_SIZE,
5316 .pattern_min_len = 1,
5317 .max_nd_match_sets = 4,
5318 };
5319 #endif
5320
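/* Registers dumped in the coex debug info; the {0, 0, RTW_REG_DOMAIN_NL}
 * entries act as separators in the output.
 */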
5321 static const struct rtw_reg_domain coex_info_hw_regs_8822c[] = {
5322 {0x1860, BIT(3), RTW_REG_DOMAIN_MAC8},
5323 {0x4160, BIT(3), RTW_REG_DOMAIN_MAC8},
5324 {0x1c32, BIT(6), RTW_REG_DOMAIN_MAC8},
5325 {0x1c38, BIT(28), RTW_REG_DOMAIN_MAC32},
5326 {0, 0, RTW_REG_DOMAIN_NL},
5327 {0x430, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5328 {0x434, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5329 {0x42a, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5330 {0x426, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5331 {0x45e, BIT(3), RTW_REG_DOMAIN_MAC8},
5332 {0x454, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5333 {0, 0, RTW_REG_DOMAIN_NL},
5334 {0x4c, BIT(24) | BIT(23), RTW_REG_DOMAIN_MAC32},
5335 {0x64, BIT(0), RTW_REG_DOMAIN_MAC8},
5336 {0x4c6, BIT(4), RTW_REG_DOMAIN_MAC8},
5337 {0x40, BIT(5), RTW_REG_DOMAIN_MAC8},
5338 {0x1, RFREG_MASK, RTW_REG_DOMAIN_RF_B},
5339 {0, 0, RTW_REG_DOMAIN_NL},
5340 {0x550, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5341 {0x522, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5342 {0x953, BIT(1), RTW_REG_DOMAIN_MAC8},
5343 {0xc50, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5344 };
5345
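/* Top-level chip description consumed by the rtw88 core; exported so the
 * bus-specific modules (e.g. the rtw8822ce PCIe driver) can pass it at
 * probe time.
 */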
5346 const struct rtw_chip_info rtw8822c_hw_spec = {
5347 .ops = &rtw8822c_ops,
5348 .id = RTW_CHIP_TYPE_8822C,
5349 .fw_name = "rtw88/rtw8822c_fw.bin",
5350 .wlan_cpu = RTW_WCPU_11AC,
5351 .tx_pkt_desc_sz = 48,
5352 .tx_buf_desc_sz = 16,
5353 .rx_pkt_desc_sz = 24,
5354 .rx_buf_desc_sz = 8,
5355 .phy_efuse_size = 512,
5356 .log_efuse_size = 768,
5357 .ptct_efuse_size = 124,
5358 .txff_size = 262144,
5359 .rxff_size = 24576,
5360 .fw_rxff_size = 12288,
5361 .rsvd_drv_pg_num = 16,
5362 .txgi_factor = 2,
5363 .is_pwr_by_rate_dec = false,
5364 .max_power_index = 0x7f,
5365 .csi_buf_pg_num = 50,
5366 .band = RTW_BAND_2G | RTW_BAND_5G,
5367 .page_size = TX_PAGE_SIZE,
5368 .dig_min = 0x20,
5369 .default_1ss_tx_path = BB_PATH_A,
5370 .path_div_supported = true,
5371 .ht_supported = true,
5372 .vht_supported = true,
5373 .lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),
5374 .sys_func_en = 0xD8,
5375 .pwr_on_seq = card_enable_flow_8822c,
5376 .pwr_off_seq = card_disable_flow_8822c,
5377 .page_table = page_table_8822c,
5378 .rqpn_table = rqpn_table_8822c,
5379 .prioq_addrs = &prioq_addrs_8822c,
5380 .intf_table = &phy_para_table_8822c,
5381 .dig = rtw8822c_dig,
5382 .dig_cck = NULL,
5383 .rf_base_addr = {0x3c00, 0x4c00},
5384 .rf_sipi_addr = {0x1808, 0x4108},
5385 .ltecoex_addr = &rtw8822c_ltecoex_addr,
5386 .mac_tbl = &rtw8822c_mac_tbl,
5387 .agc_tbl = &rtw8822c_agc_tbl,
5388 .bb_tbl = &rtw8822c_bb_tbl,
5389 .rfk_init_tbl = &rtw8822c_array_mp_cal_init_tbl,
5390 .rf_tbl = {&rtw8822c_rf_b_tbl, &rtw8822c_rf_a_tbl},
5391 .rfe_defs = rtw8822c_rfe_defs,
5392 .rfe_defs_size = ARRAY_SIZE(rtw8822c_rfe_defs),
5393 .en_dis_dpd = true,
5394 .dpd_ratemask = DIS_DPD_RATEALL,
5395 .pwr_track_tbl = &rtw8822c_rtw_pwr_track_tbl,
5396 .iqk_threshold = 8,
5397 .lck_threshold = 8,
5398 .bfer_su_max_num = 2,
5399 .bfer_mu_max_num = 1,
5400 .rx_ldpc = true,
5401 .tx_stbc = true,
5402 .edcca_th = rtw8822c_edcca_th,
5403 .l2h_th_ini_cs = 60,
5404 .l2h_th_ini_ad = 45,
5405 .ampdu_density = IEEE80211_HT_MPDU_DENSITY_2,
5406
5407 #ifdef CONFIG_PM
5408 .wow_fw_name = "rtw88/rtw8822c_wow_fw.bin",
5409 .wowlan_stub = &rtw_wowlan_stub_8822c,
5410 .max_sched_scan_ssids = 4,
5411 #endif
5412 .max_scan_ie_len = (RTW_PROBE_PG_CNT - 1) * TX_PAGE_SIZE,
5413 .coex_para_ver = 0x22020720,
5414 .bt_desired_ver = 0x20,
5415 .scbd_support = true,
5416 .new_scbd10_def = true,
5417 .ble_hid_profile_support = true,
5418 .wl_mimo_ps_support = true,
5419 .pstdma_type = COEX_PSTDMA_FORCE_LPSOFF,
5420 .bt_rssi_type = COEX_BTRSSI_DBM,
5421 .ant_isolation = 15,
5422 .rssi_tolerance = 2,
5423 .wl_rssi_step = wl_rssi_step_8822c,
5424 .bt_rssi_step = bt_rssi_step_8822c,
5425 .table_sant_num = ARRAY_SIZE(table_sant_8822c),
5426 .table_sant = table_sant_8822c,
5427 .table_nsant_num = ARRAY_SIZE(table_nsant_8822c),
5428 .table_nsant = table_nsant_8822c,
5429 .tdma_sant_num = ARRAY_SIZE(tdma_sant_8822c),
5430 .tdma_sant = tdma_sant_8822c,
5431 .tdma_nsant_num = ARRAY_SIZE(tdma_nsant_8822c),
5432 .tdma_nsant = tdma_nsant_8822c,
5433 .wl_rf_para_num = ARRAY_SIZE(rf_para_tx_8822c),
5434 .wl_rf_para_tx = rf_para_tx_8822c,
5435 .wl_rf_para_rx = rf_para_rx_8822c,
5436 .bt_afh_span_bw20 = 0x24,
5437 .bt_afh_span_bw40 = 0x36,
5438 .afh_5g_num = ARRAY_SIZE(afh_5g_8822c),
5439 .afh_5g = afh_5g_8822c,
5440
5441 .coex_info_hw_regs_num = ARRAY_SIZE(coex_info_hw_regs_8822c),
5442 .coex_info_hw_regs = coex_info_hw_regs_8822c,
5443
5444 .fw_fifo_addr = {0x780, 0x700, 0x780, 0x660, 0x650, 0x680},
5445 .fwcd_segs = &rtw8822c_fwcd_segs,
5446 };
5447 EXPORT_SYMBOL(rtw8822c_hw_spec);
5448
5449 MODULE_FIRMWARE("rtw88/rtw8822c_fw.bin");
5450 MODULE_FIRMWARE("rtw88/rtw8822c_wow_fw.bin");
5451
5452 MODULE_AUTHOR("Realtek Corporation");
5453 MODULE_DESCRIPTION("Realtek 802.11ac wireless 8822c driver");
5454 MODULE_LICENSE("Dual BSD/GPL");
5455