1 // SPDX-License-Identifier: GPL-2.0
2 /******************************************************************************
3 *
4 * Copyright(c) 2007 - 2016 Realtek Corporation.
5 *
6 * Contact Information:
7 * wlanfae <wlanfae@realtek.com>
8 * Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
9 * Hsinchu 300, Taiwan.
10 *
11 * Larry Finger <Larry.Finger@lwfinger.net>
12 *
13 *****************************************************************************/
14
15 #include "../mp_precomp.h"
16 #include "../phydm_precomp.h"
17
18 /*---------------------------Define Local Constant---------------------------*/
19
20 static bool _iqk_rx_iqk_by_path_8822b(void *, u8);
21
/*
 * Run the RX IQK state machine for path A until it reaches its terminal
 * step (rxiqk_step == 5), then advance the overall IQK step counter.
 * @status selects only which log prefix is printed (0 = S0, 1 = S1).
 *
 * NOTE(review): _iqk_rx_iqk_by_path_8822b() is always invoked with
 * ODM_RF_PATH_A, and the fail code is read from [0][ODM_RF_PATH_A] in
 * both branches, even when status == 1 selects the S1 message — confirm
 * this is intended and not a copy/paste slip.
 */
static inline void phydm_set_iqk_info(struct phy_dm_struct *dm,
				      struct dm_iqk_info *iqk_info, u8 status)
{
	bool KFAIL = true;

	while (1) {
		KFAIL = _iqk_rx_iqk_by_path_8822b(dm, ODM_RF_PATH_A);
		if (status == 0)
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]S0RXK KFail = 0x%x\n", KFAIL);
		else if (status == 1)
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]S1RXK KFail = 0x%x\n", KFAIL);
		if (iqk_info->rxiqk_step == 5) {
			/* Terminal step: move on and rearm for next round. */
			dm->rf_calibrate_info.iqk_step++;
			iqk_info->rxiqk_step = 1;
			if (KFAIL && status == 0)
				ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
					     "[IQK]S0RXK fail code: %d!!!\n",
					     iqk_info->rxiqk_fail_code
					     [0][ODM_RF_PATH_A]);
			else if (KFAIL && status == 1)
				ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
					     "[IQK]S1RXK fail code: %d!!!\n",
					     iqk_info->rxiqk_fail_code
					     [0][ODM_RF_PATH_A]);
			break;
		}
	}

	/* One calibration pass completed. */
	iqk_info->kcount++;
}
54
phydm_init_iqk_information(struct dm_iqk_info * iqk_info)55 static inline void phydm_init_iqk_information(struct dm_iqk_info *iqk_info)
56 {
57 u8 i, j, k, m;
58
59 for (i = 0; i < 2; i++) {
60 iqk_info->iqk_channel[i] = 0x0;
61
62 for (j = 0; j < SS_8822B; j++) {
63 iqk_info->lok_idac[i][j] = 0x0;
64 iqk_info->rxiqk_agc[i][j] = 0x0;
65 iqk_info->bypass_iqk[i][j] = 0x0;
66
67 for (k = 0; k < 2; k++) {
68 iqk_info->iqk_fail_report[i][j][k] = true;
69 for (m = 0; m < 8; m++) {
70 iqk_info->iqk_cfir_real[i][j][k][m] =
71 0x0;
72 iqk_info->iqk_cfir_imag[i][j][k][m] =
73 0x0;
74 }
75 }
76
77 for (k = 0; k < 3; k++)
78 iqk_info->retry_count[i][j][k] = 0x0;
79 }
80 }
81 }
82
/*
 * Shift the most recent calibration results from slot 0 into backup
 * slot 1, then re-initialize the slot-0 bookkeeping so a fresh
 * calibration round starts from a clean state.
 */
static inline void phydm_backup_iqk_information(struct dm_iqk_info *iqk_info)
{
	u8 i, j, k;

	iqk_info->iqk_channel[1] = iqk_info->iqk_channel[0];
	for (i = 0; i < 2; i++) {
		iqk_info->lok_idac[1][i] = iqk_info->lok_idac[0][i];
		iqk_info->rxiqk_agc[1][i] = iqk_info->rxiqk_agc[0][i];
		iqk_info->bypass_iqk[1][i] = iqk_info->bypass_iqk[0][i];
		iqk_info->rxiqk_fail_code[1][i] =
			iqk_info->rxiqk_fail_code[0][i];
		for (j = 0; j < 2; j++) {
			iqk_info->iqk_fail_report[1][i][j] =
				iqk_info->iqk_fail_report[0][i][j];
			for (k = 0; k < 8; k++) {
				iqk_info->iqk_cfir_real[1][i][j][k] =
					iqk_info->iqk_cfir_real[0][i][j][k];
				iqk_info->iqk_cfir_imag[1][i][j][k] =
					iqk_info->iqk_cfir_imag[0][i][j][k];
			}
		}
	}

	/*
	 * NOTE(review): this reset loop runs i = 0..3 while the copy loop
	 * above only covers 2 paths; confirm the second dimension of
	 * rxiqk_fail_code / rxiqk_agc / iqk_fail_report / gs_retry_count /
	 * retry_count really is >= 4, otherwise these stores write out of
	 * bounds.
	 */
	for (i = 0; i < 4; i++) {
		iqk_info->rxiqk_fail_code[0][i] = 0x0;
		iqk_info->rxiqk_agc[0][i] = 0x0;
		for (j = 0; j < 2; j++) {
			iqk_info->iqk_fail_report[0][i][j] = true;
			iqk_info->gs_retry_count[0][i][j] = 0x0;
		}
		for (j = 0; j < 3; j++)
			iqk_info->retry_count[0][i][j] = 0x0;
	}
}
117
/*
 * Read back the 8-tap CFIR coefficients for @path from the IQK engine
 * and store them into slot 0 of iqk_info. idx 0 selects the TX filter
 * page (0x1b0c[13:12] = 3), idx 1 the RX page (0x1b0c[13:12] = 1);
 * each tap is fetched through the 0x1bd8 / 0x1bfc indirect interface
 * as 12-bit real (bits 27:16) and imaginary (bits 11:0) parts.
 */
static inline void phydm_set_iqk_cfir(struct phy_dm_struct *dm,
				      struct dm_iqk_info *iqk_info, u8 path)
{
	u8 page, tap;
	u32 raw;

	for (page = 0; page < 2; page++) {
		odm_set_bb_reg(dm, 0x1b00, MASKDWORD, 0xf8000008 | path << 1);

		odm_set_bb_reg(dm, 0x1b0c, BIT(13) | BIT(12),
			       page == 0 ? 0x3 : 0x1);

		odm_set_bb_reg(dm, 0x1bd4,
			       BIT(20) | BIT(19) | BIT(18) | BIT(17) | BIT(16),
			       0x10);

		for (tap = 0; tap < 8; tap++) {
			odm_set_bb_reg(dm, 0x1bd8, MASKDWORD,
				       0xe0000001 + (tap * 4));
			raw = odm_get_bb_reg(dm, 0x1bfc, MASKDWORD);
			iqk_info->iqk_cfir_real[0][path][page][tap] =
				(raw >> 16) & 0x0fff;
			iqk_info->iqk_cfir_imag[0][path][page][tap] =
				raw & 0xfff;
		}
	}
}
146
phydm_get_read_counter(struct phy_dm_struct * dm)147 static inline void phydm_get_read_counter(struct phy_dm_struct *dm)
148 {
149 u32 counter = 0x0;
150
151 while (1) {
152 if (((odm_read_4byte(dm, 0x1bf0) >> 24) == 0x7f) ||
153 (counter > 300))
154 break;
155
156 counter++;
157 ODM_delay_ms(1);
158 }
159
160 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION, "[IQK]counter = %d\n", counter);
161 }
162
163 /*---------------------------Define Local Constant---------------------------*/
164
/*
 * Thermal-tracking IQK entry point: drop any previous IQK results,
 * record the thermal reading the calibration is performed at, and run a
 * full IQ calibration.
 *
 * @delta_thermal_index and @threshold are unused here; they are kept so
 * the signature matches the common do_iqk callback shape.
 */
void do_iqk_8822b(void *dm_void, u8 delta_thermal_index, u8 thermal_value,
		  u8 threshold)
{
	struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;

	odm_reset_iqk_result(dm);

	dm->rf_calibrate_info.thermal_value_iqk = thermal_value;

	phy_iq_calibrate_8822b(dm, true);
}
176
/*
 * Pack the per-stream pass/fail summary for @channel into 0x1bf0[15:0]
 * (TX fail bits in [3:0], RX fail bits in [7:4], 2-bit RX fail codes
 * from bit 8 up) and publish the RX IQK AGC words via 0x1be8/0x1bec.
 */
static void _iqk_fill_iqk_report_8822b(void *dm_void, u8 channel)
{
	struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	u32 tx_fail = 0x0, rx_fail = 0x0, rx_code = 0x0;
	u8 ss;

	/* Bit positions are disjoint per stream, so OR-accumulate. */
	for (ss = 0; ss < SS_8822B; ss++) {
		tx_fail |= (iqk_info->iqk_fail_report[channel][ss][TX_IQK] &
			    0x1) << ss;
		rx_fail |= (iqk_info->iqk_fail_report[channel][ss][RX_IQK] &
			    0x1) << (ss + 4);
		rx_code |= (iqk_info->rxiqk_fail_code[channel][ss] & 0x3)
			   << (ss * 2 + 8);
	}

	odm_write_4byte(dm, 0x1b00, 0xf8000008);
	odm_set_bb_reg(dm, 0x1bf0, 0x0000ffff, tx_fail | rx_fail | rx_code);

	/* Two AGC words per register: odd stream in the high half-word. */
	for (ss = 0; ss < 2; ss++)
		odm_write_4byte(
			dm, 0x1be8 + (ss * 4),
			(iqk_info->rxiqk_agc[channel][(ss * 2) + 1] << 16) |
			iqk_info->rxiqk_agc[channel][ss * 2]);
}
203
/*
 * Snapshot the MAC and BB registers listed in @backup_mac_reg /
 * @backup_bb_reg into the caller-provided arrays so they can be
 * restored after calibration by _iqk_restore_mac_bb_8822b().
 */
static void _iqk_backup_mac_bb_8822b(struct phy_dm_struct *dm, u32 *MAC_backup,
				     u32 *BB_backup, u32 *backup_mac_reg,
				     u32 *backup_bb_reg)
{
	u32 idx;

	for (idx = 0; idx < MAC_REG_NUM_8822B; idx++)
		MAC_backup[idx] = odm_read_4byte(dm, backup_mac_reg[idx]);

	for (idx = 0; idx < BB_REG_NUM_8822B; idx++)
		BB_backup[idx] = odm_read_4byte(dm, backup_bb_reg[idx]);
}
216
/*
 * Snapshot the RF registers listed in @backup_rf_reg for both RF paths
 * (A then B, as elsewhere in this file paths are indexed 0/1) so they
 * can be restored after calibration by _iqk_restore_rf_8822b().
 */
static void _iqk_backup_rf_8822b(struct phy_dm_struct *dm, u32 RF_backup[][2],
				 u32 *backup_rf_reg)
{
	u32 reg;
	u8 path;

	for (reg = 0; reg < RF_REG_NUM_8822B; reg++)
		for (path = 0; path < 2; path++)
			RF_backup[reg][path] = odm_get_rf_reg(
				dm, (enum odm_rf_radio_path)path,
				backup_rf_reg[reg], RFREGOFFSETMASK);
}
229
_iqk_agc_bnd_int_8822b(struct phy_dm_struct * dm)230 static void _iqk_agc_bnd_int_8822b(struct phy_dm_struct *dm)
231 {
232 /*initialize RX AGC bnd, it must do after bbreset*/
233 odm_write_4byte(dm, 0x1b00, 0xf8000008);
234 odm_write_4byte(dm, 0x1b00, 0xf80a7008);
235 odm_write_4byte(dm, 0x1b00, 0xf8015008);
236 odm_write_4byte(dm, 0x1b00, 0xf8000008);
237 }
238
/*
 * Reset the baseband once the air is quiet: put both RF paths in
 * standby, wait (up to ~30 ms) for any in-progress CCA to finish, then
 * disable RX, toggle the BB reset bit, and reapply a TX-hang
 * workaround register if 0x660[16] is set.
 */
static void _iqk_bb_reset_8822b(struct phy_dm_struct *dm)
{
	bool cca_ing = false;
	u32 count = 0;

	/* RF standby on both paths before touching the BB. */
	odm_set_rf_reg(dm, ODM_RF_PATH_A, 0x0, RFREGOFFSETMASK, 0x10000);
	odm_set_rf_reg(dm, ODM_RF_PATH_B, 0x0, RFREGOFFSETMASK, 0x10000);

	while (1) {
		odm_write_4byte(dm, 0x8fc, 0x0);
		odm_set_bb_reg(dm, 0x198c, 0x7, 0x7);
		cca_ing = (bool)odm_get_bb_reg(dm, 0xfa0, BIT(3));

		/* Give up waiting for CCA after ~30 polls. */
		if (count > 30)
			cca_ing = false;

		if (cca_ing) {
			ODM_delay_ms(1);
			count++;
		} else {
			odm_write_1byte(dm, 0x808, 0x0); /*RX ant off*/
			odm_set_bb_reg(dm, 0xa04,
				       BIT(27) | BIT(26) | BIT(25) | BIT(24),
				       0x0); /*CCK RX path off*/

			/*BBreset*/
			odm_set_bb_reg(dm, 0x0, BIT(16), 0x0);
			odm_set_bb_reg(dm, 0x0, BIT(16), 0x1);

			if (odm_get_bb_reg(dm, 0x660, BIT(16)))
				odm_write_4byte(dm, 0x6b4, 0x89000006);
			break;
		}
	}
}
274
/*
 * Switch the AFE (ADC/DAC clocking) between the IQK configuration
 * (@do_iqk true, followed by a BB reset) and the normal operating
 * configuration (@do_iqk false). The register order of the original
 * write sequence is preserved exactly.
 */
static void _iqk_afe_setting_8822b(struct phy_dm_struct *dm, bool do_iqk)
{
	static const u32 afe_regs[10] = {
		0xc60, 0xc60, 0xe60, 0xe60, 0xc58,
		0xc5c, 0xc6c, 0xe58, 0xe5c, 0xe6c
	};
	static const u32 iqk_vals[10] = {
		0x50000000, 0x70070040, 0x50000000, 0x70070040, 0xd8000402,
		0xd1000120, 0x00000a15, 0xd8000402, 0xd1000120, 0x00000a15
	};
	static const u32 normal_vals[10] = {
		0x50000000, 0x70038040, 0x50000000, 0x70038040, 0xd8020402,
		0xde000120, 0x0000122a, 0xd8020402, 0xde000120, 0x0000122a
	};
	const u32 *vals = do_iqk ? iqk_vals : normal_vals;
	u8 i;

	for (i = 0; i < 10; i++)
		odm_write_4byte(dm, afe_regs[i], vals[i]);

	if (do_iqk)
		_iqk_bb_reset_8822b(dm);
}
304
/*
 * Write back the MAC and BB register snapshots taken by
 * _iqk_backup_mac_bb_8822b(), restoring the pre-calibration state.
 */
static void _iqk_restore_mac_bb_8822b(struct phy_dm_struct *dm, u32 *MAC_backup,
				      u32 *BB_backup, u32 *backup_mac_reg,
				      u32 *backup_bb_reg)
{
	u32 idx;

	for (idx = 0; idx < MAC_REG_NUM_8822B; idx++)
		odm_write_4byte(dm, backup_mac_reg[idx], MAC_backup[idx]);

	for (idx = 0; idx < BB_REG_NUM_8822B; idx++)
		odm_write_4byte(dm, backup_bb_reg[idx], BB_backup[idx]);
}
316
/*
 * Restore both RF paths after calibration: clear 0xef, write back the
 * 0xdf snapshot with bit 4 forced off, then replay the remaining
 * backed-up RF registers in order. The A-then-B write order per
 * register matches the original sequence.
 */
static void _iqk_restore_rf_8822b(struct phy_dm_struct *dm, u32 *backup_rf_reg,
				  u32 RF_backup[][2])
{
	u8 path;
	u32 reg;

	for (path = 0; path < 2; path++)
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef,
			       RFREGOFFSETMASK, 0x0);

	/*0xdf[4]=0*/
	for (path = 0; path < 2; path++)
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xdf,
			       RFREGOFFSETMASK,
			       RF_backup[0][path] & (~BIT(4)));

	for (reg = 1; reg < RF_REG_NUM_8822B; reg++)
		for (path = 0; path < 2; path++)
			odm_set_rf_reg(dm, (enum odm_rf_radio_path)path,
				       backup_rf_reg[reg], RFREGOFFSETMASK,
				       RF_backup[reg][path]);
}
337
/*
 * Persist IQK results. step == 0 rotates the previous results into the
 * backup slot (see phydm_backup_iqk_information()); any other step
 * records the current channel, per-path LOK IDAC (RF 0x58), the IQK
 * apply registers (0xc94/0xe94), and the CFIR coefficients into slot 0.
 */
static void _iqk_backup_iqk_8822b(struct phy_dm_struct *dm, u8 step)
{
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	u8 path;
	u16 iqk_apply[2] = {0xc94, 0xe94};

	if (step == 0x0) {
		phydm_backup_iqk_information(iqk_info);
	} else {
		iqk_info->iqk_channel[0] = iqk_info->rf_reg18;
		for (path = 0; path < 2; path++) {
			iqk_info->lok_idac[0][path] =
				odm_get_rf_reg(dm, (enum odm_rf_radio_path)path,
					       0x58, RFREGOFFSETMASK);
			iqk_info->bypass_iqk[0][path] =
				odm_get_bb_reg(dm, iqk_apply[path], MASKDWORD);

			phydm_set_iqk_cfir(dm, iqk_info, path);
			/* Leave the indirect CFIR interface in a clean state. */
			odm_set_bb_reg(dm, 0x1bd8, MASKDWORD, 0x0);
			odm_set_bb_reg(dm, 0x1b0c, BIT(13) | BIT(12), 0x0);
		}
	}
}
361
/*
 * Re-program the IQK engine from the results saved for @channel.
 * reload_idx selects how much is reloaded: 1 = TX only, 2 = LO (LOK
 * IDAC via RF 0x58) plus TX and RX CFIR pages. The CFIR taps are pushed
 * back through the 0x1bd8 indirect interface, real part first
 * (command base +0x3) then imaginary (+0x1), 4 bytes per tap.
 */
static void _iqk_reload_iqk_setting_8822b(
	struct phy_dm_struct *dm, u8 channel,
	u8 reload_idx /*1: reload TX, 2: reload LO, TX, RX*/
	)
{
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	u8 i, path, idx;
	u16 iqk_apply[2] = {0xc94, 0xe94};

	for (path = 0; path < 2; path++) {
		if (reload_idx == 2) {
			/* Restore the LOK IDAC for this path. */
			odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xdf,
				       BIT(4), 0x1);
			odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x58,
				       RFREGOFFSETMASK,
				       iqk_info->lok_idac[channel][path]);
		}

		/* idx 0 = TX CFIR page, idx 1 = RX CFIR page. */
		for (idx = 0; idx < reload_idx; idx++) {
			odm_set_bb_reg(dm, 0x1b00, MASKDWORD,
				       0xf8000008 | path << 1);
			odm_set_bb_reg(dm, 0x1b2c, MASKDWORD, 0x7);
			odm_set_bb_reg(dm, 0x1b38, MASKDWORD, 0x20000000);
			odm_set_bb_reg(dm, 0x1b3c, MASKDWORD, 0x20000000);
			odm_set_bb_reg(dm, 0x1bcc, MASKDWORD, 0x00000000);

			if (idx == 0)
				odm_set_bb_reg(dm, 0x1b0c, BIT(13) | BIT(12),
					       0x3);
			else
				odm_set_bb_reg(dm, 0x1b0c, BIT(13) | BIT(12),
					       0x1);

			odm_set_bb_reg(dm, 0x1bd4, BIT(20) | BIT(19) | BIT(18) |
				       BIT(17) | BIT(16),
				       0x10);

			for (i = 0; i < 8; i++) {
				odm_write_4byte(
					dm, 0x1bd8,
					((0xc0000000 >> idx) + 0x3) + (i * 4) +
					(iqk_info->iqk_cfir_real
					 [channel][path][idx][i]
					 << 9));
				odm_write_4byte(
					dm, 0x1bd8,
					((0xc0000000 >> idx) + 0x1) + (i * 4) +
					(iqk_info->iqk_cfir_imag
					 [channel][path][idx][i]
					 << 9));
			}
		}
		/* Restore the saved IQK apply (bypass) register. */
		odm_set_bb_reg(dm, iqk_apply[path], MASKDWORD,
			       iqk_info->bypass_iqk[channel][path]);

		odm_set_bb_reg(dm, 0x1bd8, MASKDWORD, 0x0);
		odm_set_bb_reg(dm, 0x1b0c, BIT(13) | BIT(12), 0x0);
	}
}
421
/*
 * Try to reuse a previously saved IQK result instead of recalibrating.
 * With @reset set, just invalidate both saved channel slots and report
 * no reload. Otherwise read the current RF channel (RF 0x18) and, if it
 * matches a saved slot, reload that slot's full settings and report.
 *
 * Returns true when a saved result was reloaded.
 */
static bool _iqk_reload_iqk_8822b(struct phy_dm_struct *dm, bool reset)
{
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	bool reload = false;
	u8 slot;

	if (reset) {
		iqk_info->iqk_channel[0] = 0x0;
		iqk_info->iqk_channel[1] = 0x0;
		return false;
	}

	iqk_info->rf_reg18 = odm_get_rf_reg(dm, ODM_RF_PATH_A, 0x18,
					    RFREGOFFSETMASK);

	for (slot = 0; slot < 2; slot++) {
		if (iqk_info->rf_reg18 != iqk_info->iqk_channel[slot])
			continue;

		_iqk_reload_iqk_setting_8822b(dm, slot, 2);
		_iqk_fill_iqk_report_8822b(dm, slot);
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]reload IQK result before!!!!\n");
		reload = true;
	}

	return reload;
}
448
/*
 * Program the RFE control registers for calibration. The two variants
 * differ only in the 0xcbc/0xebc value (0x0000083B with an external PA,
 * 0x00000100 without); write order matches the original sequence.
 */
static void _iqk_rfe_setting_8822b(struct phy_dm_struct *dm, bool ext_pa_on)
{
	u32 rfe_ctrl = ext_pa_on ? 0x0000083B : 0x00000100;

	/*RFE setting*/
	odm_write_4byte(dm, 0xcb0, 0x77777777);
	odm_write_4byte(dm, 0xcb4, 0x00007777);
	odm_write_4byte(dm, 0xcbc, rfe_ctrl);
	odm_write_4byte(dm, 0xeb0, 0x77777777);
	odm_write_4byte(dm, 0xeb4, 0x00007777);
	odm_write_4byte(dm, 0xebc, rfe_ctrl);

	if (ext_pa_on)
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]external PA on!!!!\n");
}
471
_iqk_rf_setting_8822b(struct phy_dm_struct * dm)472 static void _iqk_rf_setting_8822b(struct phy_dm_struct *dm)
473 {
474 u8 path;
475 u32 tmp;
476
477 odm_write_4byte(dm, 0x1b00, 0xf8000008);
478 odm_write_4byte(dm, 0x1bb8, 0x00000000);
479
480 for (path = 0; path < 2; path++) {
481 /*0xdf:B11 = 1,B4 = 0, B1 = 1*/
482 tmp = odm_get_rf_reg(dm, (enum odm_rf_radio_path)path, 0xdf,
483 RFREGOFFSETMASK);
484 tmp = (tmp & (~BIT(4))) | BIT(1) | BIT(11);
485 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xdf,
486 RFREGOFFSETMASK, tmp);
487
488 /*release 0x56 TXBB*/
489 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x65,
490 RFREGOFFSETMASK, 0x09000);
491
492 if (*dm->band_type == ODM_BAND_5G) {
493 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef,
494 BIT(19), 0x1);
495 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x33,
496 RFREGOFFSETMASK, 0x00026);
497 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x3e,
498 RFREGOFFSETMASK, 0x00037);
499 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x3f,
500 RFREGOFFSETMASK, 0xdefce);
501 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef,
502 BIT(19), 0x0);
503 } else {
504 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef,
505 BIT(19), 0x1);
506 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x33,
507 RFREGOFFSETMASK, 0x00026);
508 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x3e,
509 RFREGOFFSETMASK, 0x00037);
510 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x3f,
511 RFREGOFFSETMASK, 0x5efce);
512 odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef,
513 BIT(19), 0x0);
514 }
515 }
516 }
517
/*
 * Put the MAC and BB into the state IQK requires: pause TX queues
 * (0x522), disable CCK/OFDM blocking bits, select the DAC-buffer reset
 * and DA clock sources, route TX through the IQK block, and turn the
 * 3-wire interface off so calibration owns the RF bus.
 */
static void _iqk_configure_macbb_8822b(struct phy_dm_struct *dm)
{
	/*MACBB register setting*/
	odm_write_1byte(dm, 0x522, 0x7f);
	odm_set_bb_reg(dm, 0x550, BIT(11) | BIT(3), 0x0);
	odm_set_bb_reg(dm, 0x90c, BIT(15),
		       0x1); /*0x90c[15]=1: dac_buf reset selection*/
	odm_set_bb_reg(dm, 0x9a4, BIT(31),
		       0x0); /*0x9a4[31]=0: Select da clock*/
	/*0xc94[0]=1, 0xe94[0]=1: let tx through iqk*/
	odm_set_bb_reg(dm, 0xc94, BIT(0), 0x1);
	odm_set_bb_reg(dm, 0xe94, BIT(0), 0x1);
	/* 3-wire off*/
	odm_write_4byte(dm, 0xc00, 0x00000004);
	odm_write_4byte(dm, 0xe00, 0x00000004);
}
534
/*
 * Configure path @path for LO calibration (LOK): select the path in
 * the IQK engine (0x1b00), then apply band-specific RF gain (0x56),
 * filter (0x8f), and WE_LUT_TX_LOK settings. Bands other than 2.4G/5G
 * leave the RF side untouched (no default case).
 */
static void _iqk_lok_setting_8822b(struct phy_dm_struct *dm, u8 path)
{
	odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
	odm_write_4byte(dm, 0x1bcc, 0x9);
	odm_write_1byte(dm, 0x1b23, 0x00);

	switch (*dm->band_type) {
	case ODM_BAND_2_4G:
		odm_write_1byte(dm, 0x1b2b, 0x00);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x50df2);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xadc00);
		/* WE_LUT_TX_LOK*/
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef, BIT(4),
			       0x1);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x33,
			       BIT(1) | BIT(0), 0x0);
		break;
	case ODM_BAND_5G:
		odm_write_1byte(dm, 0x1b2b, 0x80);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x5086c);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xa9c00);
		/* WE_LUT_TX_LOK*/
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0xef, BIT(4),
			       0x1);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x33,
			       BIT(1) | BIT(0), 0x1);
		break;
	}
}
568
/*
 * Configure path @path for TX IQK: select the path, set the TX tone
 * (0x1b20), switch the engine to the opposite path's context for the
 * 0x1bcc write (path 0 -> 0xf800000a, else 0xf8000008), then apply
 * band-specific RF gain (0x56) and filter (0x8f) values.
 */
static void _iqk_txk_setting_8822b(struct phy_dm_struct *dm, u8 path)
{
	odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
	odm_write_4byte(dm, 0x1bcc, 0x9);
	odm_write_4byte(dm, 0x1b20, 0x01440008);

	if (path == 0x0)
		odm_write_4byte(dm, 0x1b00, 0xf800000a);
	else
		odm_write_4byte(dm, 0x1b00, 0xf8000008);
	odm_write_4byte(dm, 0x1bcc, 0x3f);

	switch (*dm->band_type) {
	case ODM_BAND_2_4G:
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x50df2);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xadc00);
		odm_write_1byte(dm, 0x1b2b, 0x00);
		break;
	case ODM_BAND_5G:
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x500ef);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xa9c00);
		odm_write_1byte(dm, 0x1b2b, 0x80);
		break;
	}
}
598
/*
 * Configure path @path for the first RX IQK stage: select the path in
 * the IQK engine, then program the band-specific tone registers
 * (0x1b20/0x1b24) and RF gain/filter values (0x56/0x8f).
 */
static void _iqk_rxk1_setting_8822b(struct phy_dm_struct *dm, u8 path)
{
	odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);

	switch (*dm->band_type) {
	case ODM_BAND_2_4G:
		odm_write_1byte(dm, 0x1bcc, 0x9);
		odm_write_1byte(dm, 0x1b2b, 0x00);
		odm_write_4byte(dm, 0x1b20, 0x01450008);
		odm_write_4byte(dm, 0x1b24, 0x01460c88);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x510e0);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xacc00);
		break;
	case ODM_BAND_5G:
		odm_write_1byte(dm, 0x1bcc, 0x09);
		odm_write_1byte(dm, 0x1b2b, 0x80);
		odm_write_4byte(dm, 0x1b20, 0x00850008);
		odm_write_4byte(dm, 0x1b24, 0x00460048);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x510e0);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xadc00);
		break;
	}
}
626
/*
 * Configure path @path for the second RX IQK stage. When @is_gs (gain
 * search) is set, tmp1bcc is (re)seeded with a band/path-dependent
 * starting IQ-mux value; otherwise the value left behind by the gain
 * search is reused. Then the tone registers and RF gain/filter are
 * programmed per band.
 */
static void _iqk_rxk2_setting_8822b(struct phy_dm_struct *dm, u8 path,
				    bool is_gs)
{
	struct dm_iqk_info *iqk_info = &dm->IQK_info;

	odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);

	switch (*dm->band_type) {
	case ODM_BAND_2_4G:
		if (is_gs)
			iqk_info->tmp1bcc = 0x12;
		odm_write_1byte(dm, 0x1bcc, iqk_info->tmp1bcc);
		odm_write_1byte(dm, 0x1b2b, 0x00);
		odm_write_4byte(dm, 0x1b20, 0x01450008);
		odm_write_4byte(dm, 0x1b24, 0x01460848);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x510e0);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xa9c00);
		break;
	case ODM_BAND_5G:
		if (is_gs) {
			if (path == ODM_RF_PATH_A)
				iqk_info->tmp1bcc = 0x12;
			else
				iqk_info->tmp1bcc = 0x09;
		}
		odm_write_1byte(dm, 0x1bcc, iqk_info->tmp1bcc);
		odm_write_1byte(dm, 0x1b2b, 0x80);
		odm_write_4byte(dm, 0x1b20, 0x00850008);
		odm_write_4byte(dm, 0x1b24, 0x00460848);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x56,
			       RFREGOFFSETMASK, 0x51060);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x8f,
			       RFREGOFFSETMASK, 0xa9c00);
		break;
	}
}
665
_iqk_check_cal_8822b(struct phy_dm_struct * dm,u32 IQK_CMD)666 static bool _iqk_check_cal_8822b(struct phy_dm_struct *dm, u32 IQK_CMD)
667 {
668 bool notready = true, fail = true;
669 u32 delay_count = 0x0;
670
671 while (notready) {
672 if (odm_read_4byte(dm, 0x1b00) == (IQK_CMD & 0xffffff0f)) {
673 fail = (bool)odm_get_bb_reg(dm, 0x1b08, BIT(26));
674 notready = false;
675 } else {
676 ODM_delay_ms(1);
677 delay_count++;
678 }
679
680 if (delay_count >= 50) {
681 fail = true;
682 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
683 "[IQK]IQK timeout!!!\n");
684 break;
685 }
686 }
687 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION, "[IQK]delay count = 0x%x!!!\n",
688 delay_count);
689 return fail;
690 }
691
/*
 * One RX IQK gain-search trial for @path at stage @step (RXIQK1 or
 * RXIQK2). Triggers the gain-search command, then (for RXIQK2) reads
 * back RF 0x0 and steers the next trial: bb_idx == 0x1 means the gain
 * is too low (lower LNA or raise the IQ mux), 0xa means too high
 * (lower the mux or raise LNA); hitting either end of both ranges sets
 * isbnd and accepts the boundary.
 *
 * Returns true when the search must be retried, false when a usable
 * gain was found (or the boundary was accepted).
 */
static bool _iqk_rx_iqk_gain_search_fail_8822b(struct phy_dm_struct *dm,
					       u8 path, u8 step)
{
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	bool fail = true;
	u32 IQK_CMD = 0x0, rf_reg0, tmp, bb_idx;
	u8 IQMUX[4] = {0x9, 0x12, 0x1b, 0x24};
	u8 idx;

	/* Locate the current IQ-mux setting in the candidate table. */
	for (idx = 0; idx < 4; idx++)
		if (iqk_info->tmp1bcc == IQMUX[idx])
			break;

	odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
	odm_write_4byte(dm, 0x1bcc, iqk_info->tmp1bcc);

	if (step == RXIQK1)
		ODM_RT_TRACE(
			dm, ODM_COMP_CALIBRATION,
			"[IQK]============ S%d RXIQK GainSearch ============\n",
			path);

	if (step == RXIQK1)
		IQK_CMD = 0xf8000208 | (1 << (path + 4));
	else
		IQK_CMD = 0xf8000308 | (1 << (path + 4));

	ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION, "[IQK]S%d GS%d_Trigger = 0x%x\n",
		     path, step, IQK_CMD);

	odm_write_4byte(dm, 0x1b00, IQK_CMD);
	odm_write_4byte(dm, 0x1b00, IQK_CMD + 0x1);
	ODM_delay_ms(GS_delay_8822B);
	fail = _iqk_check_cal_8822b(dm, IQK_CMD);

	if (step == RXIQK2) {
		rf_reg0 = odm_get_rf_reg(dm, (enum odm_rf_radio_path)path, 0x0,
					 RFREGOFFSETMASK);
		odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
		ODM_RT_TRACE(
			dm, ODM_COMP_CALIBRATION,
			"[IQK]S%d ==> RF0x0 = 0x%x, tmp1bcc = 0x%x, idx = %d, 0x1b3c = 0x%x\n",
			path, rf_reg0, iqk_info->tmp1bcc, idx,
			odm_read_4byte(dm, 0x1b3c));
		/* RF0x0[12:5]: LNA index in the top 3 bits, BB gain below. */
		tmp = (rf_reg0 & 0x1fe0) >> 5;
		iqk_info->lna_idx = tmp >> 5;
		bb_idx = tmp & 0x1f;
		if (bb_idx == 0x1) {
			if (iqk_info->lna_idx != 0x0)
				iqk_info->lna_idx--;
			else if (idx != 3)
				idx++;
			else
				iqk_info->isbnd = true;
			fail = true;
		} else if (bb_idx == 0xa) {
			if (idx != 0)
				idx--;
			else if (iqk_info->lna_idx != 0x7)
				iqk_info->lna_idx++;
			else
				iqk_info->isbnd = true;
			fail = true;
		} else {
			fail = false;
		}

		/* At a range boundary, accept the current setting. */
		if (iqk_info->isbnd)
			fail = false;

		iqk_info->tmp1bcc = IQMUX[idx];

		if (fail) {
			odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
			odm_write_4byte(
				dm, 0x1b24,
				(odm_read_4byte(dm, 0x1b24) & 0xffffe3ff) |
				(iqk_info->lna_idx << 10));
		}
	}

	return fail;
}
775
_lok_one_shot_8822b(void * dm_void,u8 path)776 static bool _lok_one_shot_8822b(void *dm_void, u8 path)
777 {
778 struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;
779 struct dm_iqk_info *iqk_info = &dm->IQK_info;
780 u8 delay_count = 0;
781 bool LOK_notready = false;
782 u32 LOK_temp = 0;
783 u32 IQK_CMD = 0x0;
784
785 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
786 "[IQK]==========S%d LOK ==========\n", path);
787
788 IQK_CMD = 0xf8000008 | (1 << (4 + path));
789
790 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION, "[IQK]LOK_Trigger = 0x%x\n",
791 IQK_CMD);
792
793 odm_write_4byte(dm, 0x1b00, IQK_CMD);
794 odm_write_4byte(dm, 0x1b00, IQK_CMD + 1);
795 /*LOK: CMD ID = 0 {0xf8000018, 0xf8000028}*/
796 /*LOK: CMD ID = 0 {0xf8000019, 0xf8000029}*/
797 ODM_delay_ms(LOK_delay_8822B);
798
799 delay_count = 0;
800 LOK_notready = true;
801
802 while (LOK_notready) {
803 if (odm_read_4byte(dm, 0x1b00) == (IQK_CMD & 0xffffff0f))
804 LOK_notready = false;
805 else
806 LOK_notready = true;
807
808 if (LOK_notready) {
809 ODM_delay_ms(1);
810 delay_count++;
811 }
812
813 if (delay_count >= 50) {
814 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
815 "[IQK]S%d LOK timeout!!!\n", path);
816 break;
817 }
818 }
819
820 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
821 "[IQK]S%d ==> delay_count = 0x%x\n", path, delay_count);
822 if (ODM_COMP_CALIBRATION) {
823 if (!LOK_notready) {
824 LOK_temp =
825 odm_get_rf_reg(dm, (enum odm_rf_radio_path)path,
826 0x58, RFREGOFFSETMASK);
827 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
828 "[IQK]0x58 = 0x%x\n", LOK_temp);
829 } else {
830 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
831 "[IQK]==>S%d LOK Fail!!!\n", path);
832 }
833 }
834 iqk_info->lok_fail[path] = LOK_notready;
835 return LOK_notready;
836 }
837
/*
 * Trigger one one-shot IQK command on @path for stage @idx (TXIQK,
 * RXIQK1 or RXIQK2), wait for completion (<= ~50 ms poll after the
 * fixed delay), read the fail bit from 0x1b08[26], and update the
 * apply registers (0xc94/0xe94) and the slot-0 fail report
 * accordingly.
 *
 * Returns true when this stage failed.
 */
static bool _iqk_one_shot_8822b(void *dm_void, u8 path, u8 idx)
{
	struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	u8 delay_count = 0;
	bool notready = true, fail = true;
	u32 IQK_CMD = 0x0;
	u16 iqk_apply[2] = {0xc94, 0xe94};

	if (idx == TXIQK)
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]============ S%d WBTXIQK ============\n",
			     path);
	else if (idx == RXIQK1)
		ODM_RT_TRACE(
			dm, ODM_COMP_CALIBRATION,
			"[IQK]============ S%d WBRXIQK STEP1============\n",
			path);
	else
		ODM_RT_TRACE(
			dm, ODM_COMP_CALIBRATION,
			"[IQK]============ S%d WBRXIQK STEP2============\n",
			path);

	/* Build the band-width/path dependent command word. */
	if (idx == TXIQK) {
		IQK_CMD = 0xf8000008 | ((*dm->band_width + 4) << 8) |
			  (1 << (path + 4));
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]TXK_Trigger = 0x%x\n", IQK_CMD);
		/*{0xf8000418, 0xf800042a} ==> 20 WBTXK (CMD = 4)*/
		/*{0xf8000518, 0xf800052a} ==> 40 WBTXK (CMD = 5)*/
		/*{0xf8000618, 0xf800062a} ==> 80 WBTXK (CMD = 6)*/
	} else if (idx == RXIQK1) {
		if (*dm->band_width == 2)
			IQK_CMD = 0xf8000808 | (1 << (path + 4));
		else
			IQK_CMD = 0xf8000708 | (1 << (path + 4));
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]RXK1_Trigger = 0x%x\n", IQK_CMD);
		/*{0xf8000718, 0xf800072a} ==> 20 WBTXK (CMD = 7)*/
		/*{0xf8000718, 0xf800072a} ==> 40 WBTXK (CMD = 7)*/
		/*{0xf8000818, 0xf800082a} ==> 80 WBTXK (CMD = 8)*/
	} else if (idx == RXIQK2) {
		IQK_CMD = 0xf8000008 | ((*dm->band_width + 9) << 8) |
			  (1 << (path + 4));
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]RXK2_Trigger = 0x%x\n", IQK_CMD);
		/*{0xf8000918, 0xf800092a} ==> 20 WBRXK (CMD = 9)*/
		/*{0xf8000a18, 0xf8000a2a} ==> 40 WBRXK (CMD = 10)*/
		/*{0xf8000b18, 0xf8000b2a} ==> 80 WBRXK (CMD = 11)*/
		/* Apply the LNA index chosen by the gain search. */
		odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
		odm_write_4byte(dm, 0x1b24,
				(odm_read_4byte(dm, 0x1b24) & 0xffffe3ff) |
				((iqk_info->lna_idx & 0x7) << 10));
	}
	odm_write_4byte(dm, 0x1b00, IQK_CMD);
	odm_write_4byte(dm, 0x1b00, IQK_CMD + 0x1);
	ODM_delay_ms(WBIQK_delay_8822B);

	/* Poll completion, then read the fail bit. */
	while (notready) {
		if (odm_read_4byte(dm, 0x1b00) == (IQK_CMD & 0xffffff0f))
			notready = false;
		else
			notready = true;

		if (notready) {
			ODM_delay_ms(1);
			delay_count++;
		} else {
			fail = (bool)odm_get_bb_reg(dm, 0x1b08, BIT(26));
			break;
		}

		if (delay_count >= 50) {
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]S%d IQK timeout!!!\n", path);
			break;
		}
	}

	if (dm->debug_components & ODM_COMP_CALIBRATION) {
		odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]S%d ==> 0x1b00 = 0x%x, 0x1b08 = 0x%x\n",
			     path, odm_read_4byte(dm, 0x1b00),
			     odm_read_4byte(dm, 0x1b08));
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]S%d ==> delay_count = 0x%x\n", path,
			     delay_count);
		if (idx != TXIQK)
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]S%d ==> RF0x0 = 0x%x, RF0x56 = 0x%x\n",
				path,
				odm_get_rf_reg(dm, (enum odm_rf_radio_path)path,
					       0x0, RFREGOFFSETMASK),
				odm_get_rf_reg(dm, (enum odm_rf_radio_path)path,
					       0x56, RFREGOFFSETMASK));
	}

	odm_write_4byte(dm, 0x1b00, 0xf8000008 | path << 1);

	/* TX failure: stop applying TX IQK compensation on this path. */
	if (idx == TXIQK)
		if (fail)
			odm_set_bb_reg(dm, iqk_apply[path], BIT(0), 0x0);

	if (idx == RXIQK2) {
		/* Record AGC (RF0x0[12:5]) plus the IQ-mux in the high byte. */
		iqk_info->rxiqk_agc[0][path] =
			(u16)(((odm_get_rf_reg(dm, (enum odm_rf_radio_path)path,
					       0x0, RFREGOFFSETMASK) >>
				5) &
			       0xff) |
			      (iqk_info->tmp1bcc << 8));

		odm_write_4byte(dm, 0x1b38, 0x20000000);

		if (!fail)
			odm_set_bb_reg(dm, iqk_apply[path], (BIT(11) | BIT(10)),
				       0x1);
		else
			odm_set_bb_reg(dm, iqk_apply[path], (BIT(11) | BIT(10)),
				       0x0);
	}

	if (idx == TXIQK)
		iqk_info->iqk_fail_report[0][path][TXIQK] = fail;
	else
		iqk_info->iqk_fail_report[0][path][RXIQK] = fail;

	return fail;
}
969
/*
 * Advance the RX IQK state machine for @path by one state. States:
 *   1 - gain search stage 1 (up to 2 retries; hard failure jumps to 5
 *       with fail code 0)
 *   2 - gain search stage 2 (retries until rxiqk_gs_limit, then moves
 *       on regardless)
 *   3 - RXK1 one-shot (up to 2 retries; failure -> step 5, code 1)
 *   4 - RXK2 one-shot (up to 2 retries; failure -> step 5, code 2)
 * Step 5 is the terminal state handled by the caller.
 *
 * Returns the fail status of the last trial.
 *
 * NOTE(review): the switch has no default; if rxiqk_step is outside
 * 1..4 the function returns the initial KFAIL = true without doing
 * anything — confirm callers never pass other values.
 */
static bool _iqk_rx_iqk_by_path_8822b(void *dm_void, u8 path)
{
	struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	bool KFAIL = true, gonext;

	switch (iqk_info->rxiqk_step) {
	case 1: /*gain search_RXK1*/
		_iqk_rxk1_setting_8822b(dm, path);
		gonext = false;
		while (1) {
			KFAIL = _iqk_rx_iqk_gain_search_fail_8822b(dm, path,
								   RXIQK1);
			if (KFAIL &&
			    (iqk_info->gs_retry_count[0][path][GSRXK1] < 2))
				iqk_info->gs_retry_count[0][path][GSRXK1]++;
			else if (KFAIL) {
				iqk_info->rxiqk_fail_code[0][path] = 0;
				iqk_info->rxiqk_step = 5;
				gonext = true;
			} else {
				iqk_info->rxiqk_step++;
				gonext = true;
			}
			if (gonext)
				break;
		}
		break;
	case 2: /*gain search_RXK2*/
		_iqk_rxk2_setting_8822b(dm, path, true);
		iqk_info->isbnd = false;
		while (1) {
			KFAIL = _iqk_rx_iqk_gain_search_fail_8822b(dm, path,
								   RXIQK2);
			if (KFAIL &&
			    (iqk_info->gs_retry_count[0][path][GSRXK2] <
			     rxiqk_gs_limit)) {
				iqk_info->gs_retry_count[0][path][GSRXK2]++;
			} else {
				iqk_info->rxiqk_step++;
				break;
			}
		}
		break;
	case 3: /*RXK1*/
		_iqk_rxk1_setting_8822b(dm, path);
		gonext = false;
		while (1) {
			KFAIL = _iqk_one_shot_8822b(dm, path, RXIQK1);
			if (KFAIL &&
			    (iqk_info->retry_count[0][path][RXIQK1] < 2))
				iqk_info->retry_count[0][path][RXIQK1]++;
			else if (KFAIL) {
				iqk_info->rxiqk_fail_code[0][path] = 1;
				iqk_info->rxiqk_step = 5;
				gonext = true;
			} else {
				iqk_info->rxiqk_step++;
				gonext = true;
			}
			if (gonext)
				break;
		}
		break;
	case 4: /*RXK2*/
		_iqk_rxk2_setting_8822b(dm, path, false);
		gonext = false;
		while (1) {
			KFAIL = _iqk_one_shot_8822b(dm, path, RXIQK2);
			if (KFAIL &&
			    (iqk_info->retry_count[0][path][RXIQK2] < 2))
				iqk_info->retry_count[0][path][RXIQK2]++;
			else if (KFAIL) {
				iqk_info->rxiqk_fail_code[0][path] = 2;
				iqk_info->rxiqk_step = 5;
				gonext = true;
			} else {
				iqk_info->rxiqk_step++;
				gonext = true;
			}
			if (gonext)
				break;
		}
		break;
	}
	return KFAIL;
}
1057
/*
 * _iqk_iqk_by_path_8822b - drive the LOK/TXIQK/RXIQK calibration state
 * machine across both RF paths.
 * @dm_void: opaque pointer to struct phy_dm_struct
 * @segment_iqk: when true, return to the caller after kcount_limit
 *	sub-calibrations so the whole IQK can be split into segments.
 *
 * dm->rf_calibrate_info.iqk_step advances 1..6 through the stages below
 * (for the RXIQK stages it is advanced inside phydm_set_iqk_info());
 * step 7 means everything is done and triggers the summary trace dump.
 */
static void _iqk_iqk_by_path_8822b(void *dm_void, bool segment_iqk)
{
	struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;
	struct dm_iqk_info *iqk_info = &dm->IQK_info;
	bool KFAIL = true;
	u8 i, kcount_limit;

	/* band_width == 2 is taken as 80 MHz here -- assumes the bandwidth
	 * enum encodes 80 MHz as 2; TODO confirm against its definition.
	 */
	if (*dm->band_width == 2)
		kcount_limit = kcount_limit_80m;
	else
		kcount_limit = kcount_limit_others;

	while (1) {
		switch (dm->rf_calibrate_info.iqk_step) {
		case 1: /*S0 LOK*/
			_iqk_lok_setting_8822b(dm, ODM_RF_PATH_A);
			_lok_one_shot_8822b(dm, ODM_RF_PATH_A);
			dm->rf_calibrate_info.iqk_step++;
			break;
		case 2: /*S1 LOK*/
			_iqk_lok_setting_8822b(dm, ODM_RF_PATH_B);
			_lok_one_shot_8822b(dm, ODM_RF_PATH_B);
			dm->rf_calibrate_info.iqk_step++;
			break;
		case 3: /*S0 TXIQK*/
			_iqk_txk_setting_8822b(dm, ODM_RF_PATH_A);
			KFAIL = _iqk_one_shot_8822b(dm, ODM_RF_PATH_A, TXIQK);
			iqk_info->kcount++;
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]S0TXK KFail = 0x%x\n", KFAIL);

			/* on failure, stay on this step for up to 3 retries */
			if (KFAIL &&
			    (iqk_info->retry_count[0][ODM_RF_PATH_A][TXIQK] <
			     3))
				iqk_info->retry_count[0][ODM_RF_PATH_A]
						     [TXIQK]++;
			else
				dm->rf_calibrate_info.iqk_step++;
			break;
		case 4: /*S1 TXIQK*/
			_iqk_txk_setting_8822b(dm, ODM_RF_PATH_B);
			KFAIL = _iqk_one_shot_8822b(dm, ODM_RF_PATH_B, TXIQK);
			iqk_info->kcount++;
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]S1TXK KFail = 0x%x\n", KFAIL);
			if (KFAIL &&
			    iqk_info->retry_count[0][ODM_RF_PATH_B][TXIQK] < 3)
				iqk_info->retry_count[0][ODM_RF_PATH_B]
						     [TXIQK]++;
			else
				dm->rf_calibrate_info.iqk_step++;
			break;
		case 5: /*S0 RXIQK*/
			/* phydm_set_iqk_info() loops _iqk_rx_iqk_by_path_8822b
			 * and advances iqk_step when rxiqk_step reaches 5.
			 */
			phydm_set_iqk_info(dm, iqk_info, 0);
			break;
		case 6: /*S1 RXIQK*/
			/* NOTE(review): phydm_set_iqk_info() runs path
			 * ODM_RF_PATH_A for both status 0 and 1 -- S1 looks
			 * like it should use ODM_RF_PATH_B; verify upstream.
			 */
			phydm_set_iqk_info(dm, iqk_info, 1);
			break;
		}

		/* step 7: all stages complete -- dump the LOK/IQK summary */
		if (dm->rf_calibrate_info.iqk_step == 7) {
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]==========LOK summary ==========\n");
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]PathA_LOK_notready = %d, PathB_LOK1_notready = %d\n",
				iqk_info->lok_fail[ODM_RF_PATH_A],
				iqk_info->lok_fail[ODM_RF_PATH_B]);
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]==========IQK summary ==========\n");
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]PathA_TXIQK_fail = %d, PathB_TXIQK_fail = %d\n",
				iqk_info->iqk_fail_report[0][ODM_RF_PATH_A]
							 [TXIQK],
				iqk_info->iqk_fail_report[0][ODM_RF_PATH_B]
							 [TXIQK]);
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]PathA_RXIQK_fail = %d, PathB_RXIQK_fail = %d\n",
				iqk_info->iqk_fail_report[0][ODM_RF_PATH_A]
							 [RXIQK],
				iqk_info->iqk_fail_report[0][ODM_RF_PATH_B]
							 [RXIQK]);
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]PathA_TXIQK_retry = %d, PathB_TXIQK_retry = %d\n",
				iqk_info->retry_count[0][ODM_RF_PATH_A][TXIQK],
				iqk_info->retry_count[0][ODM_RF_PATH_B][TXIQK]);
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]PathA_RXK1_retry = %d, PathA_RXK2_retry = %d, PathB_RXK1_retry = %d, PathB_RXK2_retry = %d\n",
				iqk_info->retry_count[0][ODM_RF_PATH_A][RXIQK1],
				iqk_info->retry_count[0][ODM_RF_PATH_A][RXIQK2],
				iqk_info->retry_count[0][ODM_RF_PATH_B][RXIQK1],
				iqk_info->retry_count[0][ODM_RF_PATH_B]
						     [RXIQK2]);
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]PathA_GS1_retry = %d, PathA_GS2_retry = %d, PathB_GS1_retry = %d, PathB_GS2_retry = %d\n",
				iqk_info->gs_retry_count[0][ODM_RF_PATH_A]
							[GSRXK1],
				iqk_info->gs_retry_count[0][ODM_RF_PATH_A]
							[GSRXK2],
				iqk_info->gs_retry_count[0][ODM_RF_PATH_B]
							[GSRXK1],
				iqk_info->gs_retry_count[0][ODM_RF_PATH_B]
							[GSRXK2]);
			/* final register writes on both paths (0x1b00 page
			 * select via i << 1), then leave the loop
			 */
			for (i = 0; i < 2; i++) {
				odm_write_4byte(dm, 0x1b00,
						0xf8000008 | i << 1);
				odm_write_4byte(dm, 0x1b2c, 0x7);
				odm_write_4byte(dm, 0x1bcc, 0x0);
			}
			break;
		}

		/* segmented IQK: yield after kcount_limit sub-calibrations */
		if (segment_iqk && (iqk_info->kcount == kcount_limit))
			break;
	}
}
1179
/*
 * _iqk_start_iqk_8822b - set GNT_WL on both RF paths, then kick off the
 * per-path calibration state machine.
 * @dm: phydm context
 * @segment_iqk: forwarded to _iqk_iqk_by_path_8822b()
 */
static void _iqk_start_iqk_8822b(struct phy_dm_struct *dm, bool segment_iqk)
{
	u8 path;
	u32 rf1;

	/* GNT_WL = 1: set BIT(5) | BIT(0) in RF register 0x1 on each path */
	for (path = ODM_RF_PATH_A; path <= ODM_RF_PATH_B; path++) {
		rf1 = odm_get_rf_reg(dm, (enum odm_rf_radio_path)path, 0x1,
				     RFREGOFFSETMASK);
		rf1 |= BIT(5) | BIT(0);
		odm_set_rf_reg(dm, (enum odm_rf_radio_path)path, 0x1,
			       RFREGOFFSETMASK, rf1);
	}

	_iqk_iqk_by_path_8822b(dm, segment_iqk);
}
1195
_iq_calibrate_8822b_init(void * dm_void)1196 static void _iq_calibrate_8822b_init(void *dm_void)
1197 {
1198 struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;
1199 struct dm_iqk_info *iqk_info = &dm->IQK_info;
1200 u8 i, j;
1201
1202 if (iqk_info->iqk_times == 0) {
1203 ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
1204 "[IQK]=====>PHY_IQCalibrate_8822B_Init\n");
1205
1206 for (i = 0; i < SS_8822B; i++) {
1207 for (j = 0; j < 2; j++) {
1208 iqk_info->lok_fail[i] = true;
1209 iqk_info->iqk_fail[j][i] = true;
1210 iqk_info->iqc_matrix[j][i] = 0x20000000;
1211 }
1212 }
1213
1214 phydm_init_iqk_information(iqk_info);
1215 }
1216 }
1217
/*
 * _phy_iq_calibrate_8822b - perform the complete driver-side IQ calibration.
 * @dm: phydm context
 * @reset: forwarded to _iqk_reload_iqk_8822b(); when a saved result is
 *	successfully reloaded (non-MP mode only) the calibration is skipped.
 *
 * Backs up MAC/BB/RF registers, runs the IQK state machine (possibly in
 * several segments with a 50 ms pause between them), restores the
 * registers, and finally stores/reports the calibration results.
 *
 * Fixes vs. previous revision: "strat" typo in the start banner, and the
 * total progressing time now accumulates the same measurement that is
 * reported, instead of sampling the clock a second time.
 */
static void _phy_iq_calibrate_8822b(struct phy_dm_struct *dm, bool reset)
{
	u32 MAC_backup[MAC_REG_NUM_8822B], BB_backup[BB_REG_NUM_8822B],
		RF_backup[RF_REG_NUM_8822B][SS_8822B];
	u32 backup_mac_reg[MAC_REG_NUM_8822B] = {0x520, 0x550};
	u32 backup_bb_reg[BB_REG_NUM_8822B] = {
		0x808, 0x90c, 0xc00, 0xcb0, 0xcb4, 0xcbc, 0xe00,
		0xeb0, 0xeb4, 0xebc, 0x1990, 0x9a4, 0xa04};
	u32 backup_rf_reg[RF_REG_NUM_8822B] = {0xdf, 0x8f, 0x65, 0x0, 0x1};
	bool segment_iqk = false, is_mp = false;

	struct dm_iqk_info *iqk_info = &dm->IQK_info;

	if (dm->mp_mode)
		is_mp = true;
	else if (dm->is_linked)
		/* while linked, calibrate in segments to limit disruption */
		segment_iqk = true;

	/* Normal mode may reload a previously saved calibration and bail out */
	if (!is_mp)
		if (_iqk_reload_iqk_8822b(dm, reset))
			return;

	ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
		     "[IQK]==========IQK start!!!!!==========\n");

	ODM_RT_TRACE(
		dm, ODM_COMP_CALIBRATION,
		"[IQK]band_type = %s, band_width = %d, ExtPA2G = %d, ext_pa_5g = %d\n",
		(*dm->band_type == ODM_BAND_5G) ? "5G" : "2G", *dm->band_width,
		dm->ext_pa, dm->ext_pa_5g);
	ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
		     "[IQK]Interface = %d, cut_version = %x\n",
		     dm->support_interface, dm->cut_version);

	iqk_info->iqk_times++;

	iqk_info->kcount = 0;
	dm->rf_calibrate_info.iqk_total_progressing_time = 0;
	dm->rf_calibrate_info.iqk_step = 1;
	iqk_info->rxiqk_step = 1;

	_iqk_backup_iqk_8822b(dm, 0);
	_iqk_backup_mac_bb_8822b(dm, MAC_backup, BB_backup, backup_mac_reg,
				 backup_bb_reg);
	_iqk_backup_rf_8822b(dm, RF_backup, backup_rf_reg);

	while (1) {
		if (!is_mp)
			dm->rf_calibrate_info.iqk_start_time =
				odm_get_current_time(dm);

		/* prepare MAC/BB/AFE/RFE/AGC/RF for calibration */
		_iqk_configure_macbb_8822b(dm);
		_iqk_afe_setting_8822b(dm, true);
		_iqk_rfe_setting_8822b(dm, false);
		_iqk_agc_bnd_int_8822b(dm);
		_iqk_rf_setting_8822b(dm);

		_iqk_start_iqk_8822b(dm, segment_iqk);

		/* undo calibration-time settings before (possibly) pausing */
		_iqk_afe_setting_8822b(dm, false);
		_iqk_restore_mac_bb_8822b(dm, MAC_backup, BB_backup,
					  backup_mac_reg, backup_bb_reg);
		_iqk_restore_rf_8822b(dm, backup_rf_reg, RF_backup);

		if (!is_mp) {
			dm->rf_calibrate_info.iqk_progressing_time =
				odm_get_progressing_time(
					dm,
					dm->rf_calibrate_info.iqk_start_time);
			/* accumulate the elapsed time measured above rather
			 * than sampling the clock a second time, so the total
			 * is exactly the sum of the reported segment times
			 */
			dm->rf_calibrate_info.iqk_total_progressing_time +=
				dm->rf_calibrate_info.iqk_progressing_time;
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]IQK progressing_time = %lld ms\n",
				dm->rf_calibrate_info.iqk_progressing_time);
		}

		/* step 7 == state machine finished */
		if (dm->rf_calibrate_info.iqk_step == 7)
			break;

		iqk_info->kcount = 0;
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION, "[IQK]delay 50ms!!!\n");
		ODM_delay_ms(50);
	}

	_iqk_backup_iqk_8822b(dm, 1);
	_iqk_fill_iqk_report_8822b(dm, 0);

	if (!is_mp)
		ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
			     "[IQK]Total IQK progressing_time = %lld ms\n",
			     dm->rf_calibrate_info.iqk_total_progressing_time);

	ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
		     "[IQK]==========IQK end!!!!!==========\n");
}
1316
/* FW-offload IQK stub: not implemented for 8822B.  The only caller,
 * phy_iq_calibrate_8822b(), forces iqk_fw_offload to 0, so the branch
 * invoking this is currently dead code.
 */
static void _phy_iq_calibrate_by_fw_8822b(void *dm_void, u8 clear) {}
1318
1319 /*IQK version:v3.3, NCTL v0.6*/
1320 /*1.The new gainsearch method for RXIQK*/
1321 /*2.The new format of IQK report register: 0x1be8/0x1bec*/
1322 /*3. add the option of segment IQK*/
/*
 * phy_iq_calibrate_8822b - public entry point for 8822B IQ calibration.
 * @dm_void: opaque pointer to struct phy_dm_struct
 * @clear: forwarded to the calibration routine
 *
 * Guards against concurrent calibration with is_iqk_in_progress under
 * RT_IQK_SPINLOCK, then runs either the FW-offload path or the driver
 * path.  iqk_fw_offload is forced to 0 below, so the FW branch is
 * currently unreachable and the driver path is always taken.
 */
void phy_iq_calibrate_8822b(void *dm_void, bool clear)
{
	struct phy_dm_struct *dm = (struct phy_dm_struct *)dm_void;

	/* force-disable FW offload; makes the "FW IQK" branch dead code */
	dm->iqk_fw_offload = 0;

	/*FW IQK*/
	if (dm->iqk_fw_offload) {
		if (!dm->rf_calibrate_info.is_iqk_in_progress) {
			/* claim the in-progress flag under the IQK lock */
			odm_acquire_spin_lock(dm, RT_IQK_SPINLOCK);
			dm->rf_calibrate_info.is_iqk_in_progress = true;
			odm_release_spin_lock(dm, RT_IQK_SPINLOCK);

			dm->rf_calibrate_info.iqk_start_time =
				odm_get_current_time(dm);

			odm_write_4byte(dm, 0x1b00, 0xf8000008);
			odm_set_bb_reg(dm, 0x1bf0, 0xff000000, 0xff);
			ODM_RT_TRACE(dm, ODM_COMP_CALIBRATION,
				     "[IQK]0x1bf0 = 0x%x\n",
				     odm_read_4byte(dm, 0x1bf0));

			_phy_iq_calibrate_by_fw_8822b(dm, clear);
			phydm_get_read_counter(dm);

			dm->rf_calibrate_info.iqk_progressing_time =
				odm_get_progressing_time(
					dm,
					dm->rf_calibrate_info.iqk_start_time);

			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]IQK progressing_time = %lld ms\n",
				dm->rf_calibrate_info.iqk_progressing_time);

			odm_acquire_spin_lock(dm, RT_IQK_SPINLOCK);
			dm->rf_calibrate_info.is_iqk_in_progress = false;
			odm_release_spin_lock(dm, RT_IQK_SPINLOCK);
		} else {
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"== Return the IQK CMD, because the IQK in Progress ==\n");
		}

	} else {
		/* driver-side IQK: one-time init, then full calibration */
		_iq_calibrate_8822b_init(dm_void);

		if (!dm->rf_calibrate_info.is_iqk_in_progress) {
			odm_acquire_spin_lock(dm, RT_IQK_SPINLOCK);
			dm->rf_calibrate_info.is_iqk_in_progress = true;
			odm_release_spin_lock(dm, RT_IQK_SPINLOCK);
			/* MP mode times the calibration here; normal mode
			 * times each segment inside _phy_iq_calibrate_8822b()
			 */
			if (dm->mp_mode)
				dm->rf_calibrate_info.iqk_start_time =
					odm_get_current_time(dm);

			_phy_iq_calibrate_8822b(dm, clear);
			if (dm->mp_mode) {
				dm->rf_calibrate_info.iqk_progressing_time =
					odm_get_progressing_time(
						dm, dm->rf_calibrate_info
							    .iqk_start_time);
				ODM_RT_TRACE(
					dm, ODM_COMP_CALIBRATION,
					"[IQK]IQK progressing_time = %lld ms\n",
					dm->rf_calibrate_info
						.iqk_progressing_time);
			}
			odm_acquire_spin_lock(dm, RT_IQK_SPINLOCK);
			dm->rf_calibrate_info.is_iqk_in_progress = false;
			odm_release_spin_lock(dm, RT_IQK_SPINLOCK);
		} else {
			ODM_RT_TRACE(
				dm, ODM_COMP_CALIBRATION,
				"[IQK]== Return the IQK CMD, because the IQK in Progress ==\n");
		}
	}
}
1400