/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2
 * as published by the Free Software Foundation
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "trace.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"

#include <linux/etherdevice.h>

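/* Write one RF register through the direct RF CSR interface. This path is
 * only used before the on-chip MCU is running; see rf_wr() below, which
 * normally prefers the MCU register-pair interface.
 */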
static int
mt76x0_rf_csr_wr(struct mt76x0_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->reg_atomic_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);
	trace_mt76x0_rf_write(&dev->mt76, bank, offset, value);
out:
	mutex_unlock(&dev->reg_atomic_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int
mt76x0_rf_csr_rr(struct mt76x0_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->reg_atomic_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank) {
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);
		trace_mt76x0_rf_read(&dev->mt76, bank, offset, ret);
	}
out:
	mutex_unlock(&dev->reg_atomic_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

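/* rf_wr()/rf_rr() access RF registers via the MCU register-pair interface
 * once the MCU is running, and warn and fall back to direct CSR access
 * otherwise.
 */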
static int
rf_wr(struct mt76x0_dev *dev, u32 offset, u8 val)
{
	if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		return mt76x0_write_reg_pairs(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		WARN_ON_ONCE(1);
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}

static int
rf_rr(struct mt76x0_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

		ret = mt76x0_read_reg_pairs(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		WARN_ON_ONCE(1);
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}

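/* Read-modify-write helper: keep the bits outside @mask, OR in @val and
 * write the result back. Returns the value written or a negative errno.
 */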
static int
rf_rmw(struct mt76x0_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = rf_rr(dev, offset);
	if (ret < 0)
		return ret;
	val |= ret & ~mask;
	ret = rf_wr(dev, offset, val);
	if (ret)
		return ret;

	return val;
}

static int
rf_set(struct mt76x0_dev *dev, u32 offset, u8 val)
{
	return rf_rmw(dev, offset, 0, val);
}

#if 0
static int
rf_clear(struct mt76x0_dev *dev, u32 offset, u8 mask)
{
	return rf_rmw(dev, offset, mask, 0);
}
#endif

#define RF_RANDOM_WRITE(dev, tab) \
	mt76x0_write_reg_pairs(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab))

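/* Poll the BBP version register until it reads back neither all zeros nor
 * all ones, i.e. until the baseband processor responds.
 */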
int mt76x0_wait_bbp_ready(struct mt76x0_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	return 0;
}

static void
mt76x0_bbp_set_ctrlch(struct mt76x0_dev *dev, enum nl80211_chan_width width,
		      u8 ctrl)
{
	int core_val, agc_val;

	switch (width) {
	case NL80211_CHAN_WIDTH_80:
		core_val = 3;
		agc_val = 7;
		break;
	case NL80211_CHAN_WIDTH_40:
		core_val = 2;
		agc_val = 3;
		break;
	default:
		core_val = 0;
		agc_val = 1;
		break;
	}

	mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
	mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
	mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
	mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
}

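/* Return the calibrated RSSI for a received frame, applying the per-band
 * LNA gain and RSSI offset from the EEPROM.
 */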
int mt76x0_phy_get_rssi(struct mt76x0_dev *dev, struct mt76x0_rxwi *rxwi)
{
	s8 lna_gain, rssi_offset;
	int val;

	if (dev->mt76.chandef.chan->band == NL80211_BAND_2GHZ) {
		lna_gain = dev->ee->lna_gain_2ghz;
		rssi_offset = dev->ee->rssi_offset_2ghz[0];
	} else {
		lna_gain = dev->ee->lna_gain_5ghz[0];
		rssi_offset = dev->ee->rssi_offset_5ghz[0];
	}

	val = rxwi->rssi[0] + rssi_offset - lna_gain;

	return val;
}

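/* Run a VCO calibration cycle for the given channel: program the calibration
 * mode registers, pick startup/settle times based on the channel and kick
 * the calibration via B0.R04<7>.
 */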
static void mt76x0_vco_cal(struct mt76x0_dev *dev, u8 channel)
{
	u8 val;

	val = rf_rr(dev, MT_RF(0, 4));
	if ((val & 0x70) != 0x30)
		return;

	/*
	 * Calibration Mode - Open loop, closed loop, and amplitude:
	 * B0.R06.[0]: 1
	 * B0.R06.[3:1] bp_close_code: 100
	 * B0.R05.[7:0] bp_open_code: 0x0
	 * B0.R04.[2:0] cal_bits: 000
	 * B0.R03.[2:0] startup_time: 011
	 * B0.R03.[6:4] settle_time:
	 *  80MHz channel: 110
	 *  40MHz channel: 101
	 *  20MHz channel: 100
	 */
	val = rf_rr(dev, MT_RF(0, 6));
	val &= ~0xf;
	val |= 0x09;
	rf_wr(dev, MT_RF(0, 6), val);

	val = rf_rr(dev, MT_RF(0, 5));
	if (val != 0)
		rf_wr(dev, MT_RF(0, 5), 0x0);

	val = rf_rr(dev, MT_RF(0, 4));
	val &= ~0x07;
	rf_wr(dev, MT_RF(0, 4), val);

	val = rf_rr(dev, MT_RF(0, 3));
	val &= ~0x77;
	if (channel == 1 || channel == 7 || channel == 9 || channel >= 13) {
		val |= 0x63;
	} else if (channel == 3 || channel == 4 || channel == 10) {
		val |= 0x53;
	} else if (channel == 2 || channel == 5 || channel == 6 ||
		   channel == 8 || channel == 11 || channel == 12) {
		val |= 0x43;
	} else {
		WARN(1, "Unknown channel %u\n", channel);
		return;
	}
	rf_wr(dev, MT_RF(0, 3), val);

	/* TODO replace by mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7)); */
	val = rf_rr(dev, MT_RF(0, 4));
	val = ((val & ~(0x80)) | 0x80);
	rf_wr(dev, MT_RF(0, 4), val);

	msleep(2);
}

static void
mt76x0_mac_set_ctrlch(struct mt76x0_dev *dev, bool primary_upper)
{
	mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
		       primary_upper);
}

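/* Load the per-band RF defaults and steer the TX band configuration and
 * ALC/gain correction registers for 2.4 GHz or 5 GHz operation.
 */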
static void
mt76x0_phy_set_band(struct mt76x0_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		rf_wr(dev, MT_RF(5, 0), 0x45);
		rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
		mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		rf_wr(dev, MT_RF(5, 0), 0x44);
		rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
		mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}

#define EXT_PA_2G_5G	0x0
#define EXT_PA_5G_ONLY	0x1
#define EXT_PA_2G_ONLY	0x2
#define INT_PA_2G_5G	0x3

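/* Program the channel-specific RF PLL settings from the frequency plan
 * tables, apply the bandwidth/band switch tables and select the internal or
 * external PA for the requested channel.
 */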
static void
mt76x0_phy_set_chan_rf_params(struct mt76x0_dev *dev, u8 channel, u16 rf_bw_band)
{
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	u32 mac_reg;
	u8 rf_val;
	int i;
	bool bSDM = false;
	const struct mt76x0_freq_item *freq_item;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			bSDM = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (bSDM)
				freq_item = &(mt76x0_sdm_frequency_plan[i]);
			else
				freq_item = &(mt76x0_frequency_plan[i]);

			rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			rf_val = rf_rr(dev, MT_RF(0, 32));
			rf_val &= ~0xE0;
			rf_val |= freq_item->pllR32_b7b5;
			rf_wr(dev, MT_RF(0, 32), rf_val);

			/* R32<4:0> pll_den: (Denominator - 8) */
			rf_val = rf_rr(dev, MT_RF(0, 32));
			rf_val &= ~0x1F;
			rf_val |= freq_item->pllR32_b4b0;
			rf_wr(dev, MT_RF(0, 32), rf_val);

			/* R31<7:5> */
			rf_val = rf_rr(dev, MT_RF(0, 31));
			rf_val &= ~0xE0;
			rf_val |= freq_item->pllR31_b7b5;
			rf_wr(dev, MT_RF(0, 31), rf_val);

			/* R31<4:0> pll_k (numerator) */
			rf_val = rf_rr(dev, MT_RF(0, 31));
			rf_val &= ~0x1F;
			rf_val |= freq_item->pllR31_b4b0;
			rf_wr(dev, MT_RF(0, 31), rf_val);

			/* R30<7> sdm_reset_n */
			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x80;
			if (bSDM) {
				rf_wr(dev, MT_RF(0, 30), rf_val);
				rf_val |= 0x80;
				rf_wr(dev, MT_RF(0, 30), rf_val);
			} else {
				rf_val |= freq_item->pllR30_b7;
				rf_wr(dev, MT_RF(0, 30), rf_val);
			}

			/* R30<6:2> sdmmash_prbs,sin */
			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x7C;
			rf_val |= freq_item->pllR30_b6b2;
			rf_wr(dev, MT_RF(0, 30), rf_val);

			/* R30<1> sdm_bp */
			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x02;
			rf_val |= (freq_item->pllR30_b1 << 1);
			rf_wr(dev, MT_RF(0, 30), rf_val);

			/* R30<0> R29<7:0> (hex) pll_n */
			rf_val = freq_item->pll_n & 0x00FF;
			rf_wr(dev, MT_RF(0, 29), rf_val);

			rf_val = rf_rr(dev, MT_RF(0, 30));
			rf_val &= ~0x1;
			rf_val |= ((freq_item->pll_n >> 8) & 0x0001);
			rf_wr(dev, MT_RF(0, 30), rf_val);

			/* R28<7:6> isi_iso */
			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0xC0;
			rf_val |= freq_item->pllR28_b7b6;
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R28<5:4> pfd_dly */
			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0x30;
			rf_val |= freq_item->pllR28_b5b4;
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R28<3:2> clksel option */
			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0x0C;
			rf_val |= freq_item->pllR28_b3b2;
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
			rf_val = freq_item->pll_sdm_k & 0x000000FF;
			rf_wr(dev, MT_RF(0, 26), rf_val);

			rf_val = ((freq_item->pll_sdm_k >> 8) & 0x000000FF);
			rf_wr(dev, MT_RF(0, 27), rf_val);

			rf_val = rf_rr(dev, MT_RF(0, 28));
			rf_val &= ~0x3;
			rf_val |= ((freq_item->pll_sdm_k >> 16) & 0x0003);
			rf_wr(dev, MT_RF(0, 28), rf_val);

			/* R24<1:0> xo_div */
			rf_val = rf_rr(dev, MT_RF(0, 24));
			rf_val &= ~0x3;
			rf_val |= freq_item->pllR24_b1b0;
			rf_wr(dev, MT_RF(0, 24), rf_val);

			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_bw_switch_tab[i].value);
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			rf_wr(dev, mt76x0_rf_band_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_band_switch_tab[i].value);
		}
	}

	mac_reg = mt76_rr(dev, MT_RF_MISC);
	mac_reg &= ~0xC; /* Clear 0x518[3:2] */
	mt76_wr(dev, MT_RF_MISC, mac_reg);

	if (dev->ee->pa_type == INT_PA_2G_5G ||
	    (dev->ee->pa_type == EXT_PA_5G_ONLY && (rf_band & RF_G_BAND)) ||
	    (dev->ee->pa_type == EXT_PA_2G_ONLY && (rf_band & RF_A_BAND))) {
		; /* Internal PA - nothing to do. */
	} else {
		/*
		 * MT_RF_MISC (offset: 0x0518)
		 * [2] 1'b1: enable external A band PA, 1'b0: disable external A band PA
		 * [3] 1'b1: enable external G band PA, 1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND) {
			mac_reg = mt76_rr(dev, MT_RF_MISC);
			mac_reg |= 0x4;
			mt76_wr(dev, MT_RF_MISC, mac_reg);
		} else {
			mac_reg = mt76_rr(dev, MT_RF_MISC);
			mac_reg |= 0x8;
			mt76_wr(dev, MT_RF_MISC, mac_reg);
		}

		/* External PA */
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				rf_wr(dev, mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
				      mt76x0_rf_ext_pa_tab[i].value);
	}

	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* Set Atten mode = 2 for G band, disable Tx Inc dcoc. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* Set Atten mode = 0 for ext A band, disable Tx Inc dcoc cal. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}

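/* Apply the BBP switch table entries that match the current bandwidth/band,
 * adjusting the AGC gain entry by twice the EEPROM LNA gain for the channel
 * group in use.
 */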
static void
mt76x0_phy_set_chan_bbp_params(struct mt76x0_dev *dev, u8 channel, u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain = FIELD_GET(MT_BBP_AGC_GAIN, val);

			if (channel > 14) {
				if (channel < 100)
					gain -= dev->ee->lna_gain_5ghz[0] * 2;
				else if (channel < 137)
					gain -= dev->ee->lna_gain_5ghz[1] * 2;
				else
					gain -= dev->ee->lna_gain_5ghz[2] * 2;
			} else {
				gain -= dev->ee->lna_gain_2ghz * 2;
			}

			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}

#if 0
static void
mt76x0_extra_power_over_mac(struct mt76x0_dev *dev)
{
	u32 val;

	val = ((mt76_rr(dev, MT_TX_PWR_CFG_1) & 0x00003f00) >> 8);
	val |= ((mt76_rr(dev, MT_TX_PWR_CFG_2) & 0x00003f00) << 8);
	mt76_wr(dev, MT_TX_PWR_CFG_7, val);

	/* TODO: fix VHT */
	val = ((mt76_rr(dev, MT_TX_PWR_CFG_3) & 0x0000ff00) >> 8);
	mt76_wr(dev, MT_TX_PWR_CFG_8, val);

	val = ((mt76_rr(dev, MT_TX_PWR_CFG_4) & 0x0000ff00) >> 8);
	mt76_wr(dev, MT_TX_PWR_CFG_9, val);
}

static void
mt76x0_phy_set_tx_power(struct mt76x0_dev *dev, u8 channel, u8 rf_bw_band)
{
	u32 val;
	int i;
	int bw = (rf_bw_band & RF_BW_20) ? 0 : 1;

	for (i = 0; i < 4; i++) {
		if (channel <= 14)
			val = dev->ee->tx_pwr_cfg_2g[i][bw];
		else
			val = dev->ee->tx_pwr_cfg_5g[i][bw];

		mt76_wr(dev, MT_TX_PWR_CFG_0 + 4 * i, val);
	}

	mt76x0_extra_power_over_mac(dev);
}
#endif

static void
mt76x0_bbp_set_bw(struct mt76x0_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	default:
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
		/* TODO error */
		return;
	}

	mt76x0_mcu_function_select(dev, BW_SETTING, bw);
}

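/* Look up the channel in the per-channel TX power list and program the
 * corresponding EEPROM power value into MT_TX_ALC_CFG_0.
 */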
static void
mt76x0_phy_set_chan_pwr(struct mt76x0_dev *dev, u8 channel)
{
	static const int mt76x0_tx_pwr_ch_list[] = {
		1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
		36, 38, 40, 44, 46, 48, 52, 54, 56, 60, 62, 64,
		100, 102, 104, 108, 110, 112, 116, 118, 120, 124, 126, 128, 132, 134, 136, 140,
		149, 151, 153, 157, 159, 161, 165, 167, 169, 171, 173,
		42, 58, 106, 122, 155
	};
	int i;
	u32 val;

	for (i = 0; i < ARRAY_SIZE(mt76x0_tx_pwr_ch_list); i++)
		if (mt76x0_tx_pwr_ch_list[i] == channel)
			break;

	if (WARN_ON(i == ARRAY_SIZE(mt76x0_tx_pwr_ch_list)))
		return;

	val = mt76_rr(dev, MT_TX_ALC_CFG_0);
	val &= ~0x3f3f;
	val |= dev->ee->tx_pwr_per_chan[i];
	val |= 0x2f2f << 16;
	mt76_wr(dev, MT_TX_ALC_CFG_0, val);
}

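/* Core channel switch: derive the control channel index and band/bandwidth
 * flags from the chandef, then program the BBP, MAC, extended CCA, RF and
 * per-channel power settings, running a VCO calibration when scanning.
 */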
static int
__mt76x0_phy_set_channel(struct mt76x0_dev *dev,
			 struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	int ch_group_index, freq, freq1;
	u8 channel;
	u32 val;
	u16 rf_bw_band;

	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}

	mt76x0_bbp_set_bw(dev, chandef->width);
	mt76x0_bbp_set_ctrlch(dev, chandef->width, ch_group_index);
	mt76x0_mac_set_ctrlch(dev, ch_group_index & 1);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

	/* set Japan Tx filter at channel 14 */
	val = mt76_rr(dev, MT_BBP(CORE, 1));
	if (channel == 14)
		val |= 0x20;
	else
		val &= ~0x20;
	mt76_wr(dev, MT_BBP(CORE, 1), val);

	mt76x0_phy_set_chan_bbp_params(dev, channel, rf_bw_band);

	/* The vendor driver doesn't do this. */
	/* mt76x0_phy_set_tx_power(dev, channel, rf_bw_band); */

	if (scan)
		mt76x0_vco_cal(dev, channel);

	mt76x0_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
	mt76x0_phy_set_chan_pwr(dev, channel);

	dev->mt76.chandef = *chandef;
	return 0;
}

int mt76x0_phy_set_channel(struct mt76x0_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	int ret;

	mutex_lock(&dev->hw_atomic_mutex);
	ret = __mt76x0_phy_set_channel(dev, chandef);
	mutex_unlock(&dev->hw_atomic_mutex);

	return ret;
}

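/* Re-run the RF/baseband calibrations after association: temporarily zero
 * the TX ALC configuration, run the MCU calibration sequence for the current
 * band and then restore the saved register values.
 */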
void mt76x0_phy_recalibrate_after_assoc(struct mt76x0_dev *dev)
{
	u32 tx_alc, reg_val;
	u8 channel = dev->mt76.chandef.chan->hw_value;
	int is_5ghz = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 1 : 0;

	mt76x0_mcu_calibrate(dev, MCU_CAL_R, 0);

	mt76x0_vco_cal(dev, channel);

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, 0x2124);
	reg_val &= 0xffffff7e;
	mt76_wr(dev, 0x2124, reg_val);

	mt76x0_mcu_calibrate(dev, MCU_CAL_RXDCOC, 0);

	mt76x0_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
	mt76x0_mcu_calibrate(dev, MCU_CAL_LOFT, is_5ghz);
	mt76x0_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz);
	mt76x0_mcu_calibrate(dev, MCU_CAL_TX_GROUP_DELAY, is_5ghz);
	mt76x0_mcu_calibrate(dev, MCU_CAL_RXIQ, is_5ghz);
	mt76x0_mcu_calibrate(dev, MCU_CAL_RX_GROUP_DELAY, is_5ghz);

	mt76_wr(dev, 0x2124, reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	msleep(100);

	mt76x0_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}

void mt76x0_agc_save(struct mt76x0_dev *dev)
{
	/* Only one RX path */
	dev->agc_save = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, 8)));
}

void mt76x0_agc_restore(struct mt76x0_dev *dev)
{
	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, dev->agc_save);
}

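/* Trigger an on-chip temperature measurement: save and reprogram the sensor
 * related RF registers, poll BBP CORE 34 for completion, convert the raw
 * reading using the EEPROM temperature offset and restore the registers.
 */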
static void mt76x0_temp_sensor(struct mt76x0_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	int cycle, temp;
	u32 val;
	s32 sval;

	rf_b7_73 = rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = rf_rr(dev, MT_RF(0, 73));

	rf_wr(dev, MT_RF(7, 73), 0x02);
	rf_wr(dev, MT_RF(0, 66), 0x23);
	rf_wr(dev, MT_RF(0, 73), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);

	for (cycle = 0; cycle < 2000; cycle++) {
		val = mt76_rr(dev, MT_BBP(CORE, 34));
		if (!(val & 0x10))
			break;
		udelay(3);
	}

	if (cycle >= 2000) {
		val &= 0x10;
		mt76_wr(dev, MT_BBP(CORE, 34), val);
		goto done;
	}

	sval = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (!(sval & 0x80))
		sval &= 0x7f; /* Positive */
	else
		sval |= 0xffffff00; /* Negative */

	temp = (35 * (sval - dev->ee->temp_off)) / 10 + 25;

done:
	rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	rf_wr(dev, MT_RF(0, 73), rf_b0_67);
}

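/* Adjust the initial VGA gain in BBP AGC register 8 based on the current
 * band and the running average RSSI.
 */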
static void mt76x0_dynamic_vga_tuning(struct mt76x0_dev *dev)
{
	u32 val, init_vga;

	init_vga = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 0x54 : 0x4E;
	if (dev->avg_rssi > -60)
		init_vga -= 0x20;
	else if (dev->avg_rssi > -70)
		init_vga -= 0x10;

	val = mt76_rr(dev, MT_BBP(AGC, 8));
	val &= 0xFFFF80FF;
	val |= init_vga << 8;
	mt76_wr(dev, MT_BBP(AGC, 8), val);
}

static void mt76x0_phy_calibrate(struct work_struct *work)
{
	struct mt76x0_dev *dev = container_of(work, struct mt76x0_dev,
					      cal_work.work);

	mt76x0_dynamic_vga_tuning(dev);
	mt76x0_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}

void mt76x0_phy_con_cal_onoff(struct mt76x0_dev *dev,
			      struct ieee80211_bss_conf *info)
{
	/* Start/stop collecting beacon data */
	spin_lock_bh(&dev->con_mon_lock);
	ether_addr_copy(dev->ap_bssid, info->bssid);
	dev->avg_rssi = 0;
	dev->bcn_freq_off = MT_FREQ_OFFSET_INVALID;
	spin_unlock_bh(&dev->con_mon_lock);
}

static void
mt76x0_set_rx_chains(struct mt76x0_dev *dev)
{
	u32 val;

	val = mt76_rr(dev, MT_BBP(AGC, 0));
	val &= ~(BIT(3) | BIT(4));

	if (dev->chainmask & BIT(1))
		val |= BIT(3);

	mt76_wr(dev, MT_BBP(AGC, 0), val);

	mb();
	val = mt76_rr(dev, MT_BBP(AGC, 0));
}

static void
mt76x0_set_tx_dac(struct mt76x0_dev *dev)
{
	if (dev->chainmask & BIT(1))
		mt76_set(dev, MT_BBP(TXBE, 5), 3);
	else
		mt76_clear(dev, MT_BBP(TXBE, 5), 3);
}

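/* Load the static RF register tables, program the crystal frequency offset
 * from the EEPROM, reset the DAC and kick an initial VCO calibration.
 */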
static void
mt76x0_rf_init(struct mt76x0_dev *dev)
{
	int i;
	u8 val;

	RF_RANDOM_WRITE(dev, mt76x0_rf_central_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
			rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			rf_wr(dev,
			      mt76x0_rf_band_switch_tab[i].rf_bank_reg,
			      mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/*
	 * Frequency calibration:
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	rf_wr(dev, MT_RF(0, 22), min_t(u8, dev->ee->rf_freq_off, 0xBF));
	val = rf_rr(dev, MT_RF(0, 22));

	/*
	 * Reset the DAC during power up: set B0.R73<7>=1, then B0.R73<7>=0,
	 * then B0.R73<7>=1 again.
	 */
	val = rf_rr(dev, MT_RF(0, 73));
	val |= 0x80;
	rf_wr(dev, MT_RF(0, 73), val);
	val &= ~0x80;
	rf_wr(dev, MT_RF(0, 73), val);
	val |= 0x80;
	rf_wr(dev, MT_RF(0, 73), val);

	/*
	 * vcocal_en: initiate VCO calibration (resets after completion).
	 * It should be at the end of the RF configuration.
	 */
	rf_set(dev, MT_RF(0, 4), 0x80);
}

static void mt76x0_ant_select(struct mt76x0_dev *dev)
{
	/* Single antenna mode. */
	mt76_rmw(dev, MT_WLAN_FUN_CTRL, BIT(5), BIT(6));
	mt76_clear(dev, MT_CMB_CTRL, BIT(14) | BIT(12));
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
	mt76_rmw(dev, MT_COEXCFG3, BIT(5) | BIT(4) | BIT(3) | BIT(2), BIT(1));
}

void mt76x0_phy_init(struct mt76x0_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibrate);

	mt76x0_ant_select(dev);

	mt76x0_rf_init(dev);

	mt76x0_set_rx_chains(dev);
	mt76x0_set_tx_dac(dev);
}