/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include <linux/delay.h>
#include "mt76x2.h"
#include "mt76x2_mcu.h"
#include "mt76x2_eeprom.h"

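/*
 * Trigger the initial TSSI calibration in the MCU. The flag word encodes
 * the band (BIT(0) for 5 GHz) and whether an external PA is in use (BIT(8)).
 * Skipped when TSSI is disabled or the channel is silent.
 */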
static bool
mt76x2_phy_tssi_init_cal(struct mt76x2_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 flag = 0;

	if (!mt76x2_tssi_enabled(dev))
		return false;

	if (mt76x2_channel_silent(dev))
		return false;

	if (chan->band == NL80211_BAND_5GHZ)
		flag |= BIT(0);

	if (mt76x2_ext_pa_enabled(dev, chan->band))
		flag |= BIT(8);

	mt76x2_mcu_calibrate(dev, MCU_CAL_TSSI, flag);
	dev->cal.tssi_cal_done = true;
	return true;
}

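/*
 * Run the per-channel MCU calibrations (LC on 5 GHz, TX LOFT, TX/RX IQ,
 * temperature sensor and TX shaping). Unless the caller already stopped
 * the MAC, it is stopped for the duration of the calibration.
 */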
static void
mt76x2_phy_channel_calibrate(struct mt76x2_dev *dev, bool mac_stopped)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	bool is_5ghz = chan->band == NL80211_BAND_5GHZ;

	if (dev->cal.channel_cal_done)
		return;

	if (mt76x2_channel_silent(dev))
		return;

	if (!dev->cal.tssi_cal_done)
		mt76x2_phy_tssi_init_cal(dev);

	if (!mac_stopped)
		mt76x2_mac_stop(dev, false);

	if (is_5ghz)
		mt76x2_mcu_calibrate(dev, MCU_CAL_LC, 0);

	mt76x2_mcu_calibrate(dev, MCU_CAL_TX_LOFT, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_RXIQC_FI, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TEMP_SENSOR, 0);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TX_SHAPING, 0);

	if (!mac_stopped)
		mt76x2_mac_resume(dev);

	mt76x2_apply_gain_adj(dev);

	dev->cal.channel_cal_done = true;
}

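/*
 * Apply the RX/TX chain selection from dev->mt76.antenna_mask:
 * 1 = chain 0 only, 2 = chain 1 only, 3 (default) = both chains.
 */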
void mt76x2_phy_set_antenna(struct mt76x2_dev *dev)
{
	u32 val;

	val = mt76_rr(dev, MT_BBP(AGC, 0));
	val &= ~(BIT(4) | BIT(1));
	switch (dev->mt76.antenna_mask) {
	case 1:
		/* disable mac DAC control */
		mt76_clear(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_clear(dev, MT_BBP(TXBE, 5), 3);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0x3);
		mt76_rmw_field(dev, MT_BBP(CORE, 32), GENMASK(21, 20), 2);
		/* disable DAC 1 */
		mt76_rmw_field(dev, MT_BBP(CORE, 33), GENMASK(12, 9), 4);

		val &= ~(BIT(3) | BIT(0));
		break;
	case 2:
		/* disable mac DAC control */
		mt76_clear(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_rmw_field(dev, MT_BBP(TXBE, 5), 3, 1);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0xc);
		mt76_rmw_field(dev, MT_BBP(CORE, 32), GENMASK(21, 20), 1);
		/* disable DAC 0 */
		mt76_rmw_field(dev, MT_BBP(CORE, 33), GENMASK(12, 9), 1);

		val &= ~BIT(3);
		val |= BIT(0);
		break;
	case 3:
	default:
		/* enable mac DAC control */
		mt76_set(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_set(dev, MT_BBP(TXBE, 5), 3);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0xf);
		mt76_clear(dev, MT_BBP(CORE, 32), GENMASK(21, 20));
		mt76_clear(dev, MT_BBP(CORE, 33), GENMASK(12, 9));

		val &= ~BIT(0);
		val |= BIT(3);
		break;
	}
	mt76_wr(dev, MT_BBP(AGC, 0), val);
}

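/* Read back the current AGC gain of both RX chains */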
static void
mt76x2_get_agc_gain(struct mt76x2_dev *dev, u8 *dest)
{
	dest[0] = mt76_get_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN);
	dest[1] = mt76_get_field(dev, MT_BBP(AGC, 9), MT_BBP_AGC_GAIN);
}

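/* RSSI thresholds (in dBm) used to select the AGC gain range per bandwidth */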
static int
mt76x2_get_rssi_gain_thresh(struct mt76x2_dev *dev)
{
	switch (dev->mt76.chandef.width) {
	case NL80211_CHAN_WIDTH_80:
		return -62;
	case NL80211_CHAN_WIDTH_40:
		return -65;
	default:
		return -68;
	}
}

static int
mt76x2_get_low_rssi_gain_thresh(struct mt76x2_dev *dev)
{
	switch (dev->mt76.chandef.width) {
	case NL80211_CHAN_WIDTH_80:
		return -76;
	case NL80211_CHAN_WIDTH_40:
		return -79;
	default:
		return -82;
	}
}

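/*
 * Program the AGC gain registers with the current gain minus the false-CCA
 * based adjustment, and let the DFS code re-tune on radar channels.
 */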
static void
mt76x2_phy_set_gain_val(struct mt76x2_dev *dev)
{
	u32 val;
	u8 gain_val[2];

	gain_val[0] = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;
	gain_val[1] = dev->cal.agc_gain_cur[1] - dev->cal.agc_gain_adjust;

	if (dev->mt76.chandef.width >= NL80211_CHAN_WIDTH_40)
		val = 0x1e42 << 16;
	else
		val = 0x1836 << 16;

	val |= 0xf8;

	mt76_wr(dev, MT_BBP(AGC, 8),
		val | FIELD_PREP(MT_BBP_AGC_GAIN, gain_val[0]));
	mt76_wr(dev, MT_BBP(AGC, 9),
		val | FIELD_PREP(MT_BBP_AGC_GAIN, gain_val[1]));

	if (dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR)
		mt76x2_dfs_adjust_agc(dev);
}

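/*
 * Tune the VGA gain adjustment based on the false CCA counter: back the
 * gain off when false CCAs are frequent, restore it when they are rare.
 */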
static void
mt76x2_phy_adjust_vga_gain(struct mt76x2_dev *dev)
{
	u32 false_cca;
	u8 limit = dev->cal.low_gain > 0 ? 16 : 4;

	false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS,
			      mt76_rr(dev, MT_RX_STAT_1));
	dev->cal.false_cca = false_cca;
	if (false_cca > 800 && dev->cal.agc_gain_adjust < limit)
		dev->cal.agc_gain_adjust += 2;
	else if ((false_cca < 10 && dev->cal.agc_gain_adjust > 0) ||
		 (dev->cal.agc_gain_adjust >= limit && false_cca < 500))
		dev->cal.agc_gain_adjust -= 2;
	else
		return;

	mt76x2_phy_set_gain_val(dev);
}

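/*
 * Periodic AGC update: derive a low-gain state from the minimum average
 * RSSI and fully reprogram the gain registers when the strong-signal state
 * (low_gain == 2) toggles; otherwise only run the incremental VGA tweak.
 */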
static void
mt76x2_phy_update_channel_gain(struct mt76x2_dev *dev)
{
	u8 *gain = dev->cal.agc_gain_init;
	u8 low_gain_delta, gain_delta;
	bool gain_change;
	int low_gain;
	u32 val;

	dev->cal.avg_rssi_all = mt76x2_phy_get_min_avg_rssi(dev);

	low_gain = (dev->cal.avg_rssi_all > mt76x2_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x2_get_low_rssi_gain_thresh(dev));

	gain_change = (dev->cal.low_gain & 2) ^ (low_gain & 2);
	dev->cal.low_gain = low_gain;

	if (!gain_change) {
		mt76x2_phy_adjust_vga_gain(dev);
		return;
	}

	if (dev->mt76.chandef.width == NL80211_CHAN_WIDTH_80) {
		mt76_wr(dev, MT_BBP(RXO, 14), 0x00560211);
		val = mt76_rr(dev, MT_BBP(AGC, 26)) & ~0xf;
		if (low_gain == 2)
			val |= 0x3;
		else
			val |= 0x5;
		mt76_wr(dev, MT_BBP(AGC, 26), val);
	} else {
		mt76_wr(dev, MT_BBP(RXO, 14), 0x00560423);
	}

	if (mt76x2_has_ext_lna(dev))
		low_gain_delta = 10;
	else
		low_gain_delta = 14;

	if (low_gain == 2) {
		mt76_wr(dev, MT_BBP(RXO, 18), 0xf000a990);
		mt76_wr(dev, MT_BBP(AGC, 35), 0x08080808);
		mt76_wr(dev, MT_BBP(AGC, 37), 0x08080808);
		gain_delta = low_gain_delta;
		dev->cal.agc_gain_adjust = 0;
	} else {
		mt76_wr(dev, MT_BBP(RXO, 18), 0xf000a991);
		if (dev->mt76.chandef.width == NL80211_CHAN_WIDTH_80)
			mt76_wr(dev, MT_BBP(AGC, 35), 0x10101014);
		else
			mt76_wr(dev, MT_BBP(AGC, 35), 0x11111116);
		mt76_wr(dev, MT_BBP(AGC, 37), 0x2121262C);
		gain_delta = 0;
		dev->cal.agc_gain_adjust = low_gain_delta;
	}

	dev->cal.agc_gain_cur[0] = gain[0] - gain_delta;
	dev->cal.agc_gain_cur[1] = gain[1] - gain_delta;
	mt76x2_phy_set_gain_val(dev);

	/* clear false CCA counters */
	mt76_rr(dev, MT_RX_STAT_1);
}

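/*
 * Switch to a new channel: work out the bandwidth and control channel
 * indices, program TX power, band and bandwidth settings, let the MCU
 * switch the channel and run the one-time and per-channel calibrations.
 */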
int mt76x2_phy_set_channel(struct mt76x2_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	struct ieee80211_channel *chan = chandef->chan;
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	enum nl80211_band band = chan->band;
	u8 channel;

	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	int ch_group_index;
	u8 bw, bw_index;
	int freq, freq1;
	int ret;

	dev->cal.channel_cal_done = false;
	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chan->hw_value;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		bw = 1;
		if (freq1 > freq) {
			bw_index = 1;
			ch_group_index = 0;
		} else {
			bw_index = 3;
			ch_group_index = 1;
		}
		channel += 2 - ch_group_index * 4;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		bw = 2;
		bw_index = ch_group_index;
		channel += 6 - ch_group_index * 4;
		break;
	default:
		bw = 0;
		bw_index = 0;
		ch_group_index = 0;
		break;
	}

	mt76x2_read_rx_gain(dev);
	mt76x2_phy_set_txpower_regs(dev, band);
	mt76x2_configure_tx_delay(dev, band, bw);
	mt76x2_phy_set_txpower(dev);

	mt76x2_phy_set_band(dev, chan->band, ch_group_index & 1);
	mt76x2_phy_set_bw(dev, chandef->width, ch_group_index);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	ret = mt76x2_mcu_set_channel(dev, channel, bw, bw_index, scan);
	if (ret)
		return ret;

	mt76x2_mcu_init_gain(dev, channel, dev->cal.rx.mcu_gain, true);

	mt76x2_phy_set_antenna(dev);

	/* Enable LDPC Rx */
	if (mt76xx_rev(dev) >= MT76XX_REV_E3)
		mt76_set(dev, MT_BBP(RXO, 13), BIT(10));

	if (!dev->cal.init_cal_done) {
		u8 val = mt76x2_eeprom_get(dev, MT_EE_BT_RCAL_RESULT);

		if (val != 0xff)
			mt76x2_mcu_calibrate(dev, MCU_CAL_R, 0);
	}

	mt76x2_mcu_calibrate(dev, MCU_CAL_RXDCOC, channel);

	/* Rx LPF calibration */
	if (!dev->cal.init_cal_done)
		mt76x2_mcu_calibrate(dev, MCU_CAL_RC, 0);

	dev->cal.init_cal_done = true;

	mt76_wr(dev, MT_BBP(AGC, 61), 0xFF64A4E2);
	mt76_wr(dev, MT_BBP(AGC, 7), 0x08081010);
	mt76_wr(dev, MT_BBP(AGC, 11), 0x00000404);
	mt76_wr(dev, MT_BBP(AGC, 2), 0x00007070);
	mt76_wr(dev, MT_TXOP_CTRL_CFG, 0x04101B3F);

	if (scan)
		return 0;

	dev->cal.low_gain = -1;
	mt76x2_phy_channel_calibrate(dev, true);
	mt76x2_get_agc_gain(dev, dev->cal.agc_gain_init);
	memcpy(dev->cal.agc_gain_cur, dev->cal.agc_gain_init,
	       sizeof(dev->cal.agc_gain_cur));

	/* init default values for temp compensation */
	if (mt76x2_tssi_enabled(dev)) {
		mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP,
			       0x38);
		mt76_rmw_field(dev, MT_TX_ALC_CFG_2, MT_TX_ALC_CFG_2_TEMP_COMP,
			       0x38);
	}

	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);

	return 0;
}

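/*
 * Two-step TSSI compensation: the first pass triggers a measurement in the
 * MCU, the next pass (once BIT(4) in MT_BBP(CORE, 34) clears) programs the
 * per-chain slope/offset from the EEPROM power data. DPD calibration is run
 * once afterwards when no external PA is in use.
 */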
static void
mt76x2_phy_tssi_compensate(struct mt76x2_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	struct mt76x2_tx_power_info txp;
	struct mt76x2_tssi_comp t = {};

	if (!dev->cal.tssi_cal_done)
		return;

	if (!dev->cal.tssi_comp_pending) {
		/* TSSI trigger */
		t.cal_mode = BIT(0);
		mt76x2_mcu_tssi_comp(dev, &t);
		dev->cal.tssi_comp_pending = true;
	} else {
		if (mt76_rr(dev, MT_BBP(CORE, 34)) & BIT(4))
			return;

		dev->cal.tssi_comp_pending = false;
		mt76x2_get_power_info(dev, &txp, chan);

		if (mt76x2_ext_pa_enabled(dev, chan->band))
			t.pa_mode = 1;

		t.cal_mode = BIT(1);
		t.slope0 = txp.chain[0].tssi_slope;
		t.offset0 = txp.chain[0].tssi_offset;
		t.slope1 = txp.chain[1].tssi_slope;
		t.offset1 = txp.chain[1].tssi_offset;
		mt76x2_mcu_tssi_comp(dev, &t);

		if (t.pa_mode || dev->cal.dpd_cal_done)
			return;

		usleep_range(10000, 20000);
		mt76x2_mcu_calibrate(dev, MCU_CAL_DPD, chan->hw_value);
		dev->cal.dpd_cal_done = true;
	}
}

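/*
 * Temperature compensation: convert the sensor reading to degrees relative
 * to the 25 degree reference point, derive a dB correction from the
 * high/low slopes, clamp it to the configured bounds and program the
 * TX ALC temperature compensation fields accordingly.
 */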
static void
mt76x2_phy_temp_compensate(struct mt76x2_dev *dev)
{
	struct mt76x2_temp_comp t;
	int temp, db_diff;

	if (mt76x2_get_temp_comp(dev, &t))
		return;

	temp = mt76_get_field(dev, MT_TEMP_SENSOR, MT_TEMP_SENSOR_VAL);
	temp -= t.temp_25_ref;
	temp = (temp * 1789) / 1000 + 25;
	dev->cal.temp = temp;

	if (temp > 25)
		db_diff = (temp - 25) / t.high_slope;
	else
		db_diff = (25 - temp) / t.low_slope;

	db_diff = min(db_diff, t.upper_bound);
	db_diff = max(db_diff, t.lower_bound);

	mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP,
		       db_diff * 2);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_2, MT_TX_ALC_CFG_2_TEMP_COMP,
		       db_diff * 2);
}

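/*
 * Periodic calibration work: run channel calibration, TSSI and temperature
 * compensation and the AGC gain update, then reschedule after
 * MT_CALIBRATE_INTERVAL.
 */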
void mt76x2_phy_calibrate(struct work_struct *work)
{
	struct mt76x2_dev *dev;

	dev = container_of(work, struct mt76x2_dev, cal_work.work);
	mt76x2_phy_channel_calibrate(dev, false);
	mt76x2_phy_tssi_compensate(dev);
	mt76x2_phy_temp_compensate(dev);
	mt76x2_phy_update_channel_gain(dev);
	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}

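/* Power up the radio through the MCU and load the RF/BBP CR values */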
int mt76x2_phy_start(struct mt76x2_dev *dev)
{
	int ret;

	ret = mt76x2_mcu_set_radio_state(dev, true);
	if (ret)
		return ret;

	mt76x2_mcu_load_cr(dev, MT_RF_BBP_CR, 0, 0);

	return ret;
}