1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17#include <linux/delay.h>
18#include "mt76x2.h"
19#include "mt76x2_mcu.h"
20#include "mt76x2_eeprom.h"
21
22static void
23mt76x2_adjust_high_lna_gain(struct mt76x2_dev *dev, int reg, s8 offset)
24{
25 s8 gain;
26
27 gain = FIELD_GET(MT_BBP_AGC_LNA_HIGH_GAIN, mt76_rr(dev, MT_BBP(AGC, reg)));
28 gain -= offset / 2;
29 mt76_rmw_field(dev, MT_BBP(AGC, reg), MT_BBP_AGC_LNA_HIGH_GAIN, gain);
30}
31
32static void
33mt76x2_adjust_agc_gain(struct mt76x2_dev *dev, int reg, s8 offset)
34{
35 s8 gain;
36
37 gain = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, reg)));
38 gain += offset;
39 mt76_rmw_field(dev, MT_BBP(AGC, reg), MT_BBP_AGC_GAIN, gain);
40}
41
42static void
43mt76x2_apply_gain_adj(struct mt76x2_dev *dev)
44{
45 s8 *gain_adj = dev->cal.rx.high_gain;
46
47 mt76x2_adjust_high_lna_gain(dev, 4, gain_adj[0]);
48 mt76x2_adjust_high_lna_gain(dev, 5, gain_adj[1]);
49
50 mt76x2_adjust_agc_gain(dev, 8, gain_adj[0]);
51 mt76x2_adjust_agc_gain(dev, 9, gain_adj[1]);
52}
53
54static u32
55mt76x2_tx_power_mask(u8 v1, u8 v2, u8 v3, u8 v4)
56{
57 u32 val = 0;
58
59 val |= (v1 & (BIT(6) - 1)) << 0;
60 val |= (v2 & (BIT(6) - 1)) << 8;
61 val |= (v3 & (BIT(6) - 1)) << 16;
62 val |= (v4 & (BIT(6) - 1)) << 24;
63 return val;
64}
65
66int mt76x2_phy_get_rssi(struct mt76x2_dev *dev, s8 rssi, int chain)
67{
68 struct mt76x2_rx_freq_cal *cal = &dev->cal.rx;
69
70 rssi += cal->rssi_offset[chain];
71 rssi -= cal->lna_gain;
72
73 return rssi;
74}
75
76static u8
77mt76x2_txpower_check(int value)
78{
79 if (value < 0)
80 return 0;
81 if (value > 0x2f)
82 return 0x2f;
83 return value;
84}
85
86static void
87mt76x2_add_rate_power_offset(struct mt76_rate_power *r, int offset)
88{
89 int i;
90
91 for (i = 0; i < sizeof(r->all); i++)
92 r->all[i] += offset;
93}
94
95static void
96mt76x2_limit_rate_power(struct mt76_rate_power *r, int limit)
97{
98 int i;
99
100 for (i = 0; i < sizeof(r->all); i++)
101 if (r->all[i] > limit)
102 r->all[i] = limit;
103}
104
/* Program TX power: combine EEPROM per-chain target power, bandwidth
 * deltas and the user-configured limit, then write the per-chain base
 * power and the per-rate power offset registers. */
void mt76x2_phy_set_txpower(struct mt76x2_dev *dev)
{
	enum nl80211_chan_width width = dev->mt76.chandef.width;
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	struct mt76x2_tx_power_info txp;
	int txp_0, txp_1, delta = 0;
	struct mt76_rate_power t = {};

	mt76x2_get_power_info(dev, &txp, chan);

	/* Bandwidth-dependent power delta from EEPROM */
	if (width == NL80211_CHAN_WIDTH_40)
		delta = txp.delta_bw40;
	else if (width == NL80211_CHAN_WIDTH_80)
		delta = txp.delta_bw80;

	/* Honor a user-configured power limit below the EEPROM target */
	if (txp.target_power > dev->txpower_conf)
		delta -= txp.target_power - dev->txpower_conf;

	/* Build absolute per-rate powers, clamp them to the configured
	 * limit, then convert back to offsets relative to the chain 0
	 * base power (including the bandwidth/limit delta). */
	mt76x2_get_rate_power(dev, &t, chan);
	mt76x2_add_rate_power_offset(&t, txp.chain[0].target_power +
				     txp.chain[0].delta);
	mt76x2_limit_rate_power(&t, dev->txpower_conf);
	dev->txpower_cur = mt76x2_get_max_rate_power(&t);
	mt76x2_add_rate_power_offset(&t, -(txp.chain[0].target_power +
					 txp.chain[0].delta + delta));
	dev->target_power = txp.chain[0].target_power;
	dev->target_power_delta[0] = txp.chain[0].delta + delta;
	dev->target_power_delta[1] = txp.chain[1].delta + delta;
	dev->rate_power = t;

	/* Per-chain base power, clamped to the valid hardware range */
	txp_0 = mt76x2_txpower_check(txp.chain[0].target_power +
				     txp.chain[0].delta + delta);

	txp_1 = mt76x2_txpower_check(txp.chain[1].target_power +
				     txp.chain[1].delta + delta);

	mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_0, txp_0);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_1, txp_1);

	/* Per-rate power offsets: four 6-bit values per register.
	 * NOTE(review): CFG_3/4 reuse the HT 0/2/4/6 entries and
	 * CFG_7-9 mix OFDM/HT/VHT entries; presumably these lanes are
	 * shared with STBC/VHT-MCS rates in the hardware register
	 * layout — verify against the vendor datasheet. */
	mt76_wr(dev, MT_TX_PWR_CFG_0,
		mt76x2_tx_power_mask(t.cck[0], t.cck[2], t.ofdm[0], t.ofdm[2]));
	mt76_wr(dev, MT_TX_PWR_CFG_1,
		mt76x2_tx_power_mask(t.ofdm[4], t.ofdm[6], t.ht[0], t.ht[2]));
	mt76_wr(dev, MT_TX_PWR_CFG_2,
		mt76x2_tx_power_mask(t.ht[4], t.ht[6], t.ht[8], t.ht[10]));
	mt76_wr(dev, MT_TX_PWR_CFG_3,
		mt76x2_tx_power_mask(t.ht[12], t.ht[14], t.ht[0], t.ht[2]));
	mt76_wr(dev, MT_TX_PWR_CFG_4,
		mt76x2_tx_power_mask(t.ht[4], t.ht[6], 0, 0));
	mt76_wr(dev, MT_TX_PWR_CFG_7,
		mt76x2_tx_power_mask(t.ofdm[6], t.vht[8], t.ht[6], t.vht[8]));
	mt76_wr(dev, MT_TX_PWR_CFG_8,
		mt76x2_tx_power_mask(t.ht[14], t.vht[8], t.vht[8], 0));
	mt76_wr(dev, MT_TX_PWR_CFG_9,
		mt76x2_tx_power_mask(t.ht[6], t.vht[8], t.vht[8], 0));
}
161
162static bool
163mt76x2_channel_silent(struct mt76x2_dev *dev)
164{
165 struct ieee80211_channel *chan = dev->mt76.chandef.chan;
166
167 return ((chan->flags & IEEE80211_CHAN_RADAR) &&
168 chan->dfs_state != NL80211_DFS_AVAILABLE);
169}
170
171static bool
172mt76x2_phy_tssi_init_cal(struct mt76x2_dev *dev)
173{
174 struct ieee80211_channel *chan = dev->mt76.chandef.chan;
175 u32 flag = 0;
176
177 if (!mt76x2_tssi_enabled(dev))
178 return false;
179
180 if (mt76x2_channel_silent(dev))
181 return false;
182
183 if (chan->band == NL80211_BAND_2GHZ)
184 flag |= BIT(0);
185
186 if (mt76x2_ext_pa_enabled(dev, chan->band))
187 flag |= BIT(8);
188
189 mt76x2_mcu_calibrate(dev, MCU_CAL_TSSI, flag);
190 dev->cal.tssi_cal_done = true;
191 return true;
192}
193
/* Run the per-channel MCU calibration sequence once per channel switch.
 * @mac_stopped: true if the caller has already stopped the MAC. */
static void
mt76x2_phy_channel_calibrate(struct mt76x2_dev *dev, bool mac_stopped)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	bool is_5ghz = chan->band == NL80211_BAND_5GHZ;

	if (dev->cal.channel_cal_done)
		return;

	/* No calibration transmissions on a silent (pre-CAC radar) channel */
	if (mt76x2_channel_silent(dev))
		return;

	if (!dev->cal.tssi_cal_done)
		mt76x2_phy_tssi_init_cal(dev);

	/* Stop the MAC for the duration of the calibration unless the
	 * caller did it already; resume below. */
	if (!mac_stopped)
		mt76x2_mac_stop(dev, false);

	if (is_5ghz)
		mt76x2_mcu_calibrate(dev, MCU_CAL_LC, 0);

	/* Band-dependent TX/RX calibrations, then sensor/shaping */
	mt76x2_mcu_calibrate(dev, MCU_CAL_TX_LOFT, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TXIQ, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_RXIQC_FI, is_5ghz);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TEMP_SENSOR, 0);
	mt76x2_mcu_calibrate(dev, MCU_CAL_TX_SHAPING, 0);

	if (!mac_stopped)
		mt76x2_mac_resume(dev);

	mt76x2_apply_gain_adj(dev);

	dev->cal.channel_cal_done = true;
}
228
/* Program band-specific PA mode, ALC and RF gain correction registers.
 * The magic values are vendor calibration constants; do not derive
 * meaning from them beyond the branch structure. */
static void
mt76x2_phy_set_txpower_regs(struct mt76x2_dev *dev, enum nl80211_band band)
{
	u32 pa_mode[2];
	u32 pa_mode_adj;

	if (band == NL80211_BAND_2GHZ) {
		pa_mode[0] = 0x010055ff;
		pa_mode[1] = 0x00550055;

		mt76_wr(dev, MT_TX_ALC_CFG_2, 0x35160a00);
		mt76_wr(dev, MT_TX_ALC_CFG_3, 0x35160a06);

		/* PA mode adjustment depends on internal vs external PA */
		if (mt76x2_ext_pa_enabled(dev, band)) {
			mt76_wr(dev, MT_RF_PA_MODE_ADJ0, 0x0000ec00);
			mt76_wr(dev, MT_RF_PA_MODE_ADJ1, 0x0000ec00);
		} else {
			mt76_wr(dev, MT_RF_PA_MODE_ADJ0, 0xf4000200);
			mt76_wr(dev, MT_RF_PA_MODE_ADJ1, 0xfa000200);
		}
	} else {
		pa_mode[0] = 0x0000ffff;
		pa_mode[1] = 0x00ff00ff;

		if (mt76x2_ext_pa_enabled(dev, band)) {
			mt76_wr(dev, MT_TX_ALC_CFG_2, 0x2f0f0400);
			mt76_wr(dev, MT_TX_ALC_CFG_3, 0x2f0f0476);
		} else {
			mt76_wr(dev, MT_TX_ALC_CFG_2, 0x1b0f0400);
			mt76_wr(dev, MT_TX_ALC_CFG_3, 0x1b0f0476);
		}
		mt76_wr(dev, MT_TX_ALC_CFG_4, 0);

		if (mt76x2_ext_pa_enabled(dev, band))
			pa_mode_adj = 0x04000000;
		else
			pa_mode_adj = 0;

		mt76_wr(dev, MT_RF_PA_MODE_ADJ0, pa_mode_adj);
		mt76_wr(dev, MT_RF_PA_MODE_ADJ1, pa_mode_adj);
	}

	mt76_wr(dev, MT_BB_PA_MODE_CFG0, pa_mode[0]);
	mt76_wr(dev, MT_BB_PA_MODE_CFG1, pa_mode[1]);
	mt76_wr(dev, MT_RF_PA_MODE_CFG0, pa_mode[0]);
	mt76_wr(dev, MT_RF_PA_MODE_CFG1, pa_mode[1]);

	/* RF gain correction and ALC_CFG_4 differ per band and PA type */
	if (mt76x2_ext_pa_enabled(dev, band)) {
		u32 val;

		if (band == NL80211_BAND_2GHZ)
			val = 0x3c3c023c;
		else
			val = 0x363c023c;

		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, val);
		mt76_wr(dev, MT_TX1_RF_GAIN_CORR, val);
		mt76_wr(dev, MT_TX_ALC_CFG_4, 0x00001818);
	} else {
		if (band == NL80211_BAND_2GHZ) {
			u32 val = 0x0f3c3c3c;

			mt76_wr(dev, MT_TX0_RF_GAIN_CORR, val);
			mt76_wr(dev, MT_TX1_RF_GAIN_CORR, val);
			mt76_wr(dev, MT_TX_ALC_CFG_4, 0x00000606);
		} else {
			mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x383c023c);
			mt76_wr(dev, MT_TX1_RF_GAIN_CORR, 0x24282e28);
			mt76_wr(dev, MT_TX_ALC_CFG_4, 0);
		}
	}
}
301
302static void
303mt76x2_configure_tx_delay(struct mt76x2_dev *dev, enum nl80211_band band, u8 bw)
304{
305 u32 cfg0, cfg1;
306
307 if (mt76x2_ext_pa_enabled(dev, band)) {
308 cfg0 = bw ? 0x000b0c01 : 0x00101101;
309 cfg1 = 0x00011414;
310 } else {
311 cfg0 = bw ? 0x000b0b01 : 0x00101001;
312 cfg1 = 0x00021414;
313 }
314 mt76_wr(dev, MT_TX_SW_CFG0, cfg0);
315 mt76_wr(dev, MT_TX_SW_CFG1, cfg1);
316
317 mt76_rmw_field(dev, MT_XIFS_TIME_CFG, MT_XIFS_TIME_CFG_OFDM_SIFS, 15);
318}
319
320static void
321mt76x2_phy_set_bw(struct mt76x2_dev *dev, int width, u8 ctrl)
322{
323 int core_val, agc_val;
324
325 switch (width) {
326 case NL80211_CHAN_WIDTH_80:
327 core_val = 3;
328 agc_val = 7;
329 break;
330 case NL80211_CHAN_WIDTH_40:
331 core_val = 2;
332 agc_val = 3;
333 break;
334 default:
335 core_val = 0;
336 agc_val = 1;
337 break;
338 }
339
340 mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
341 mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
342 mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
343 mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
344}
345
346static void
347mt76x2_phy_set_band(struct mt76x2_dev *dev, int band, bool primary_upper)
348{
349 switch (band) {
350 case NL80211_BAND_2GHZ:
351 mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
352 mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
353 break;
354 case NL80211_BAND_5GHZ:
355 mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
356 mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
357 break;
358 }
359
360 mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
361 primary_upper);
362}
363
/* Configure antenna/DAC routing according to dev->mt76.antenna_mask:
 * 1 = chain 0 only, 2 = chain 1 only, 3 (default) = both chains. */
void mt76x2_phy_set_antenna(struct mt76x2_dev *dev)
{
	u32 val;

	val = mt76_rr(dev, MT_BBP(AGC, 0));
	val &= ~(BIT(4) | BIT(1));
	switch (dev->mt76.antenna_mask) {
	case 1:
		/* NOTE(review): presumably disables MAC DAC control for
		 * single-chain operation — confirm against datasheet */
		mt76_clear(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_clear(dev, MT_BBP(TXBE, 5), 3);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0x3);
		mt76_rmw_field(dev, MT_BBP(CORE, 32), GENMASK(21, 20), 2);
		/* NOTE(review): looks like this parks/disables DAC 1 */
		mt76_rmw_field(dev, MT_BBP(CORE, 33), GENMASK(12, 9), 4);

		val &= ~(BIT(3) | BIT(0));
		break;
	case 2:
		/* NOTE(review): same MAC DAC control bit as case 1 */
		mt76_clear(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_rmw_field(dev, MT_BBP(TXBE, 5), 3, 1);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0xc);
		mt76_rmw_field(dev, MT_BBP(CORE, 32), GENMASK(21, 20), 1);
		/* NOTE(review): looks like this parks/disables DAC 0 */
		mt76_rmw_field(dev, MT_BBP(CORE, 33), GENMASK(12, 9), 1);

		val &= ~BIT(3);
		val |= BIT(0);
		break;
	case 3:
	default:
		/* Both chains: re-enable the bit cleared in cases 1/2 */
		mt76_set(dev, MT_BBP(IBI, 9), BIT(11));
		mt76_set(dev, MT_BBP(TXBE, 5), 3);
		mt76_rmw_field(dev, MT_TX_PIN_CFG, MT_TX_PIN_CFG_TXANT, 0xf);
		mt76_clear(dev, MT_BBP(CORE, 32), GENMASK(21, 20));
		mt76_clear(dev, MT_BBP(CORE, 33), GENMASK(12, 9));

		val &= ~BIT(0);
		val |= BIT(3);
		break;
	}
	mt76_wr(dev, MT_BBP(AGC, 0), val);
}
409
410static void
411mt76x2_get_agc_gain(struct mt76x2_dev *dev, u8 *dest)
412{
413 dest[0] = mt76_get_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN);
414 dest[1] = mt76_get_field(dev, MT_BBP(AGC, 9), MT_BBP_AGC_GAIN);
415}
416
417static int
418mt76x2_get_rssi_gain_thresh(struct mt76x2_dev *dev)
419{
420 switch (dev->mt76.chandef.width) {
421 case NL80211_CHAN_WIDTH_80:
422 return -62;
423 case NL80211_CHAN_WIDTH_40:
424 return -65;
425 default:
426 return -68;
427 }
428}
429
430static int
431mt76x2_get_low_rssi_gain_thresh(struct mt76x2_dev *dev)
432{
433 switch (dev->mt76.chandef.width) {
434 case NL80211_CHAN_WIDTH_80:
435 return -76;
436 case NL80211_CHAN_WIDTH_40:
437 return -79;
438 default:
439 return -82;
440 }
441}
442
443static void
444mt76x2_phy_set_gain_val(struct mt76x2_dev *dev)
445{
446 u32 val;
447 u8 gain_val[2];
448
449 gain_val[0] = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;
450 gain_val[1] = dev->cal.agc_gain_cur[1] - dev->cal.agc_gain_adjust;
451
452 if (dev->mt76.chandef.width >= NL80211_CHAN_WIDTH_40)
453 val = 0x1e42 << 16;
454 else
455 val = 0x1836 << 16;
456
457 val |= 0xf8;
458
459 mt76_wr(dev, MT_BBP(AGC, 8),
460 val | FIELD_PREP(MT_BBP_AGC_GAIN, gain_val[0]));
461 mt76_wr(dev, MT_BBP(AGC, 9),
462 val | FIELD_PREP(MT_BBP_AGC_GAIN, gain_val[1]));
463
464 if (dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR)
465 mt76x2_dfs_adjust_agc(dev);
466}
467
468static void
469mt76x2_phy_adjust_vga_gain(struct mt76x2_dev *dev)
470{
471 u32 false_cca;
472 u8 limit = dev->cal.low_gain > 1 ? 4 : 16;
473
474 false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS, mt76_rr(dev, MT_RX_STAT_1));
475 if (false_cca > 800 && dev->cal.agc_gain_adjust < limit)
476 dev->cal.agc_gain_adjust += 2;
477 else if (false_cca < 10 && dev->cal.agc_gain_adjust > 0)
478 dev->cal.agc_gain_adjust -= 2;
479 else
480 return;
481
482 mt76x2_phy_set_gain_val(dev);
483}
484
/* Periodic AGC update: track a smoothed RSSI, derive a low-gain level
 * from it and reprogram the RX gain registers when the level changes;
 * otherwise only fine-tune via the false-CCA based VGA adjustment. */
static void
mt76x2_phy_update_channel_gain(struct mt76x2_dev *dev)
{
	u32 val = mt76_rr(dev, MT_BBP(AGC, 20));
	int rssi0 = (s8) FIELD_GET(MT_BBP_AGC20_RSSI0, val);
	int rssi1 = (s8) FIELD_GET(MT_BBP_AGC20_RSSI1, val);
	u8 *gain = dev->cal.agc_gain_init;
	u8 gain_delta;
	int low_gain;

	/* Exponential moving average per chain, kept scaled by 256;
	 * avg_rssi_all is the two-chain mean back in dBm units. */
	dev->cal.avg_rssi[0] = (dev->cal.avg_rssi[0] * 15) / 16 + (rssi0 << 8);
	dev->cal.avg_rssi[1] = (dev->cal.avg_rssi[1] * 15) / 16 + (rssi1 << 8);
	dev->cal.avg_rssi_all = (dev->cal.avg_rssi[0] +
				 dev->cal.avg_rssi[1]) / 512;

	/* 0, 1 or 2 depending on how many thresholds the signal exceeds */
	low_gain = (dev->cal.avg_rssi_all > mt76x2_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x2_get_low_rssi_gain_thresh(dev));

	/* Unchanged level: just the incremental VGA tweak */
	if (dev->cal.low_gain == low_gain) {
		mt76x2_phy_adjust_vga_gain(dev);
		return;
	}

	dev->cal.low_gain = low_gain;

	if (dev->mt76.chandef.width == NL80211_CHAN_WIDTH_80)
		mt76_wr(dev, MT_BBP(RXO, 14), 0x00560211);
	else
		mt76_wr(dev, MT_BBP(RXO, 14), 0x00560423);

	if (low_gain) {
		/* Strong signal: reduce gain (less with an external LNA) */
		mt76_wr(dev, MT_BBP(RXO, 18), 0xf000a991);
		mt76_wr(dev, MT_BBP(AGC, 35), 0x08080808);
		mt76_wr(dev, MT_BBP(AGC, 37), 0x08080808);
		if (mt76x2_has_ext_lna(dev))
			gain_delta = 10;
		else
			gain_delta = 14;
	} else {
		/* Weak signal: run at the full initial gain */
		mt76_wr(dev, MT_BBP(RXO, 18), 0xf000a990);
		if (dev->mt76.chandef.width == NL80211_CHAN_WIDTH_80)
			mt76_wr(dev, MT_BBP(AGC, 35), 0x10101014);
		else
			mt76_wr(dev, MT_BBP(AGC, 35), 0x11111116);
		mt76_wr(dev, MT_BBP(AGC, 37), 0x2121262C);
		gain_delta = 0;
	}

	dev->cal.agc_gain_cur[0] = gain[0] - gain_delta;
	dev->cal.agc_gain_cur[1] = gain[1] - gain_delta;
	dev->cal.agc_gain_adjust = 0;
	mt76x2_phy_set_gain_val(dev);
}
538
/* Switch to a new channel: derive the hardware channel/bandwidth
 * parameters from the chandef, program band/bandwidth/power/CCA
 * registers, ask the MCU to tune, then (unless scanning) run the
 * per-channel calibration and schedule the periodic calibration work.
 * Returns 0 on success or the MCU error code. */
int mt76x2_phy_set_channel(struct mt76x2_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	struct ieee80211_channel *chan = chandef->chan;
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	enum nl80211_band band = chan->band;
	u8 channel;

	/* CCA engine routing per 20 MHz sub-channel position; entry i
	 * is used when the primary channel is the i-th 20 MHz slot. */
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	int ch_group_index;
	u8 bw, bw_index;
	int freq, freq1;
	int ret;

	dev->cal.channel_cal_done = false;
	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chan->hw_value;

	/* Translate the primary channel into the group center channel
	 * and find which 20 MHz slot the primary occupies. */
	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		bw = 1;
		if (freq1 > freq) {
			bw_index = 1;
			ch_group_index = 0;
		} else {
			bw_index = 3;
			ch_group_index = 1;
		}
		channel += 2 - ch_group_index * 4;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		bw = 2;
		bw_index = ch_group_index;
		channel += 6 - ch_group_index * 4;
		break;
	default:
		bw = 0;
		bw_index = 0;
		ch_group_index = 0;
		break;
	}

	mt76x2_read_rx_gain(dev);
	mt76x2_phy_set_txpower_regs(dev, band);
	mt76x2_configure_tx_delay(dev, band, bw);
	mt76x2_phy_set_txpower(dev);

	mt76x2_phy_set_band(dev, chan->band, ch_group_index & 1);
	mt76x2_phy_set_bw(dev, chandef->width, ch_group_index);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	ret = mt76x2_mcu_set_channel(dev, channel, bw, bw_index, scan);
	if (ret)
		return ret;

	mt76x2_mcu_init_gain(dev, channel, dev->cal.rx.mcu_gain, true);

	mt76x2_phy_set_antenna(dev);

	/* NOTE(review): presumably enables LDPC RX on >= E3 revisions —
	 * confirm against datasheet */
	if (mt76xx_rev(dev) >= MT76XX_REV_E3)
		mt76_set(dev, MT_BBP(RXO, 13), BIT(10));

	/* One-time calibrations on first channel set */
	if (!dev->cal.init_cal_done) {
		u8 val = mt76x2_eeprom_get(dev, MT_EE_BT_RCAL_RESULT);

		if (val != 0xff)
			mt76x2_mcu_calibrate(dev, MCU_CAL_R, 0);
	}

	mt76x2_mcu_calibrate(dev, MCU_CAL_RXDCOC, channel);

	/* NOTE(review): presumably the RX LPF calibration — one-shot */
	if (!dev->cal.init_cal_done)
		mt76x2_mcu_calibrate(dev, MCU_CAL_RC, 0);

	dev->cal.init_cal_done = true;

	mt76_wr(dev, MT_BBP(AGC, 61), 0xFF64A4E2);
	mt76_wr(dev, MT_BBP(AGC, 7), 0x08081010);
	mt76_wr(dev, MT_BBP(AGC, 11), 0x00000404);
	mt76_wr(dev, MT_BBP(AGC, 2), 0x00007070);
	mt76_wr(dev, MT_TXOP_CTRL_CFG, 0x04101B3F);

	/* Skip the heavier calibration while scanning */
	if (scan)
		return 0;

	dev->cal.low_gain = -1;
	mt76x2_phy_channel_calibrate(dev, true);
	mt76x2_get_agc_gain(dev, dev->cal.agc_gain_init);
	memcpy(dev->cal.agc_gain_cur, dev->cal.agc_gain_init,
	       sizeof(dev->cal.agc_gain_cur));

	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);

	return 0;
}
669
/* Two-phase TSSI compensation state machine, driven from the periodic
 * calibration work: first invocation triggers a TSSI measurement,
 * the next one (once the hardware has finished) applies the
 * compensation and kicks off a one-shot DPD calibration. */
static void
mt76x2_phy_tssi_compensate(struct mt76x2_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	struct mt76x2_tx_power_info txp;
	struct mt76x2_tssi_comp t = {};

	if (!dev->cal.tssi_cal_done)
		return;

	if (!dev->cal.tssi_comp_pending) {
		/* Phase 1: trigger the TSSI measurement */
		t.cal_mode = BIT(0);
		mt76x2_mcu_tssi_comp(dev, &t);
		dev->cal.tssi_comp_pending = true;
	} else {
		/* Phase 2: hardware still busy? Try again next round */
		if (mt76_rr(dev, MT_BBP(CORE, 34)) & BIT(4))
			return;

		dev->cal.tssi_comp_pending = false;
		mt76x2_get_power_info(dev, &txp, chan);

		if (mt76x2_ext_pa_enabled(dev, chan->band))
			t.pa_mode = 1;

		/* Apply slope/offset compensation from EEPROM data */
		t.cal_mode = BIT(1);
		t.slope0 = txp.chain[0].tssi_slope;
		t.offset0 = txp.chain[0].tssi_offset;
		t.slope1 = txp.chain[1].tssi_slope;
		t.offset1 = txp.chain[1].tssi_offset;
		mt76x2_mcu_tssi_comp(dev, &t);

		/* DPD: only once, and only without an external PA */
		if (t.pa_mode || dev->cal.dpd_cal_done)
			return;

		usleep_range(10000, 20000);
		mt76x2_mcu_calibrate(dev, MCU_CAL_DPD, chan->hw_value);
		dev->cal.dpd_cal_done = true;
	}
}
710
711static void
712mt76x2_phy_temp_compensate(struct mt76x2_dev *dev)
713{
714 struct mt76x2_temp_comp t;
715 int temp, db_diff;
716
717 if (mt76x2_get_temp_comp(dev, &t))
718 return;
719
720 temp = mt76_get_field(dev, MT_TEMP_SENSOR, MT_TEMP_SENSOR_VAL);
721 temp -= t.temp_25_ref;
722 temp = (temp * 1789) / 1000 + 25;
723 dev->cal.temp = temp;
724
725 if (temp > 25)
726 db_diff = (temp - 25) / t.high_slope;
727 else
728 db_diff = (25 - temp) / t.low_slope;
729
730 db_diff = min(db_diff, t.upper_bound);
731 db_diff = max(db_diff, t.lower_bound);
732
733 mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP,
734 db_diff * 2);
735 mt76_rmw_field(dev, MT_TX_ALC_CFG_2, MT_TX_ALC_CFG_2_TEMP_COMP,
736 db_diff * 2);
737}
738
/* Periodic calibration worker: runs the per-channel calibration (if
 * still pending), TSSI and temperature compensation and the AGC gain
 * update, then re-arms itself. */
void mt76x2_phy_calibrate(struct work_struct *work)
{
	struct mt76x2_dev *dev;

	dev = container_of(work, struct mt76x2_dev, cal_work.work);
	mt76x2_phy_channel_calibrate(dev, false);
	mt76x2_phy_tssi_compensate(dev);
	mt76x2_phy_temp_compensate(dev);
	mt76x2_phy_update_channel_gain(dev);
	ieee80211_queue_delayed_work(mt76_hw(dev), &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);
}
751
752int mt76x2_phy_start(struct mt76x2_dev *dev)
753{
754 int ret;
755
756 ret = mt76x2_mcu_set_radio_state(dev, true);
757 if (ret)
758 return ret;
759
760 mt76x2_mcu_load_cr(dev, MT_RF_BBP_CR, 0, 0);
761
762 return ret;
763}
764