1
2
3
4
5#include <linux/bcd.h>
6
7#include "main.h"
8#include "reg.h"
9#include "fw.h"
10#include "phy.h"
11#include "debug.h"
12
/* One register write from a conditional PHY parameter table. */
struct phy_cfg_pair {
	u32 addr;
	u32 data;
};

/* A PHY parameter-table entry: either a branch condition word or an
 * addr/data write pair; which one is decided while parsing (see
 * rtw_parse_tbl_phy_cond()).
 */
union phy_table_tile {
	struct rtw_phy_cond cond;
	struct phy_cfg_pair cfg;
};
22
/*
 * db_invert_table[i][j] approximates 10^((8 * i + j + 1) / 10), i.e. the
 * linear power corresponding to 1..96 "dB" steps; used by
 * rtw_phy_db_2_linear() and rtw_phy_linear_2_db().  The first three rows
 * are stored pre-scaled by 2^FRAC_BITS (x8) to keep fractional precision
 * for small values.
 */
static const u32 db_invert_table[12][8] = {
	{10, 13, 16, 20,
	 25, 32, 40, 50},
	{64, 80, 101, 128,
	 160, 201, 256, 318},
	{401, 505, 635, 800,
	 1007, 1268, 1596, 2010},
	{316, 398, 501, 631,
	 794, 1000, 1259, 1585},
	{1995, 2512, 3162, 3981,
	 5012, 6310, 7943, 10000},
	{12589, 15849, 19953, 25119,
	 31623, 39811, 50119, 63098},
	{79433, 100000, 125893, 158489,
	 199526, 251189, 316228, 398107},
	{501187, 630957, 794328, 1000000,
	 1258925, 1584893, 1995262, 2511886},
	/* 10^7.0 is 10000000 (the table previously read 1000000, which
	 * broke the monotonic 10^0.1 progression of this row)
	 */
	{3162278, 3981072, 5011872, 6309573,
	 7943282, 10000000, 12589254, 15848932},
	{19952623, 25118864, 31622777, 39810717,
	 50118723, 63095734, 79432823, 100000000},
	{125892541, 158489319, 199526232, 251188643,
	 316227766, 398107171, 501187234, 630957345},
	{794328235, 1000000000, 1258925412, 1584893192,
	 1995262315, 2511886432U, 3162277660U, 3981071706U}
};
49
/* Descriptor-rate groups, one array per rate section (CCK, OFDM, HT 1SS/2SS,
 * VHT 1SS/2SS), each ordered from lowest to highest rate.
 */
u8 rtw_cck_rates[] = { DESC_RATE1M, DESC_RATE2M, DESC_RATE5_5M, DESC_RATE11M };
u8 rtw_ofdm_rates[] = {
	DESC_RATE6M, DESC_RATE9M, DESC_RATE12M,
	DESC_RATE18M, DESC_RATE24M, DESC_RATE36M,
	DESC_RATE48M, DESC_RATE54M
};
u8 rtw_ht_1s_rates[] = {
	DESC_RATEMCS0, DESC_RATEMCS1, DESC_RATEMCS2,
	DESC_RATEMCS3, DESC_RATEMCS4, DESC_RATEMCS5,
	DESC_RATEMCS6, DESC_RATEMCS7
};
u8 rtw_ht_2s_rates[] = {
	DESC_RATEMCS8, DESC_RATEMCS9, DESC_RATEMCS10,
	DESC_RATEMCS11, DESC_RATEMCS12, DESC_RATEMCS13,
	DESC_RATEMCS14, DESC_RATEMCS15
};
u8 rtw_vht_1s_rates[] = {
	DESC_RATEVHT1SS_MCS0, DESC_RATEVHT1SS_MCS1,
	DESC_RATEVHT1SS_MCS2, DESC_RATEVHT1SS_MCS3,
	DESC_RATEVHT1SS_MCS4, DESC_RATEVHT1SS_MCS5,
	DESC_RATEVHT1SS_MCS6, DESC_RATEVHT1SS_MCS7,
	DESC_RATEVHT1SS_MCS8, DESC_RATEVHT1SS_MCS9
};
u8 rtw_vht_2s_rates[] = {
	DESC_RATEVHT2SS_MCS0, DESC_RATEVHT2SS_MCS1,
	DESC_RATEVHT2SS_MCS2, DESC_RATEVHT2SS_MCS3,
	DESC_RATEVHT2SS_MCS4, DESC_RATEVHT2SS_MCS5,
	DESC_RATEVHT2SS_MCS6, DESC_RATEVHT2SS_MCS7,
	DESC_RATEVHT2SS_MCS8, DESC_RATEVHT2SS_MCS9
};
/* The groups above collected into a table indexed by rate section. */
u8 *rtw_rate_section[RTW_RATE_SECTION_MAX] = {
	rtw_cck_rates, rtw_ofdm_rates,
	rtw_ht_1s_rates, rtw_ht_2s_rates,
	rtw_vht_1s_rates, rtw_vht_2s_rates
};
EXPORT_SYMBOL(rtw_rate_section);

/* Element count of each group in rtw_rate_section, same indexing. */
u8 rtw_rate_size[RTW_RATE_SECTION_MAX] = {
	ARRAY_SIZE(rtw_cck_rates),
	ARRAY_SIZE(rtw_ofdm_rates),
	ARRAY_SIZE(rtw_ht_1s_rates),
	ARRAY_SIZE(rtw_ht_2s_rates),
	ARRAY_SIZE(rtw_vht_1s_rates),
	ARRAY_SIZE(rtw_vht_2s_rates)
};
EXPORT_SYMBOL(rtw_rate_size);

/* File-local copies of the group sizes for convenience. */
static const u8 rtw_cck_size = ARRAY_SIZE(rtw_cck_rates);
static const u8 rtw_ofdm_size = ARRAY_SIZE(rtw_ofdm_rates);
static const u8 rtw_ht_1s_size = ARRAY_SIZE(rtw_ht_1s_rates);
static const u8 rtw_ht_2s_size = ARRAY_SIZE(rtw_ht_2s_rates);
static const u8 rtw_vht_1s_size = ARRAY_SIZE(rtw_vht_1s_rates);
static const u8 rtw_vht_2s_size = ARRAY_SIZE(rtw_vht_2s_rates);
103
/* Band selector used when parsing band-dependent PHY parameter tables. */
enum rtw_phy_band_type {
	PHY_BAND_2G = 0,
	PHY_BAND_5G = 1,
};
108
109static void rtw_phy_cck_pd_init(struct rtw_dev *rtwdev)
110{
111 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
112 u8 i, j;
113
114 for (i = 0; i <= RTW_CHANNEL_WIDTH_40; i++) {
115 for (j = 0; j < RTW_RF_PATH_MAX; j++)
116 dm_info->cck_pd_lv[i][j] = CCK_PD_LV0;
117 }
118
119 dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
120}
121
122static void rtw_phy_cfo_init(struct rtw_dev *rtwdev)
123{
124 struct rtw_chip_info *chip = rtwdev->chip;
125
126 if (chip->ops->cfo_init)
127 chip->ops->cfo_init(rtwdev);
128}
129
130static void rtw_phy_tx_path_div_init(struct rtw_dev *rtwdev)
131{
132 struct rtw_path_div *path_div = &rtwdev->dm_path_div;
133
134 path_div->current_tx_path = rtwdev->chip->default_1ss_tx_path;
135 path_div->path_a_cnt = 0;
136 path_div->path_a_sum = 0;
137 path_div->path_b_cnt = 0;
138 path_div->path_b_sum = 0;
139}
140
141void rtw_phy_init(struct rtw_dev *rtwdev)
142{
143 struct rtw_chip_info *chip = rtwdev->chip;
144 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
145 u32 addr, mask;
146
147 dm_info->fa_history[3] = 0;
148 dm_info->fa_history[2] = 0;
149 dm_info->fa_history[1] = 0;
150 dm_info->fa_history[0] = 0;
151 dm_info->igi_bitmap = 0;
152 dm_info->igi_history[3] = 0;
153 dm_info->igi_history[2] = 0;
154 dm_info->igi_history[1] = 0;
155
156 addr = chip->dig[0].addr;
157 mask = chip->dig[0].mask;
158 dm_info->igi_history[0] = rtw_read32_mask(rtwdev, addr, mask);
159 rtw_phy_cck_pd_init(rtwdev);
160
161 dm_info->iqk.done = false;
162 rtw_phy_cfo_init(rtwdev);
163 rtw_phy_tx_path_div_init(rtwdev);
164}
165EXPORT_SYMBOL(rtw_phy_init);
166
167void rtw_phy_dig_write(struct rtw_dev *rtwdev, u8 igi)
168{
169 struct rtw_chip_info *chip = rtwdev->chip;
170 struct rtw_hal *hal = &rtwdev->hal;
171 u32 addr, mask;
172 u8 path;
173
174 if (chip->dig_cck) {
175 const struct rtw_hw_reg *dig_cck = &chip->dig_cck[0];
176 rtw_write32_mask(rtwdev, dig_cck->addr, dig_cck->mask, igi >> 1);
177 }
178
179 for (path = 0; path < hal->rf_path_num; path++) {
180 addr = chip->dig[path].addr;
181 mask = chip->dig[path].mask;
182 rtw_write32_mask(rtwdev, addr, mask, igi);
183 }
184}
185
186static void rtw_phy_stat_false_alarm(struct rtw_dev *rtwdev)
187{
188 struct rtw_chip_info *chip = rtwdev->chip;
189
190 chip->ops->false_alarm_statistics(rtwdev);
191}
192
193#define RA_FLOOR_TABLE_SIZE 7
194#define RA_FLOOR_UP_GAP 3
195
196static u8 rtw_phy_get_rssi_level(u8 old_level, u8 rssi)
197{
198 u8 table[RA_FLOOR_TABLE_SIZE] = {20, 34, 38, 42, 46, 50, 100};
199 u8 new_level = 0;
200 int i;
201
202 for (i = 0; i < RA_FLOOR_TABLE_SIZE; i++)
203 if (i >= old_level)
204 table[i] += RA_FLOOR_UP_GAP;
205
206 for (i = 0; i < RA_FLOOR_TABLE_SIZE; i++) {
207 if (rssi < table[i]) {
208 new_level = i;
209 break;
210 }
211 }
212
213 return new_level;
214}
215
/* Context for the per-station RSSI statistics iterator. */
struct rtw_phy_stat_iter_data {
	struct rtw_dev *rtwdev;
	u8 min_rssi;	/* lowest average RSSI seen across iterated stations */
};
220
221static void rtw_phy_stat_rssi_iter(void *data, struct ieee80211_sta *sta)
222{
223 struct rtw_phy_stat_iter_data *iter_data = data;
224 struct rtw_dev *rtwdev = iter_data->rtwdev;
225 struct rtw_sta_info *si = (struct rtw_sta_info *)sta->drv_priv;
226 u8 rssi;
227
228 rssi = ewma_rssi_read(&si->avg_rssi);
229 si->rssi_level = rtw_phy_get_rssi_level(si->rssi_level, rssi);
230
231 rtw_fw_send_rssi_info(rtwdev, si);
232
233 iter_data->min_rssi = min_t(u8, rssi, iter_data->min_rssi);
234}
235
236static void rtw_phy_stat_rssi(struct rtw_dev *rtwdev)
237{
238 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
239 struct rtw_phy_stat_iter_data data = {};
240
241 data.rtwdev = rtwdev;
242 data.min_rssi = U8_MAX;
243 rtw_iterate_stas_atomic(rtwdev, rtw_phy_stat_rssi_iter, &data);
244
245 dm_info->pre_min_rssi = dm_info->min_rssi;
246 dm_info->min_rssi = data.min_rssi;
247}
248
249static void rtw_phy_stat_rate_cnt(struct rtw_dev *rtwdev)
250{
251 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
252
253 dm_info->last_pkt_count = dm_info->cur_pkt_count;
254 memset(&dm_info->cur_pkt_count, 0, sizeof(dm_info->cur_pkt_count));
255}
256
/* Gather this round's PHY statistics (RSSI, false alarms, rate counters);
 * the DIG / CCK PD / RA engines below consume them.
 */
static void rtw_phy_statistics(struct rtw_dev *rtwdev)
{
	rtw_phy_stat_rssi(rtwdev);
	rtw_phy_stat_false_alarm(rtwdev);
	rtw_phy_stat_rate_cnt(rtwdev);
}
263
/* DIG tuning constants.  "PERF" values apply while associated (performance
 * profile), "CVRG" while unassociated (coverage profile).  FA_TH_* are
 * false-alarm count thresholds; MAX/MID/MIN bound the IGI value, and
 * DIG_RSSI_GAIN_OFFSET caps IGI relative to the weakest peer's RSSI.
 */
#define DIG_PERF_FA_TH_LOW 250
#define DIG_PERF_FA_TH_HIGH 500
#define DIG_PERF_FA_TH_EXTRA_HIGH 750
#define DIG_PERF_MAX 0x5a
#define DIG_PERF_MID 0x40
#define DIG_CVRG_FA_TH_LOW 2000
#define DIG_CVRG_FA_TH_HIGH 4000
#define DIG_CVRG_FA_TH_EXTRA_HIGH 5000
#define DIG_CVRG_MAX 0x2a
#define DIG_CVRG_MID 0x26
#define DIG_CVRG_MIN 0x1c
#define DIG_RSSI_GAIN_OFFSET 15
276
/* Detect IGI oscillation (repeated up/down flapping with false alarms
 * bouncing across the thresholds) and, when found, freeze DIG for a while.
 * Returns true while damping is active, i.e. the caller must not adjust IGI.
 */
static bool
rtw_phy_dig_check_damping(struct rtw_dm_info *dm_info)
{
	u16 fa_lo = DIG_PERF_FA_TH_LOW;
	u16 fa_hi = DIG_PERF_FA_TH_HIGH;
	u16 *fa_history;
	u8 *igi_history;
	u8 damping_rssi;
	u8 min_rssi;
	u8 diff;
	u8 igi_bitmap;
	bool damping = false;

	min_rssi = dm_info->min_rssi;
	if (dm_info->damping) {
		/* Already damping: stay frozen until RSSI drifts more than
		 * 3 from where damping started, or enough rounds (counted by
		 * damping_cnt) have elapsed.
		 */
		damping_rssi = dm_info->damping_rssi;
		diff = min_rssi > damping_rssi ? min_rssi - damping_rssi :
						 damping_rssi - min_rssi;
		if (diff > 3 || dm_info->damping_cnt++ > 20) {
			dm_info->damping = false;
			return false;
		}

		return true;
	}

	/* igi_bitmap holds an up(1)/down(0) flag for each of the last four
	 * IGI updates, bit 0 newest; look for the two oscillation patterns.
	 */
	igi_history = dm_info->igi_history;
	fa_history = dm_info->fa_history;
	igi_bitmap = dm_info->igi_bitmap & 0xf;
	switch (igi_bitmap) {
	case 5:
		/* 0b0101: down -> up -> down -> up (oldest to newest), with
		 * false alarms alternating high/low in step
		 */
		if (igi_history[0] > igi_history[1] &&
		    igi_history[2] > igi_history[3] &&
		    igi_history[0] - igi_history[1] >= 2 &&
		    igi_history[2] - igi_history[3] >= 2 &&
		    fa_history[0] > fa_hi && fa_history[1] < fa_lo &&
		    fa_history[2] > fa_hi && fa_history[3] < fa_lo)
			damping = true;
		break;
	case 9:
		/* 0b1001: up -> down -> down -> up (oldest to newest), again
		 * with false alarms flipping across the thresholds
		 */
		if (igi_history[0] > igi_history[1] &&
		    igi_history[3] > igi_history[2] &&
		    igi_history[0] - igi_history[1] >= 4 &&
		    igi_history[3] - igi_history[2] >= 2 &&
		    fa_history[0] > fa_hi && fa_history[1] < fa_lo &&
		    fa_history[2] < fa_lo && fa_history[3] > fa_hi)
			damping = true;
		break;
	default:
		return false;
	}

	if (damping) {
		/* Anchor the damping state to the current minimum RSSI. */
		dm_info->damping = true;
		dm_info->damping_cnt = 0;
		dm_info->damping_rssi = min_rssi;
	}

	return damping;
}
339
340static void rtw_phy_dig_get_boundary(struct rtw_dev *rtwdev,
341 struct rtw_dm_info *dm_info,
342 u8 *upper, u8 *lower, bool linked)
343{
344 u8 dig_max, dig_min, dig_mid;
345 u8 min_rssi;
346
347 if (linked) {
348 dig_max = DIG_PERF_MAX;
349 dig_mid = DIG_PERF_MID;
350 dig_min = rtwdev->chip->dig_min;
351 min_rssi = max_t(u8, dm_info->min_rssi, dig_min);
352 } else {
353 dig_max = DIG_CVRG_MAX;
354 dig_mid = DIG_CVRG_MID;
355 dig_min = DIG_CVRG_MIN;
356 min_rssi = dig_min;
357 }
358
359
360 dig_max = min_t(u8, dig_max, min_rssi + DIG_RSSI_GAIN_OFFSET);
361
362 *lower = clamp_t(u8, min_rssi, dig_min, dig_mid);
363 *upper = clamp_t(u8, *lower + DIG_RSSI_GAIN_OFFSET, dig_min, dig_max);
364}
365
366static void rtw_phy_dig_get_threshold(struct rtw_dm_info *dm_info,
367 u16 *fa_th, u8 *step, bool linked)
368{
369 u8 min_rssi, pre_min_rssi;
370
371 min_rssi = dm_info->min_rssi;
372 pre_min_rssi = dm_info->pre_min_rssi;
373 step[0] = 4;
374 step[1] = 3;
375 step[2] = 2;
376
377 if (linked) {
378 fa_th[0] = DIG_PERF_FA_TH_EXTRA_HIGH;
379 fa_th[1] = DIG_PERF_FA_TH_HIGH;
380 fa_th[2] = DIG_PERF_FA_TH_LOW;
381 if (pre_min_rssi > min_rssi) {
382 step[0] = 6;
383 step[1] = 4;
384 step[2] = 2;
385 }
386 } else {
387 fa_th[0] = DIG_CVRG_FA_TH_EXTRA_HIGH;
388 fa_th[1] = DIG_CVRG_FA_TH_HIGH;
389 fa_th[2] = DIG_CVRG_FA_TH_LOW;
390 }
391}
392
393static void rtw_phy_dig_recorder(struct rtw_dm_info *dm_info, u8 igi, u16 fa)
394{
395 u8 *igi_history;
396 u16 *fa_history;
397 u8 igi_bitmap;
398 bool up;
399
400 igi_bitmap = dm_info->igi_bitmap << 1 & 0xfe;
401 igi_history = dm_info->igi_history;
402 fa_history = dm_info->fa_history;
403
404 up = igi > igi_history[0];
405 igi_bitmap |= up;
406
407 igi_history[3] = igi_history[2];
408 igi_history[2] = igi_history[1];
409 igi_history[1] = igi_history[0];
410 igi_history[0] = igi;
411
412 fa_history[3] = fa_history[2];
413 fa_history[2] = fa_history[1];
414 fa_history[1] = fa_history[0];
415 fa_history[0] = fa;
416
417 dm_info->igi_bitmap = igi_bitmap;
418}
419
/* Dynamic initial gain: adapt the IGI to this round's false-alarm count,
 * bounded by the RSSI-derived window, unless DIG is disabled or the
 * anti-oscillation damper is active.
 */
static void rtw_phy_dig(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 upper_bound, lower_bound;
	u8 pre_igi, cur_igi;
	u16 fa_th[3], fa_cnt;
	u8 level;
	u8 step[3];
	bool linked;

	if (test_bit(RTW_FLAG_DIG_DISABLE, rtwdev->flags))
		return;

	/* Hold IGI steady while oscillation damping is in effect. */
	if (rtw_phy_dig_check_damping(dm_info))
		return;

	linked = !!rtwdev->sta_cnt;

	fa_cnt = dm_info->total_fa_cnt;
	pre_igi = dm_info->igi_history[0];

	rtw_phy_dig_get_threshold(dm_info, fa_th, step, linked);

	/* Test the false-alarm count from the highest tier down and add the
	 * matching step; every step is implicitly offset by the
	 * unconditional -2 below, so IGI drifts downward when false alarms
	 * stay low.
	 */
	cur_igi = pre_igi;
	for (level = 0; level < 3; level++) {
		if (fa_cnt > fa_th[level]) {
			cur_igi += step[level];
			break;
		}
	}
	cur_igi -= 2;

	/* Clamp into the window derived from the weakest peer's RSSI and
	 * the hardware limits.
	 */
	rtw_phy_dig_get_boundary(rtwdev, dm_info, &upper_bound, &lower_bound,
				 linked);
	cur_igi = clamp_t(u8, cur_igi, lower_bound, upper_bound);

	/* Record IGI/FA for the next damping check, then only touch the
	 * hardware when the value actually changed.
	 */
	rtw_phy_dig_recorder(dm_info, cur_igi, fa_cnt);

	if (cur_igi != pre_igi)
		rtw_phy_dig_write(rtwdev, cur_igi);
}
473
474static void rtw_phy_ra_info_update_iter(void *data, struct ieee80211_sta *sta)
475{
476 struct rtw_dev *rtwdev = data;
477 struct rtw_sta_info *si = (struct rtw_sta_info *)sta->drv_priv;
478
479 rtw_update_sta_info(rtwdev, si);
480}
481
482static void rtw_phy_ra_info_update(struct rtw_dev *rtwdev)
483{
484 if (rtwdev->watch_dog_cnt & 0x3)
485 return;
486
487 rtw_iterate_stas_atomic(rtwdev, rtw_phy_ra_info_update_iter, rtwdev);
488}
489
490static u32 rtw_phy_get_rrsr_mask(struct rtw_dev *rtwdev, u8 rate_idx)
491{
492 u8 rate_order;
493
494 rate_order = rate_idx;
495
496 if (rate_idx >= DESC_RATEVHT4SS_MCS0)
497 rate_order -= DESC_RATEVHT4SS_MCS0;
498 else if (rate_idx >= DESC_RATEVHT3SS_MCS0)
499 rate_order -= DESC_RATEVHT3SS_MCS0;
500 else if (rate_idx >= DESC_RATEVHT2SS_MCS0)
501 rate_order -= DESC_RATEVHT2SS_MCS0;
502 else if (rate_idx >= DESC_RATEVHT1SS_MCS0)
503 rate_order -= DESC_RATEVHT1SS_MCS0;
504 else if (rate_idx >= DESC_RATEMCS24)
505 rate_order -= DESC_RATEMCS24;
506 else if (rate_idx >= DESC_RATEMCS16)
507 rate_order -= DESC_RATEMCS16;
508 else if (rate_idx >= DESC_RATEMCS8)
509 rate_order -= DESC_RATEMCS8;
510 else if (rate_idx >= DESC_RATEMCS0)
511 rate_order -= DESC_RATEMCS0;
512 else if (rate_idx >= DESC_RATE6M)
513 rate_order -= DESC_RATE6M;
514 else
515 rate_order -= DESC_RATE1M;
516
517 if (rate_idx >= DESC_RATEMCS0 || rate_order == 0)
518 rate_order++;
519
520 return GENMASK(rate_order + RRSR_RATE_ORDER_CCK_LEN - 1, 0);
521}
522
523static void rtw_phy_rrsr_mask_min_iter(void *data, struct ieee80211_sta *sta)
524{
525 struct rtw_dev *rtwdev = (struct rtw_dev *)data;
526 struct rtw_sta_info *si = (struct rtw_sta_info *)sta->drv_priv;
527 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
528 u32 mask = 0;
529
530 mask = rtw_phy_get_rrsr_mask(rtwdev, si->ra_report.desc_rate);
531 if (mask < dm_info->rrsr_mask_min)
532 dm_info->rrsr_mask_min = mask;
533}
534
535static void rtw_phy_rrsr_update(struct rtw_dev *rtwdev)
536{
537 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
538
539 dm_info->rrsr_mask_min = RRSR_RATE_ORDER_MAX;
540 rtw_iterate_stas_atomic(rtwdev, rtw_phy_rrsr_mask_min_iter, rtwdev);
541 rtw_write32(rtwdev, REG_RRSR, dm_info->rrsr_val_init & dm_info->rrsr_mask_min);
542}
543
544static void rtw_phy_dpk_track(struct rtw_dev *rtwdev)
545{
546 struct rtw_chip_info *chip = rtwdev->chip;
547
548 if (chip->ops->dpk_track)
549 chip->ops->dpk_track(rtwdev);
550}
551
/* Context for attributing a received frame's CFO samples to the vif whose
 * BSSID the frame belongs to.
 */
struct rtw_rx_addr_match_data {
	struct rtw_dev *rtwdev;
	struct ieee80211_hdr *hdr;
	struct rtw_rx_pkt_stat *pkt_stat;
	u8 *bssid;	/* BSSID extracted from the frame header */
};
558
559static void rtw_phy_parsing_cfo_iter(void *data, u8 *mac,
560 struct ieee80211_vif *vif)
561{
562 struct rtw_rx_addr_match_data *iter_data = data;
563 struct rtw_dev *rtwdev = iter_data->rtwdev;
564 struct rtw_rx_pkt_stat *pkt_stat = iter_data->pkt_stat;
565 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
566 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
567 u8 *bssid = iter_data->bssid;
568 u8 i;
569
570 if (!ether_addr_equal(vif->bss_conf.bssid, bssid))
571 return;
572
573 for (i = 0; i < rtwdev->hal.rf_path_num; i++) {
574 cfo->cfo_tail[i] += pkt_stat->cfo_tail[i];
575 cfo->cfo_cnt[i]++;
576 }
577
578 cfo->packet_count++;
579}
580
581void rtw_phy_parsing_cfo(struct rtw_dev *rtwdev,
582 struct rtw_rx_pkt_stat *pkt_stat)
583{
584 struct ieee80211_hdr *hdr = pkt_stat->hdr;
585 struct rtw_rx_addr_match_data data = {};
586
587 if (pkt_stat->crc_err || pkt_stat->icv_err || !pkt_stat->phy_status ||
588 ieee80211_is_ctl(hdr->frame_control))
589 return;
590
591 data.rtwdev = rtwdev;
592 data.hdr = hdr;
593 data.pkt_stat = pkt_stat;
594 data.bssid = get_hdr_bssid(hdr);
595
596 rtw_iterate_vifs_atomic(rtwdev, rtw_phy_parsing_cfo_iter, &data);
597}
598EXPORT_SYMBOL(rtw_phy_parsing_cfo);
599
600static void rtw_phy_cfo_track(struct rtw_dev *rtwdev)
601{
602 struct rtw_chip_info *chip = rtwdev->chip;
603
604 if (chip->ops->cfo_track)
605 chip->ops->cfo_track(rtwdev);
606}
607
608#define CCK_PD_FA_LV1_MIN 1000
609#define CCK_PD_FA_LV0_MAX 500
610
611static u8 rtw_phy_cck_pd_lv_unlink(struct rtw_dev *rtwdev)
612{
613 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
614 u32 cck_fa_avg = dm_info->cck_fa_avg;
615
616 if (cck_fa_avg > CCK_PD_FA_LV1_MIN)
617 return CCK_PD_LV1;
618
619 if (cck_fa_avg < CCK_PD_FA_LV0_MAX)
620 return CCK_PD_LV0;
621
622 return CCK_PD_LV_MAX;
623}
624
#define CCK_PD_IGI_LV4_VAL 0x38
#define CCK_PD_IGI_LV3_VAL 0x2a
#define CCK_PD_IGI_LV2_VAL 0x24
#define CCK_PD_RSSI_LV4_VAL 32
#define CCK_PD_RSSI_LV3_VAL 32
#define CCK_PD_RSSI_LV2_VAL 24

/* CCK PD level while associated: pick from current IGI, the weakest peer's
 * RSSI, and the CCK false-alarm average.  LV4/LV3 require both high IGI and
 * high RSSI; LV2 requires either; otherwise fall back to the false-alarm
 * decision.  CCK_PD_LV_MAX means "no change".
 */
static u8 rtw_phy_cck_pd_lv_link(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 igi = dm_info->igi_history[0];
	u8 rssi = dm_info->min_rssi;
	u32 cck_fa_avg = dm_info->cck_fa_avg;

	if (igi > CCK_PD_IGI_LV4_VAL && rssi > CCK_PD_RSSI_LV4_VAL)
		return CCK_PD_LV4;
	if (igi > CCK_PD_IGI_LV3_VAL && rssi > CCK_PD_RSSI_LV3_VAL)
		return CCK_PD_LV3;
	if (igi > CCK_PD_IGI_LV2_VAL || rssi > CCK_PD_RSSI_LV2_VAL)
		return CCK_PD_LV2;
	if (cck_fa_avg > CCK_PD_FA_LV1_MIN)
		return CCK_PD_LV1;
	if (cck_fa_avg < CCK_PD_FA_LV0_MAX)
		return CCK_PD_LV0;

	return CCK_PD_LV_MAX;
}
652
653static u8 rtw_phy_cck_pd_lv(struct rtw_dev *rtwdev)
654{
655 if (!rtw_is_assoc(rtwdev))
656 return rtw_phy_cck_pd_lv_unlink(rtwdev);
657 else
658 return rtw_phy_cck_pd_lv_link(rtwdev);
659}
660
/* Adapt the CCK packet-detect threshold (2.4 GHz only): maintain an EMA of
 * the CCK false-alarm count, pick a level, and program it via the chip op.
 */
static void rtw_phy_cck_pd(struct rtw_dev *rtwdev)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	struct rtw_chip_info *chip = rtwdev->chip;
	u32 cck_fa = dm_info->cck_fa_cnt;
	u8 level;

	/* CCK rates only exist on the 2.4 GHz band. */
	if (rtwdev->hal.current_band_type != RTW_BAND_2G)
		return;

	/* Moving average (3/4 old + 1/4 new); seeded with the first sample
	 * after a reset.
	 */
	if (dm_info->cck_fa_avg == CCK_FA_AVG_RESET)
		dm_info->cck_fa_avg = cck_fa;
	else
		dm_info->cck_fa_avg = (dm_info->cck_fa_avg * 3 + cck_fa) >> 2;

	/* NOTE(review): the "cck_fa" value printed here is fa_history[0]
	 * (total false alarms recorded by DIG), not cck_fa_cnt — confirm
	 * this is intended.
	 */
	rtw_dbg(rtwdev, RTW_DBG_PHY, "IGI=0x%x, rssi_min=%d, cck_fa=%d\n",
		dm_info->igi_history[0], dm_info->min_rssi,
		dm_info->fa_history[0]);
	rtw_dbg(rtwdev, RTW_DBG_PHY, "cck_fa_avg=%d, cck_pd_default=%d\n",
		dm_info->cck_fa_avg, dm_info->cck_pd_default);

	level = rtw_phy_cck_pd_lv(rtwdev);

	/* CCK_PD_LV_MAX means "keep the current level". */
	if (level >= CCK_PD_LV_MAX)
		return;

	if (chip->ops->cck_pd_set)
		chip->ops->cck_pd_set(rtwdev, level);
}
690
691static void rtw_phy_pwr_track(struct rtw_dev *rtwdev)
692{
693 rtwdev->chip->ops->pwr_track(rtwdev);
694}
695
/* Rate-adaptation round: push fresh PHY info to the firmware first, then
 * update per-station RA state and the RRSR basic-rate mask.
 */
static void rtw_phy_ra_track(struct rtw_dev *rtwdev)
{
	rtw_fw_update_wl_phy_info(rtwdev);
	rtw_phy_ra_info_update(rtwdev);
	rtw_phy_rrsr_update(rtwdev);
}
702
/* Watchdog entry point for all PHY dynamic mechanisms.  Statistics are
 * collected first; DIG and CCK PD consume the false-alarm/RSSI numbers
 * gathered in that step, so the order matters.
 */
void rtw_phy_dynamic_mechanism(struct rtw_dev *rtwdev)
{
	/* Must run before the consumers below. */
	rtw_phy_statistics(rtwdev);
	rtw_phy_dig(rtwdev);
	rtw_phy_cck_pd(rtwdev);
	rtw_phy_ra_track(rtwdev);
	rtw_phy_tx_path_diversity(rtwdev);
	rtw_phy_cfo_track(rtwdev);
	rtw_phy_dpk_track(rtwdev);
	rtw_phy_pwr_track(rtwdev);
}
715
#define FRAC_BITS 3

/*
 * Convert an RF power reading (dBm) to the driver's 0..100 "power dB"
 * scale: -100 -> 0, -1 -> 99, 0..19 -> 100.  Inputs at or below -100 or at
 * or above 20 are treated as invalid and map to 0.
 */
static u8 rtw_phy_power_2_db(s8 power)
{
	if (power <= -100 || power >= 20)
		return 0;
	if (power >= 0)
		return 100;
	return 100 + power;
}
727
728static u64 rtw_phy_db_2_linear(u8 power_db)
729{
730 u8 i, j;
731 u64 linear;
732
733 if (power_db > 96)
734 power_db = 96;
735 else if (power_db < 1)
736 return 1;
737
738
739 i = (power_db - 1) >> 3;
740 j = (power_db - 1) - (i << 3);
741
742 linear = db_invert_table[i][j];
743 linear = i > 2 ? linear << FRAC_BITS : linear;
744
745 return linear;
746}
747
/* Convert a linear power value back to 0..96 "power dB" by nearest-neighbour
 * search in db_invert_table.  Rows 0-2 of the table are stored pre-scaled by
 * 2^FRAC_BITS, so the input is shifted up when compared against them.
 */
static u8 rtw_phy_linear_2_db(u64 linear)
{
	u8 i;
	u8 j;
	u32 dB;

	/* Saturate at the table maximum (96 dB). */
	if (linear >= db_invert_table[11][7])
		return 96;

	/* Find the first row whose last entry is >= linear. */
	for (i = 0; i < 12; i++) {
		if (i <= 2 && (linear << FRAC_BITS) <= db_invert_table[i][7])
			break;
		else if (i > 2 && linear <= db_invert_table[i][7])
			break;
	}

	/* Find the first entry in that row that is >= linear. */
	for (j = 0; j < 8; j++) {
		if (i <= 2 && (linear << FRAC_BITS) <= db_invert_table[i][j])
			break;
		else if (i > 2 && linear <= db_invert_table[i][j])
			break;
	}

	if (j == 0 && i == 0)
		goto end;

	/* Round to the nearer of the found entry and its predecessor,
	 * stepping back across a row boundary when needed; row 3 is handled
	 * separately because rows 0-2 use the scaled representation.
	 */
	if (j == 0) {
		if (i != 3) {
			if (db_invert_table[i][0] - linear >
			    linear - db_invert_table[i - 1][7]) {
				i = i - 1;
				j = 7;
			}
		} else {
			if (db_invert_table[3][0] - linear >
			    linear - db_invert_table[2][7]) {
				i = 2;
				j = 7;
			}
		}
	} else {
		if (db_invert_table[i][j] - linear >
		    linear - db_invert_table[i][j - 1]) {
			j = j - 1;
		}
	}
end:
	/* The flat table index plus one is the dB value. */
	dB = (i << 3) + j + 1;

	return dB;
}
799
800u8 rtw_phy_rf_power_2_rssi(s8 *rf_power, u8 path_num)
801{
802 s8 power;
803 u8 power_db;
804 u64 linear;
805 u64 sum = 0;
806 u8 path;
807
808 for (path = 0; path < path_num; path++) {
809 power = rf_power[path];
810 power_db = rtw_phy_power_2_db(power);
811 linear = rtw_phy_db_2_linear(power_db);
812 sum += linear;
813 }
814
815 sum = (sum + (1 << (FRAC_BITS - 1))) >> FRAC_BITS;
816 switch (path_num) {
817 case 2:
818 sum >>= 1;
819 break;
820 case 3:
821 sum = ((sum) + ((sum) << 1) + ((sum) << 3)) >> 5;
822 break;
823 case 4:
824 sum >>= 2;
825 break;
826 default:
827 break;
828 }
829
830 return rtw_phy_linear_2_db(sum);
831}
832EXPORT_SYMBOL(rtw_phy_rf_power_2_rssi);
833
834u32 rtw_phy_read_rf(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
835 u32 addr, u32 mask)
836{
837 struct rtw_hal *hal = &rtwdev->hal;
838 struct rtw_chip_info *chip = rtwdev->chip;
839 const u32 *base_addr = chip->rf_base_addr;
840 u32 val, direct_addr;
841
842 if (rf_path >= hal->rf_phy_num) {
843 rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
844 return INV_RF_DATA;
845 }
846
847 addr &= 0xff;
848 direct_addr = base_addr[rf_path] + (addr << 2);
849 mask &= RFREG_MASK;
850
851 val = rtw_read32_mask(rtwdev, direct_addr, mask);
852
853 return val;
854}
855EXPORT_SYMBOL(rtw_phy_read_rf);
856
/* Read an RF register through the serial (LSSI/HSSI) interface: latch the
 * register address, toggle the read strobe on path A, wait, then read the
 * result back from the PI or SI read register depending on the interface
 * mode.  Returns INV_RF_DATA on an invalid path or missing chip support.
 */
u32 rtw_phy_read_rf_sipi(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
			 u32 addr, u32 mask)
{
	struct rtw_hal *hal = &rtwdev->hal;
	struct rtw_chip_info *chip = rtwdev->chip;
	const struct rtw_rf_sipi_addr *rf_sipi_addr;
	const struct rtw_rf_sipi_addr *rf_sipi_addr_a;
	u32 val32;
	u32 en_pi;
	u32 r_addr;
	u32 shift;

	if (rf_path >= hal->rf_phy_num) {
		rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
		return INV_RF_DATA;
	}

	if (!chip->rf_sipi_read_addr) {
		rtw_err(rtwdev, "rf_sipi_read_addr isn't defined\n");
		return INV_RF_DATA;
	}

	rf_sipi_addr = &chip->rf_sipi_read_addr[rf_path];
	rf_sipi_addr_a = &chip->rf_sipi_read_addr[RF_PATH_A];

	addr &= 0xff;

	/* Latch the target RF register address into HSSI parameter 2. */
	val32 = rtw_read32(rtwdev, rf_sipi_addr->hssi_2);
	val32 = (val32 & ~LSSI_READ_ADDR_MASK) | (addr << 23);
	rtw_write32(rtwdev, rf_sipi_addr->hssi_2, val32);

	/* Toggle the read edge on path A to trigger the LSSI read. */
	val32 = rtw_read32(rtwdev, rf_sipi_addr_a->hssi_2);
	rtw_write32(rtwdev, rf_sipi_addr_a->hssi_2, val32 & ~LSSI_READ_EDGE_MASK);
	rtw_write32(rtwdev, rf_sipi_addr_a->hssi_2, val32 | LSSI_READ_EDGE_MASK);

	/* Give the RF pipe time to present the data. */
	udelay(120);

	/* Data appears on the PI or SI read-back register depending on
	 * whether parallel-interface mode is enabled (hssi_1 bit 8).
	 */
	en_pi = rtw_read32_mask(rtwdev, rf_sipi_addr->hssi_1, BIT(8));
	r_addr = en_pi ? rf_sipi_addr->lssi_read_pi : rf_sipi_addr->lssi_read;

	val32 = rtw_read32_mask(rtwdev, r_addr, LSSI_READ_DATA_MASK);

	shift = __ffs(mask);

	return (val32 & mask) >> shift;
}
EXPORT_SYMBOL(rtw_phy_read_rf_sipi);
905
/* Write an RF register through the serial (SIPI) interface.  Partial-width
 * writes are turned into a read-modify-write so the untouched bits of the
 * 20-bit RF register are preserved.  Returns false on an invalid path or
 * when the RF read-back fails.
 */
bool rtw_phy_write_rf_reg_sipi(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
			       u32 addr, u32 mask, u32 data)
{
	struct rtw_hal *hal = &rtwdev->hal;
	struct rtw_chip_info *chip = rtwdev->chip;
	u32 *sipi_addr = chip->rf_sipi_addr;
	u32 data_and_addr;
	u32 old_data = 0;
	u32 shift;

	if (rf_path >= hal->rf_phy_num) {
		rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
		return false;
	}

	addr &= 0xff;
	mask &= RFREG_MASK;

	/* Partial mask: merge the new field into the current value. */
	if (mask != RFREG_MASK) {
		old_data = chip->ops->read_rf(rtwdev, rf_path, addr, RFREG_MASK);

		if (old_data == INV_RF_DATA) {
			rtw_err(rtwdev, "Write fail, rf is disabled\n");
			return false;
		}

		shift = __ffs(mask);
		data = ((old_data) & (~mask)) | (data << shift);
	}

	/* SIPI word layout: register address in bits 27:20, 20-bit data in
	 * the low bits.
	 */
	data_and_addr = ((addr << 20) | (data & 0x000fffff)) & 0x0fffffff;

	rtw_write32(rtwdev, sipi_addr[rf_path], data_and_addr);

	/* Let the serial write complete before the next RF access. */
	udelay(13);

	return true;
}
EXPORT_SYMBOL(rtw_phy_write_rf_reg_sipi);
945
946bool rtw_phy_write_rf_reg(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
947 u32 addr, u32 mask, u32 data)
948{
949 struct rtw_hal *hal = &rtwdev->hal;
950 struct rtw_chip_info *chip = rtwdev->chip;
951 const u32 *base_addr = chip->rf_base_addr;
952 u32 direct_addr;
953
954 if (rf_path >= hal->rf_phy_num) {
955 rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
956 return false;
957 }
958
959 addr &= 0xff;
960 direct_addr = base_addr[rf_path] + (addr << 2);
961 mask &= RFREG_MASK;
962
963 rtw_write32_mask(rtwdev, direct_addr, mask, data);
964
965 udelay(1);
966
967 return true;
968}
969
970bool rtw_phy_write_rf_reg_mix(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
971 u32 addr, u32 mask, u32 data)
972{
973 if (addr != 0x00)
974 return rtw_phy_write_rf_reg(rtwdev, rf_path, addr, mask, data);
975
976 return rtw_phy_write_rf_reg_sipi(rtwdev, rf_path, addr, mask, data);
977}
978EXPORT_SYMBOL(rtw_phy_write_rf_reg_mix);
979
980void rtw_phy_setup_phy_cond(struct rtw_dev *rtwdev, u32 pkg)
981{
982 struct rtw_hal *hal = &rtwdev->hal;
983 struct rtw_efuse *efuse = &rtwdev->efuse;
984 struct rtw_phy_cond cond = {0};
985
986 cond.cut = hal->cut_version ? hal->cut_version : 15;
987 cond.pkg = pkg ? pkg : 15;
988 cond.plat = 0x04;
989 cond.rfe = efuse->rfe_option;
990
991 switch (rtw_hci_type(rtwdev)) {
992 case RTW_HCI_TYPE_USB:
993 cond.intf = INTF_USB;
994 break;
995 case RTW_HCI_TYPE_SDIO:
996 cond.intf = INTF_SDIO;
997 break;
998 case RTW_HCI_TYPE_PCIE:
999 default:
1000 cond.intf = INTF_PCIE;
1001 break;
1002 }
1003
1004 hal->phy_cond = cond;
1005
1006 rtw_dbg(rtwdev, RTW_DBG_PHY, "phy cond=0x%08x\n", *((u32 *)&hal->phy_cond));
1007}
1008
1009static bool check_positive(struct rtw_dev *rtwdev, struct rtw_phy_cond cond)
1010{
1011 struct rtw_hal *hal = &rtwdev->hal;
1012 struct rtw_phy_cond drv_cond = hal->phy_cond;
1013
1014 if (cond.cut && cond.cut != drv_cond.cut)
1015 return false;
1016
1017 if (cond.pkg && cond.pkg != drv_cond.pkg)
1018 return false;
1019
1020 if (cond.intf && cond.intf != drv_cond.intf)
1021 return false;
1022
1023 if (cond.rfe != drv_cond.rfe)
1024 return false;
1025
1026 return true;
1027}
1028
/* Walk a conditional PHY parameter table.  The table interleaves condition
 * words (pos/neg markers encoding if/elif/else/endif branches) with
 * addr/data pairs; a pair is applied through tbl->do_cfg only while the
 * current branch matches this device (see check_positive()).
 */
void rtw_parse_tbl_phy_cond(struct rtw_dev *rtwdev, const struct rtw_table *tbl)
{
	const union phy_table_tile *p = tbl->data;
	const union phy_table_tile *end = p + tbl->size / 2;
	struct rtw_phy_cond pos_cond = {0};
	bool is_matched = true, is_skipped = false;

	BUILD_BUG_ON(sizeof(union phy_table_tile) != sizeof(struct phy_cfg_pair));

	for (; p < end; p++) {
		if (p->cond.pos) {
			switch (p->cond.branch) {
			case BRANCH_ENDIF:
				/* Leave the conditional block entirely. */
				is_matched = true;
				is_skipped = false;
				break;
			case BRANCH_ELSE:
				/* Take the else only if no earlier branch
				 * already matched.
				 */
				is_matched = is_skipped ? false : true;
				break;
			case BRANCH_IF:
			case BRANCH_ELIF:
			default:
				/* Remember the condition; it is evaluated
				 * when the matching "neg" word arrives.
				 */
				pos_cond = p->cond;
				break;
			}
		} else if (p->cond.neg) {
			/* Evaluate the pending condition once per branch;
			 * once a branch matched, later branches are skipped.
			 */
			if (!is_skipped) {
				if (check_positive(rtwdev, pos_cond)) {
					is_matched = true;
					is_skipped = true;
				} else {
					is_matched = false;
					is_skipped = false;
				}
			} else {
				is_matched = false;
			}
		} else if (is_matched) {
			/* Plain register write inside a matching branch. */
			(*tbl->do_cfg)(rtwdev, tbl, p->cfg.addr, p->cfg.data);
		}
	}
}
EXPORT_SYMBOL(rtw_parse_tbl_phy_cond);
1072
1073#define bcd_to_dec_pwr_by_rate(val, i) bcd2bin(val >> (i * 8))
1074
1075static u8 tbl_to_dec_pwr_by_rate(struct rtw_dev *rtwdev, u32 hex, u8 i)
1076{
1077 if (rtwdev->chip->is_pwr_by_rate_dec)
1078 return bcd_to_dec_pwr_by_rate(hex, i);
1079
1080 return (hex >> (i * 8)) & 0xFF;
1081}
1082
1083static void
1084rtw_phy_get_rate_values_of_txpwr_by_rate(struct rtw_dev *rtwdev,
1085 u32 addr, u32 mask, u32 val, u8 *rate,
1086 u8 *pwr_by_rate, u8 *rate_num)
1087{
1088 int i;
1089
1090 switch (addr) {
1091 case 0xE00:
1092 case 0x830:
1093 rate[0] = DESC_RATE6M;
1094 rate[1] = DESC_RATE9M;
1095 rate[2] = DESC_RATE12M;
1096 rate[3] = DESC_RATE18M;
1097 for (i = 0; i < 4; ++i)
1098 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1099 *rate_num = 4;
1100 break;
1101 case 0xE04:
1102 case 0x834:
1103 rate[0] = DESC_RATE24M;
1104 rate[1] = DESC_RATE36M;
1105 rate[2] = DESC_RATE48M;
1106 rate[3] = DESC_RATE54M;
1107 for (i = 0; i < 4; ++i)
1108 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1109 *rate_num = 4;
1110 break;
1111 case 0xE08:
1112 rate[0] = DESC_RATE1M;
1113 pwr_by_rate[0] = bcd_to_dec_pwr_by_rate(val, 1);
1114 *rate_num = 1;
1115 break;
1116 case 0x86C:
1117 if (mask == 0xffffff00) {
1118 rate[0] = DESC_RATE2M;
1119 rate[1] = DESC_RATE5_5M;
1120 rate[2] = DESC_RATE11M;
1121 for (i = 1; i < 4; ++i)
1122 pwr_by_rate[i - 1] =
1123 tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1124 *rate_num = 3;
1125 } else if (mask == 0x000000ff) {
1126 rate[0] = DESC_RATE11M;
1127 pwr_by_rate[0] = bcd_to_dec_pwr_by_rate(val, 0);
1128 *rate_num = 1;
1129 }
1130 break;
1131 case 0xE10:
1132 case 0x83C:
1133 rate[0] = DESC_RATEMCS0;
1134 rate[1] = DESC_RATEMCS1;
1135 rate[2] = DESC_RATEMCS2;
1136 rate[3] = DESC_RATEMCS3;
1137 for (i = 0; i < 4; ++i)
1138 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1139 *rate_num = 4;
1140 break;
1141 case 0xE14:
1142 case 0x848:
1143 rate[0] = DESC_RATEMCS4;
1144 rate[1] = DESC_RATEMCS5;
1145 rate[2] = DESC_RATEMCS6;
1146 rate[3] = DESC_RATEMCS7;
1147 for (i = 0; i < 4; ++i)
1148 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1149 *rate_num = 4;
1150 break;
1151 case 0xE18:
1152 case 0x84C:
1153 rate[0] = DESC_RATEMCS8;
1154 rate[1] = DESC_RATEMCS9;
1155 rate[2] = DESC_RATEMCS10;
1156 rate[3] = DESC_RATEMCS11;
1157 for (i = 0; i < 4; ++i)
1158 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1159 *rate_num = 4;
1160 break;
1161 case 0xE1C:
1162 case 0x868:
1163 rate[0] = DESC_RATEMCS12;
1164 rate[1] = DESC_RATEMCS13;
1165 rate[2] = DESC_RATEMCS14;
1166 rate[3] = DESC_RATEMCS15;
1167 for (i = 0; i < 4; ++i)
1168 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1169 *rate_num = 4;
1170 break;
1171 case 0x838:
1172 rate[0] = DESC_RATE1M;
1173 rate[1] = DESC_RATE2M;
1174 rate[2] = DESC_RATE5_5M;
1175 for (i = 1; i < 4; ++i)
1176 pwr_by_rate[i - 1] = tbl_to_dec_pwr_by_rate(rtwdev,
1177 val, i);
1178 *rate_num = 3;
1179 break;
1180 case 0xC20:
1181 case 0xE20:
1182 case 0x1820:
1183 case 0x1A20:
1184 rate[0] = DESC_RATE1M;
1185 rate[1] = DESC_RATE2M;
1186 rate[2] = DESC_RATE5_5M;
1187 rate[3] = DESC_RATE11M;
1188 for (i = 0; i < 4; ++i)
1189 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1190 *rate_num = 4;
1191 break;
1192 case 0xC24:
1193 case 0xE24:
1194 case 0x1824:
1195 case 0x1A24:
1196 rate[0] = DESC_RATE6M;
1197 rate[1] = DESC_RATE9M;
1198 rate[2] = DESC_RATE12M;
1199 rate[3] = DESC_RATE18M;
1200 for (i = 0; i < 4; ++i)
1201 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1202 *rate_num = 4;
1203 break;
1204 case 0xC28:
1205 case 0xE28:
1206 case 0x1828:
1207 case 0x1A28:
1208 rate[0] = DESC_RATE24M;
1209 rate[1] = DESC_RATE36M;
1210 rate[2] = DESC_RATE48M;
1211 rate[3] = DESC_RATE54M;
1212 for (i = 0; i < 4; ++i)
1213 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1214 *rate_num = 4;
1215 break;
1216 case 0xC2C:
1217 case 0xE2C:
1218 case 0x182C:
1219 case 0x1A2C:
1220 rate[0] = DESC_RATEMCS0;
1221 rate[1] = DESC_RATEMCS1;
1222 rate[2] = DESC_RATEMCS2;
1223 rate[3] = DESC_RATEMCS3;
1224 for (i = 0; i < 4; ++i)
1225 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1226 *rate_num = 4;
1227 break;
1228 case 0xC30:
1229 case 0xE30:
1230 case 0x1830:
1231 case 0x1A30:
1232 rate[0] = DESC_RATEMCS4;
1233 rate[1] = DESC_RATEMCS5;
1234 rate[2] = DESC_RATEMCS6;
1235 rate[3] = DESC_RATEMCS7;
1236 for (i = 0; i < 4; ++i)
1237 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1238 *rate_num = 4;
1239 break;
1240 case 0xC34:
1241 case 0xE34:
1242 case 0x1834:
1243 case 0x1A34:
1244 rate[0] = DESC_RATEMCS8;
1245 rate[1] = DESC_RATEMCS9;
1246 rate[2] = DESC_RATEMCS10;
1247 rate[3] = DESC_RATEMCS11;
1248 for (i = 0; i < 4; ++i)
1249 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1250 *rate_num = 4;
1251 break;
1252 case 0xC38:
1253 case 0xE38:
1254 case 0x1838:
1255 case 0x1A38:
1256 rate[0] = DESC_RATEMCS12;
1257 rate[1] = DESC_RATEMCS13;
1258 rate[2] = DESC_RATEMCS14;
1259 rate[3] = DESC_RATEMCS15;
1260 for (i = 0; i < 4; ++i)
1261 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1262 *rate_num = 4;
1263 break;
1264 case 0xC3C:
1265 case 0xE3C:
1266 case 0x183C:
1267 case 0x1A3C:
1268 rate[0] = DESC_RATEVHT1SS_MCS0;
1269 rate[1] = DESC_RATEVHT1SS_MCS1;
1270 rate[2] = DESC_RATEVHT1SS_MCS2;
1271 rate[3] = DESC_RATEVHT1SS_MCS3;
1272 for (i = 0; i < 4; ++i)
1273 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1274 *rate_num = 4;
1275 break;
1276 case 0xC40:
1277 case 0xE40:
1278 case 0x1840:
1279 case 0x1A40:
1280 rate[0] = DESC_RATEVHT1SS_MCS4;
1281 rate[1] = DESC_RATEVHT1SS_MCS5;
1282 rate[2] = DESC_RATEVHT1SS_MCS6;
1283 rate[3] = DESC_RATEVHT1SS_MCS7;
1284 for (i = 0; i < 4; ++i)
1285 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1286 *rate_num = 4;
1287 break;
1288 case 0xC44:
1289 case 0xE44:
1290 case 0x1844:
1291 case 0x1A44:
1292 rate[0] = DESC_RATEVHT1SS_MCS8;
1293 rate[1] = DESC_RATEVHT1SS_MCS9;
1294 rate[2] = DESC_RATEVHT2SS_MCS0;
1295 rate[3] = DESC_RATEVHT2SS_MCS1;
1296 for (i = 0; i < 4; ++i)
1297 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1298 *rate_num = 4;
1299 break;
1300 case 0xC48:
1301 case 0xE48:
1302 case 0x1848:
1303 case 0x1A48:
1304 rate[0] = DESC_RATEVHT2SS_MCS2;
1305 rate[1] = DESC_RATEVHT2SS_MCS3;
1306 rate[2] = DESC_RATEVHT2SS_MCS4;
1307 rate[3] = DESC_RATEVHT2SS_MCS5;
1308 for (i = 0; i < 4; ++i)
1309 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1310 *rate_num = 4;
1311 break;
1312 case 0xC4C:
1313 case 0xE4C:
1314 case 0x184C:
1315 case 0x1A4C:
1316 rate[0] = DESC_RATEVHT2SS_MCS6;
1317 rate[1] = DESC_RATEVHT2SS_MCS7;
1318 rate[2] = DESC_RATEVHT2SS_MCS8;
1319 rate[3] = DESC_RATEVHT2SS_MCS9;
1320 for (i = 0; i < 4; ++i)
1321 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1322 *rate_num = 4;
1323 break;
1324 case 0xCD8:
1325 case 0xED8:
1326 case 0x18D8:
1327 case 0x1AD8:
1328 rate[0] = DESC_RATEMCS16;
1329 rate[1] = DESC_RATEMCS17;
1330 rate[2] = DESC_RATEMCS18;
1331 rate[3] = DESC_RATEMCS19;
1332 for (i = 0; i < 4; ++i)
1333 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1334 *rate_num = 4;
1335 break;
1336 case 0xCDC:
1337 case 0xEDC:
1338 case 0x18DC:
1339 case 0x1ADC:
1340 rate[0] = DESC_RATEMCS20;
1341 rate[1] = DESC_RATEMCS21;
1342 rate[2] = DESC_RATEMCS22;
1343 rate[3] = DESC_RATEMCS23;
1344 for (i = 0; i < 4; ++i)
1345 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1346 *rate_num = 4;
1347 break;
1348 case 0xCE0:
1349 case 0xEE0:
1350 case 0x18E0:
1351 case 0x1AE0:
1352 rate[0] = DESC_RATEVHT3SS_MCS0;
1353 rate[1] = DESC_RATEVHT3SS_MCS1;
1354 rate[2] = DESC_RATEVHT3SS_MCS2;
1355 rate[3] = DESC_RATEVHT3SS_MCS3;
1356 for (i = 0; i < 4; ++i)
1357 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1358 *rate_num = 4;
1359 break;
1360 case 0xCE4:
1361 case 0xEE4:
1362 case 0x18E4:
1363 case 0x1AE4:
1364 rate[0] = DESC_RATEVHT3SS_MCS4;
1365 rate[1] = DESC_RATEVHT3SS_MCS5;
1366 rate[2] = DESC_RATEVHT3SS_MCS6;
1367 rate[3] = DESC_RATEVHT3SS_MCS7;
1368 for (i = 0; i < 4; ++i)
1369 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1370 *rate_num = 4;
1371 break;
1372 case 0xCE8:
1373 case 0xEE8:
1374 case 0x18E8:
1375 case 0x1AE8:
1376 rate[0] = DESC_RATEVHT3SS_MCS8;
1377 rate[1] = DESC_RATEVHT3SS_MCS9;
1378 for (i = 0; i < 2; ++i)
1379 pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
1380 *rate_num = 2;
1381 break;
1382 default:
1383 rtw_warn(rtwdev, "invalid tx power index addr 0x%08x\n", addr);
1384 break;
1385 }
1386}
1387
1388static void rtw_phy_store_tx_power_by_rate(struct rtw_dev *rtwdev,
1389 u32 band, u32 rfpath, u32 txnum,
1390 u32 regaddr, u32 bitmask, u32 data)
1391{
1392 struct rtw_hal *hal = &rtwdev->hal;
1393 u8 rate_num = 0;
1394 u8 rate;
1395 u8 rates[RTW_RF_PATH_MAX] = {0};
1396 s8 offset;
1397 s8 pwr_by_rate[RTW_RF_PATH_MAX] = {0};
1398 int i;
1399
1400 rtw_phy_get_rate_values_of_txpwr_by_rate(rtwdev, regaddr, bitmask, data,
1401 rates, pwr_by_rate, &rate_num);
1402
1403 if (WARN_ON(rfpath >= RTW_RF_PATH_MAX ||
1404 (band != PHY_BAND_2G && band != PHY_BAND_5G) ||
1405 rate_num > RTW_RF_PATH_MAX))
1406 return;
1407
1408 for (i = 0; i < rate_num; i++) {
1409 offset = pwr_by_rate[i];
1410 rate = rates[i];
1411 if (band == PHY_BAND_2G)
1412 hal->tx_pwr_by_rate_offset_2g[rfpath][rate] = offset;
1413 else if (band == PHY_BAND_5G)
1414 hal->tx_pwr_by_rate_offset_5g[rfpath][rate] = offset;
1415 else
1416 continue;
1417 }
1418}
1419
1420void rtw_parse_tbl_bb_pg(struct rtw_dev *rtwdev, const struct rtw_table *tbl)
1421{
1422 const struct rtw_phy_pg_cfg_pair *p = tbl->data;
1423 const struct rtw_phy_pg_cfg_pair *end = p + tbl->size;
1424
1425 for (; p < end; p++) {
1426 if (p->addr == 0xfe || p->addr == 0xffe) {
1427 msleep(50);
1428 continue;
1429 }
1430 rtw_phy_store_tx_power_by_rate(rtwdev, p->band, p->rf_path,
1431 p->tx_num, p->addr, p->bitmask,
1432 p->data);
1433 }
1434}
1435EXPORT_SYMBOL(rtw_parse_tbl_bb_pg);
1436
/* Supported 5G channel numbers in ascending order; the array index of a
 * channel is the ch_idx used by the tx power limit tables (see
 * rtw_channel_to_idx()).
 */
static const u8 rtw_channel_idx_5g[RTW_MAX_CHANNEL_NUM_5G] = {
	36, 38, 40, 42, 44, 46, 48, /* Band 1 */
	52, 54, 56, 58, 60, 62, 64, /* Band 2 */
	100, 102, 104, 106, 108, 110, 112, /* Band 3 */
	116, 118, 120, 122, 124, 126, 128, /* Band 3 */
	132, 134, 136, 138, 140, 142, 144, /* Band 3 */
	149, 151, 153, 155, 157, 159, 161, /* Band 4 */
	165, 167, 169, 171, 173, 175, 177}; /* Band 4 */
1445
1446static int rtw_channel_to_idx(u8 band, u8 channel)
1447{
1448 int ch_idx;
1449 u8 n_channel;
1450
1451 if (band == PHY_BAND_2G) {
1452 ch_idx = channel - 1;
1453 n_channel = RTW_MAX_CHANNEL_NUM_2G;
1454 } else if (band == PHY_BAND_5G) {
1455 n_channel = RTW_MAX_CHANNEL_NUM_5G;
1456 for (ch_idx = 0; ch_idx < n_channel; ch_idx++)
1457 if (rtw_channel_idx_5g[ch_idx] == channel)
1458 break;
1459 } else {
1460 return -1;
1461 }
1462
1463 if (ch_idx >= n_channel)
1464 return -1;
1465
1466 return ch_idx;
1467}
1468
1469static void rtw_phy_set_tx_power_limit(struct rtw_dev *rtwdev, u8 regd, u8 band,
1470 u8 bw, u8 rs, u8 ch, s8 pwr_limit)
1471{
1472 struct rtw_hal *hal = &rtwdev->hal;
1473 u8 max_power_index = rtwdev->chip->max_power_index;
1474 s8 ww;
1475 int ch_idx;
1476
1477 pwr_limit = clamp_t(s8, pwr_limit,
1478 -max_power_index, max_power_index);
1479 ch_idx = rtw_channel_to_idx(band, ch);
1480
1481 if (regd >= RTW_REGD_MAX || bw >= RTW_CHANNEL_WIDTH_MAX ||
1482 rs >= RTW_RATE_SECTION_MAX || ch_idx < 0) {
1483 WARN(1,
1484 "wrong txpwr_lmt regd=%u, band=%u bw=%u, rs=%u, ch_idx=%u, pwr_limit=%d\n",
1485 regd, band, bw, rs, ch_idx, pwr_limit);
1486 return;
1487 }
1488
1489 if (band == PHY_BAND_2G) {
1490 hal->tx_pwr_limit_2g[regd][bw][rs][ch_idx] = pwr_limit;
1491 ww = hal->tx_pwr_limit_2g[RTW_REGD_WW][bw][rs][ch_idx];
1492 ww = min_t(s8, ww, pwr_limit);
1493 hal->tx_pwr_limit_2g[RTW_REGD_WW][bw][rs][ch_idx] = ww;
1494 } else if (band == PHY_BAND_5G) {
1495 hal->tx_pwr_limit_5g[regd][bw][rs][ch_idx] = pwr_limit;
1496 ww = hal->tx_pwr_limit_5g[RTW_REGD_WW][bw][rs][ch_idx];
1497 ww = min_t(s8, ww, pwr_limit);
1498 hal->tx_pwr_limit_5g[RTW_REGD_WW][bw][rs][ch_idx] = ww;
1499 }
1500}
1501
1502
1503static void
1504rtw_xref_5g_txpwr_lmt(struct rtw_dev *rtwdev, u8 regd,
1505 u8 bw, u8 ch_idx, u8 rs_ht, u8 rs_vht)
1506{
1507 struct rtw_hal *hal = &rtwdev->hal;
1508 u8 max_power_index = rtwdev->chip->max_power_index;
1509 s8 lmt_ht = hal->tx_pwr_limit_5g[regd][bw][rs_ht][ch_idx];
1510 s8 lmt_vht = hal->tx_pwr_limit_5g[regd][bw][rs_vht][ch_idx];
1511
1512 if (lmt_ht == lmt_vht)
1513 return;
1514
1515 if (lmt_ht == max_power_index)
1516 hal->tx_pwr_limit_5g[regd][bw][rs_ht][ch_idx] = lmt_vht;
1517
1518 else if (lmt_vht == max_power_index)
1519 hal->tx_pwr_limit_5g[regd][bw][rs_vht][ch_idx] = lmt_ht;
1520}
1521
1522
1523static void
1524rtw_xref_txpwr_lmt_by_rs(struct rtw_dev *rtwdev, u8 regd, u8 bw, u8 ch_idx)
1525{
1526 u8 rs_idx, rs_ht, rs_vht;
1527 u8 rs_cmp[2][2] = {{RTW_RATE_SECTION_HT_1S, RTW_RATE_SECTION_VHT_1S},
1528 {RTW_RATE_SECTION_HT_2S, RTW_RATE_SECTION_VHT_2S} };
1529
1530 for (rs_idx = 0; rs_idx < 2; rs_idx++) {
1531 rs_ht = rs_cmp[rs_idx][0];
1532 rs_vht = rs_cmp[rs_idx][1];
1533
1534 rtw_xref_5g_txpwr_lmt(rtwdev, regd, bw, ch_idx, rs_ht, rs_vht);
1535 }
1536}
1537
1538
1539static void
1540rtw_xref_5g_txpwr_lmt_by_ch(struct rtw_dev *rtwdev, u8 regd, u8 bw)
1541{
1542 u8 ch_idx;
1543
1544 for (ch_idx = 0; ch_idx < RTW_MAX_CHANNEL_NUM_5G; ch_idx++)
1545 rtw_xref_txpwr_lmt_by_rs(rtwdev, regd, bw, ch_idx);
1546}
1547
1548
1549static void
1550rtw_xref_txpwr_lmt_by_bw(struct rtw_dev *rtwdev, u8 regd)
1551{
1552 u8 bw;
1553
1554 for (bw = RTW_CHANNEL_WIDTH_20; bw <= RTW_CHANNEL_WIDTH_40; bw++)
1555 rtw_xref_5g_txpwr_lmt_by_ch(rtwdev, regd, bw);
1556}
1557
1558
1559static void rtw_xref_txpwr_lmt(struct rtw_dev *rtwdev)
1560{
1561 u8 regd;
1562
1563 for (regd = 0; regd < RTW_REGD_MAX; regd++)
1564 rtw_xref_txpwr_lmt_by_bw(rtwdev, regd);
1565}
1566
1567void rtw_parse_tbl_txpwr_lmt(struct rtw_dev *rtwdev,
1568 const struct rtw_table *tbl)
1569{
1570 const struct rtw_txpwr_lmt_cfg_pair *p = tbl->data;
1571 const struct rtw_txpwr_lmt_cfg_pair *end = p + tbl->size;
1572
1573 for (; p < end; p++) {
1574 rtw_phy_set_tx_power_limit(rtwdev, p->regd, p->band,
1575 p->bw, p->rs, p->ch, p->txpwr_lmt);
1576 }
1577
1578 rtw_xref_txpwr_lmt(rtwdev);
1579}
1580EXPORT_SYMBOL(rtw_parse_tbl_txpwr_lmt);
1581
/* Table-load callback: write one byte-wide MAC register (tbl unused). */
void rtw_phy_cfg_mac(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
		     u32 addr, u32 data)
{
	rtw_write8(rtwdev, addr, data);
}
EXPORT_SYMBOL(rtw_phy_cfg_mac);
1588
/* Table-load callback: write one 32-bit AGC register (tbl unused). */
void rtw_phy_cfg_agc(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
		     u32 addr, u32 data)
{
	rtw_write32(rtwdev, addr, data);
}
EXPORT_SYMBOL(rtw_phy_cfg_agc);
1595
1596void rtw_phy_cfg_bb(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
1597 u32 addr, u32 data)
1598{
1599 if (addr == 0xfe)
1600 msleep(50);
1601 else if (addr == 0xfd)
1602 mdelay(5);
1603 else if (addr == 0xfc)
1604 mdelay(1);
1605 else if (addr == 0xfb)
1606 usleep_range(50, 60);
1607 else if (addr == 0xfa)
1608 udelay(5);
1609 else if (addr == 0xf9)
1610 udelay(1);
1611 else
1612 rtw_write32(rtwdev, addr, data);
1613}
1614EXPORT_SYMBOL(rtw_phy_cfg_bb);
1615
1616void rtw_phy_cfg_rf(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
1617 u32 addr, u32 data)
1618{
1619 if (addr == 0xffe) {
1620 msleep(50);
1621 } else if (addr == 0xfe) {
1622 usleep_range(100, 110);
1623 } else {
1624 rtw_write_rf(rtwdev, tbl->rf_path, addr, RFREG_MASK, data);
1625 udelay(1);
1626 }
1627}
1628EXPORT_SYMBOL(rtw_phy_cfg_rf);
1629
/* Power on the RF-calibration (RFK) related blocks and load the chip's
 * RFK init table, if the chip provides one.  The register/bit writes
 * below come from the vendor initialization sequence; keep the order
 * as-is -- their individual meanings are not documented here.
 */
static void rtw_load_rfk_table(struct rtw_dev *rtwdev)
{
	struct rtw_chip_info *chip = rtwdev->chip;
	struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;

	/* chips without an RFK table need none of this setup */
	if (!chip->rfk_init_tbl)
		return;

	rtw_write32_mask(rtwdev, 0x1e24, BIT(17), 0x1);
	rtw_write32_mask(rtwdev, 0x1cd0, BIT(28), 0x1);
	rtw_write32_mask(rtwdev, 0x1cd0, BIT(29), 0x1);
	rtw_write32_mask(rtwdev, 0x1cd0, BIT(30), 0x1);
	rtw_write32_mask(rtwdev, 0x1cd0, BIT(31), 0x0);

	rtw_load_table(rtwdev, chip->rfk_init_tbl);

	/* record that the DPK power domain is now on */
	dpk_info->is_dpk_pwr_on = true;
}
1648
1649void rtw_phy_load_tables(struct rtw_dev *rtwdev)
1650{
1651 struct rtw_chip_info *chip = rtwdev->chip;
1652 u8 rf_path;
1653
1654 rtw_load_table(rtwdev, chip->mac_tbl);
1655 rtw_load_table(rtwdev, chip->bb_tbl);
1656 rtw_load_table(rtwdev, chip->agc_tbl);
1657 rtw_load_rfk_table(rtwdev);
1658
1659 for (rf_path = 0; rf_path < rtwdev->hal.rf_path_num; rf_path++) {
1660 const struct rtw_table *tbl;
1661
1662 tbl = chip->rf_tbl[rf_path];
1663 rtw_load_table(rtwdev, tbl);
1664 }
1665}
1666EXPORT_SYMBOL(rtw_phy_load_tables);
1667
/* Map a channel number (2G or 5G) to its power-calibration group index.
 * Groups bundle neighbouring channels that share one calibration base
 * value.  Channel 14 is special: CCK rates (<= DESC_RATE11M) use their
 * own group 5, while other rates fall back to group 4.
 */
static u8 rtw_get_channel_group(u8 channel, u8 rate)
{
	switch (channel) {
	default:
		WARN_ON(1);
		fallthrough;
	case 1:
	case 2:
	case 36:
	case 38:
	case 40:
	case 42:
		return 0;
	case 3:
	case 4:
	case 5:
	case 44:
	case 46:
	case 48:
	case 50:
		return 1;
	case 6:
	case 7:
	case 8:
	case 52:
	case 54:
	case 56:
	case 58:
		return 2;
	case 9:
	case 10:
	case 11:
	case 60:
	case 62:
	case 64:
		return 3;
	case 12:
	case 13:
	case 100:
	case 102:
	case 104:
	case 106:
		return 4;
	case 14:
		/* CCK on channel 14 has its own calibration group */
		return rate <= DESC_RATE11M ? 5 : 4;
	case 108:
	case 110:
	case 112:
	case 114:
		return 5;
	case 116:
	case 118:
	case 120:
	case 122:
		return 6;
	case 124:
	case 126:
	case 128:
	case 130:
		return 7;
	case 132:
	case 134:
	case 136:
	case 138:
		return 8;
	case 140:
	case 142:
	case 144:
		return 9;
	case 149:
	case 151:
	case 153:
	case 155:
		return 10;
	case 157:
	case 159:
	case 161:
		return 11;
	case 165:
	case 167:
	case 169:
	case 171:
		return 12;
	case 173:
	case 175:
	case 177:
		return 13;
	}
}
1757
/* Return the tx power adjustment (in tx gain index units) for rates the
 * chip transmits with DPD disabled: -6 steps scaled by txgi_factor for
 * each rate present in the chip's dpd_ratemask, 0 otherwise.
 */
static s8 rtw_phy_get_dis_dpd_by_rate_diff(struct rtw_dev *rtwdev, u16 rate)
{
	struct rtw_chip_info *chip = rtwdev->chip;
	s8 dpd_diff = 0;

	/* chips that never disable DPD need no adjustment */
	if (!chip->en_dis_dpd)
		return 0;

/* Expands to one switch case that applies the backoff when the matching
 * DIS_DPD_RATE* bit is set in the chip's dpd_ratemask.
 */
#define RTW_DPD_RATE_CHECK(_rate)					\
	case DESC_RATE ## _rate:					\
		if (DIS_DPD_RATE ## _rate & chip->dpd_ratemask)		\
			dpd_diff = -6 * chip->txgi_factor;		\
		break

	switch (rate) {
	RTW_DPD_RATE_CHECK(6M);
	RTW_DPD_RATE_CHECK(9M);
	RTW_DPD_RATE_CHECK(MCS0);
	RTW_DPD_RATE_CHECK(MCS1);
	RTW_DPD_RATE_CHECK(MCS8);
	RTW_DPD_RATE_CHECK(MCS9);
	RTW_DPD_RATE_CHECK(VHT1SS_MCS0);
	RTW_DPD_RATE_CHECK(VHT1SS_MCS1);
	RTW_DPD_RATE_CHECK(VHT2SS_MCS0);
	RTW_DPD_RATE_CHECK(VHT2SS_MCS1);
	}
#undef RTW_DPD_RATE_CHECK

	return dpd_diff;
}
1788
/* Compute the base hardware tx power index for a 2.4 GHz rate from the
 * efuse calibration data.  Diff fields are scaled by the chip's
 * txgi_factor before being accumulated.
 */
static u8 rtw_phy_get_2g_tx_power_index(struct rtw_dev *rtwdev,
					struct rtw_2g_txpwr_idx *pwr_idx_2g,
					enum rtw_bandwidth bandwidth,
					u8 rate, u8 group)
{
	struct rtw_chip_info *chip = rtwdev->chip;
	u8 tx_power;
	bool mcs_rate;
	bool above_2ss;
	u8 factor = chip->txgi_factor;

	/* CCK rates have their own base; everything else starts from bw40 */
	if (rate <= DESC_RATE11M)
		tx_power = pwr_idx_2g->cck_base[group];
	else
		tx_power = pwr_idx_2g->bw40_base[group];

	/* legacy OFDM applies only the OFDM diff */
	if (rate >= DESC_RATE6M && rate <= DESC_RATE54M)
		tx_power += pwr_idx_2g->ht_1s_diff.ofdm * factor;

	mcs_rate = (rate >= DESC_RATEMCS0 && rate <= DESC_RATEMCS15) ||
		   (rate >= DESC_RATEVHT1SS_MCS0 &&
		    rate <= DESC_RATEVHT2SS_MCS9);
	above_2ss = (rate >= DESC_RATEMCS8 && rate <= DESC_RATEMCS15) ||
		    (rate >= DESC_RATEVHT2SS_MCS0);

	if (!mcs_rate)
		return tx_power;

	switch (bandwidth) {
	default:
		WARN_ON(1);
		fallthrough;
	case RTW_CHANNEL_WIDTH_20:
		tx_power += pwr_idx_2g->ht_1s_diff.bw20 * factor;
		if (above_2ss)
			tx_power += pwr_idx_2g->ht_2s_diff.bw20 * factor;
		break;
	case RTW_CHANNEL_WIDTH_40:
		/* the base is already bw40; only the 2SS diff applies */
		if (above_2ss)
			tx_power += pwr_idx_2g->ht_2s_diff.bw40 * factor;
		break;
	}

	return tx_power;
}
1835
/* Compute the base hardware tx power index for a 5 GHz rate from the
 * efuse calibration data.  Diff fields are scaled by the chip's
 * txgi_factor before being accumulated.
 */
static u8 rtw_phy_get_5g_tx_power_index(struct rtw_dev *rtwdev,
					struct rtw_5g_txpwr_idx *pwr_idx_5g,
					enum rtw_bandwidth bandwidth,
					u8 rate, u8 group)
{
	struct rtw_chip_info *chip = rtwdev->chip;
	u8 tx_power;
	u8 upper, lower;
	bool mcs_rate;
	bool above_2ss;
	u8 factor = chip->txgi_factor;

	tx_power = pwr_idx_5g->bw40_base[group];

	mcs_rate = (rate >= DESC_RATEMCS0 && rate <= DESC_RATEMCS15) ||
		   (rate >= DESC_RATEVHT1SS_MCS0 &&
		    rate <= DESC_RATEVHT2SS_MCS9);
	above_2ss = (rate >= DESC_RATEMCS8 && rate <= DESC_RATEMCS15) ||
		    (rate >= DESC_RATEVHT2SS_MCS0);

	/* non-MCS on 5G means legacy OFDM: apply the OFDM diff only */
	if (!mcs_rate) {
		tx_power += pwr_idx_5g->ht_1s_diff.ofdm * factor;
		return tx_power;
	}

	switch (bandwidth) {
	default:
		WARN_ON(1);
		fallthrough;
	case RTW_CHANNEL_WIDTH_20:
		tx_power += pwr_idx_5g->ht_1s_diff.bw20 * factor;
		if (above_2ss)
			tx_power += pwr_idx_5g->ht_2s_diff.bw20 * factor;
		break;
	case RTW_CHANNEL_WIDTH_40:
		/* the base is already bw40; only the 2SS diff applies */
		if (above_2ss)
			tx_power += pwr_idx_5g->ht_2s_diff.bw40 * factor;
		break;
	case RTW_CHANNEL_WIDTH_80:
		/* 80 MHz base is the mean of the two covered bw40 bases.
		 * NOTE(review): group + 1 assumes an 80 MHz channel never
		 * maps to the last bw40_base entry -- confirm against the
		 * group layout in rtw_get_channel_group().
		 */
		lower = pwr_idx_5g->bw40_base[group];
		upper = pwr_idx_5g->bw40_base[group + 1];

		tx_power = (lower + upper) / 2;
		tx_power += pwr_idx_5g->vht_1s_diff.bw80 * factor;
		if (above_2ss)
			tx_power += pwr_idx_5g->vht_2s_diff.bw80 * factor;
		break;
	}

	return tx_power;
}
1889
/* Look up the regulatory tx power limit for one rate, taking the
 * strictest limit over every bandwidth that covers the channel.
 * Returns the chip's max power index (i.e. effectively no limit) for
 * regds above RTW_REGD_WW.
 */
static s8 rtw_phy_get_tx_power_limit(struct rtw_dev *rtwdev, u8 band,
				     enum rtw_bandwidth bw, u8 rf_path,
				     u8 rate, u8 channel, u8 regd)
{
	struct rtw_hal *hal = &rtwdev->hal;
	u8 *cch_by_bw = hal->cch_by_bw;
	s8 power_limit = (s8)rtwdev->chip->max_power_index;
	u8 rs;
	int ch_idx;
	u8 cur_bw, cur_ch;
	s8 cur_lmt;

	if (regd > RTW_REGD_WW)
		return power_limit;

	/* map the rate to its rate section */
	if (rate >= DESC_RATE1M && rate <= DESC_RATE11M)
		rs = RTW_RATE_SECTION_CCK;
	else if (rate >= DESC_RATE6M && rate <= DESC_RATE54M)
		rs = RTW_RATE_SECTION_OFDM;
	else if (rate >= DESC_RATEMCS0 && rate <= DESC_RATEMCS7)
		rs = RTW_RATE_SECTION_HT_1S;
	else if (rate >= DESC_RATEMCS8 && rate <= DESC_RATEMCS15)
		rs = RTW_RATE_SECTION_HT_2S;
	else if (rate >= DESC_RATEVHT1SS_MCS0 && rate <= DESC_RATEVHT1SS_MCS9)
		rs = RTW_RATE_SECTION_VHT_1S;
	else if (rate >= DESC_RATEVHT2SS_MCS0 && rate <= DESC_RATEVHT2SS_MCS9)
		rs = RTW_RATE_SECTION_VHT_2S;
	else
		goto err;

	/* CCK/OFDM limits are only stored at 20 MHz */
	if (rs == RTW_RATE_SECTION_CCK || rs == RTW_RATE_SECTION_OFDM)
		bw = RTW_CHANNEL_WIDTH_20;

	/* HT rates have no 80 MHz limit entries */
	if (rs == RTW_RATE_SECTION_HT_1S || rs == RTW_RATE_SECTION_HT_2S)
		bw = min_t(u8, bw, RTW_CHANNEL_WIDTH_40);

	/* take the minimum limit across every bandwidth up to bw, using
	 * the center channel recorded for each bandwidth
	 */
	for (cur_bw = RTW_CHANNEL_WIDTH_20; cur_bw <= bw; cur_bw++) {
		cur_ch = cch_by_bw[cur_bw];

		ch_idx = rtw_channel_to_idx(band, cur_ch);
		if (ch_idx < 0)
			goto err;

		cur_lmt = cur_ch <= RTW_MAX_CHANNEL_NUM_2G ?
			hal->tx_pwr_limit_2g[regd][cur_bw][rs][ch_idx] :
			hal->tx_pwr_limit_5g[regd][cur_bw][rs][ch_idx];

		power_limit = min_t(s8, cur_lmt, power_limit);
	}

	return power_limit;

err:
	WARN(1, "invalid arguments, band=%d, bw=%d, path=%d, rate=%d, ch=%d\n",
	     band, bw, rf_path, rate, channel);
	return (s8)rtwdev->chip->max_power_index;
}
1950
/* Gather every component of a tx power computation (base index from
 * efuse, per-rate offset, regulatory limit, and the remnant from power
 * tracking) into @pwr_param for one path/rate/bandwidth/channel.
 */
void rtw_get_tx_power_params(struct rtw_dev *rtwdev, u8 path, u8 rate, u8 bw,
			     u8 ch, u8 regd, struct rtw_power_params *pwr_param)
{
	struct rtw_hal *hal = &rtwdev->hal;
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	struct rtw_txpwr_idx *pwr_idx;
	u8 group, band;
	u8 *base = &pwr_param->pwr_base;
	s8 *offset = &pwr_param->pwr_offset;
	s8 *limit = &pwr_param->pwr_limit;
	s8 *remnant = &pwr_param->pwr_remnant;

	pwr_idx = &rtwdev->efuse.txpwr_idx_table[path];
	group = rtw_get_channel_group(ch, rate);

	/* pick band-specific base index and per-rate offset by channel */
	if (IS_CH_2G_BAND(ch)) {
		band = PHY_BAND_2G;
		*base = rtw_phy_get_2g_tx_power_index(rtwdev,
						      &pwr_idx->pwr_idx_2g,
						      bw, rate, group);
		*offset = hal->tx_pwr_by_rate_offset_2g[path][rate];
	} else {
		band = PHY_BAND_5G;
		*base = rtw_phy_get_5g_tx_power_index(rtwdev,
						      &pwr_idx->pwr_idx_5g,
						      bw, rate, group);
		*offset = hal->tx_pwr_by_rate_offset_5g[path][rate];
	}

	*limit = rtw_phy_get_tx_power_limit(rtwdev, band, bw, path,
					    rate, ch, regd);
	/* power-tracking remnant is split between CCK and OFDM rates */
	*remnant = (rate <= DESC_RATE11M ? dm_info->txagc_remnant_cck :
		    dm_info->txagc_remnant_ofdm);
}
1986
1987u8
1988rtw_phy_get_tx_power_index(struct rtw_dev *rtwdev, u8 rf_path, u8 rate,
1989 enum rtw_bandwidth bandwidth, u8 channel, u8 regd)
1990{
1991 struct rtw_power_params pwr_param = {0};
1992 u8 tx_power;
1993 s8 offset;
1994
1995 rtw_get_tx_power_params(rtwdev, rf_path, rate, bandwidth,
1996 channel, regd, &pwr_param);
1997
1998 tx_power = pwr_param.pwr_base;
1999 offset = min_t(s8, pwr_param.pwr_offset, pwr_param.pwr_limit);
2000
2001 if (rtwdev->chip->en_dis_dpd)
2002 offset += rtw_phy_get_dis_dpd_by_rate_diff(rtwdev, rate);
2003
2004 tx_power += offset + pwr_param.pwr_remnant;
2005
2006 if (tx_power > rtwdev->chip->max_power_index)
2007 tx_power = rtwdev->chip->max_power_index;
2008
2009 return tx_power;
2010}
2011EXPORT_SYMBOL(rtw_phy_get_tx_power_index);
2012
2013static void rtw_phy_set_tx_power_index_by_rs(struct rtw_dev *rtwdev,
2014 u8 ch, u8 path, u8 rs)
2015{
2016 struct rtw_hal *hal = &rtwdev->hal;
2017 u8 regd = rtwdev->regd.txpwr_regd;
2018 u8 *rates;
2019 u8 size;
2020 u8 rate;
2021 u8 pwr_idx;
2022 u8 bw;
2023 int i;
2024
2025 if (rs >= RTW_RATE_SECTION_MAX)
2026 return;
2027
2028 rates = rtw_rate_section[rs];
2029 size = rtw_rate_size[rs];
2030 bw = hal->current_band_width;
2031 for (i = 0; i < size; i++) {
2032 rate = rates[i];
2033 pwr_idx = rtw_phy_get_tx_power_index(rtwdev, path, rate,
2034 bw, ch, regd);
2035 hal->tx_pwr_tbl[path][rate] = pwr_idx;
2036 }
2037}
2038
2039
2040
2041
2042
2043
2044static void rtw_phy_set_tx_power_level_by_path(struct rtw_dev *rtwdev,
2045 u8 ch, u8 path)
2046{
2047 struct rtw_hal *hal = &rtwdev->hal;
2048 u8 rs;
2049
2050
2051 if (hal->current_band_type == RTW_BAND_2G)
2052 rs = RTW_RATE_SECTION_CCK;
2053 else
2054 rs = RTW_RATE_SECTION_OFDM;
2055
2056 for (; rs < RTW_RATE_SECTION_MAX; rs++)
2057 rtw_phy_set_tx_power_index_by_rs(rtwdev, ch, path, rs);
2058}
2059
/* Recompute tx power indexes for all RF paths on @channel and program
 * them into hardware, serialized by hal->tx_power_mutex.
 */
void rtw_phy_set_tx_power_level(struct rtw_dev *rtwdev, u8 channel)
{
	struct rtw_chip_info *chip = rtwdev->chip;
	struct rtw_hal *hal = &rtwdev->hal;
	u8 path;

	mutex_lock(&hal->tx_power_mutex);

	/* fill hal->tx_pwr_tbl for every path first ... */
	for (path = 0; path < hal->rf_path_num; path++)
		rtw_phy_set_tx_power_level_by_path(rtwdev, channel, path);

	/* ... then let the chip ops push the whole table to hardware */
	chip->ops->set_tx_power_index(rtwdev);
	mutex_unlock(&hal->tx_power_mutex);
}
EXPORT_SYMBOL(rtw_phy_set_tx_power_level);
2075
/* Convert one rate section's per-rate offsets from absolute values into
 * deltas relative to the section's base rate, and record that base.
 * For VHT sections the base is taken from rates[size - 3]; presumably
 * this picks the MCS7 position as the reference instead of MCS9 --
 * confirm against the vendor power-by-rate layout.
 */
static void
rtw_phy_tx_power_by_rate_config_by_path(struct rtw_hal *hal, u8 path,
					u8 rs, u8 size, u8 *rates)
{
	u8 rate;
	u8 base_idx, rate_idx;
	s8 base_2g, base_5g;

	if (rs >= RTW_RATE_SECTION_VHT_1S)
		base_idx = rates[size - 3];
	else
		base_idx = rates[size - 1];
	base_2g = hal->tx_pwr_by_rate_offset_2g[path][base_idx];
	base_5g = hal->tx_pwr_by_rate_offset_5g[path][base_idx];
	hal->tx_pwr_by_rate_base_2g[path][rs] = base_2g;
	hal->tx_pwr_by_rate_base_5g[path][rs] = base_5g;
	/* rebase every rate of the section, including the base itself */
	for (rate = 0; rate < size; rate++) {
		rate_idx = rates[rate];
		hal->tx_pwr_by_rate_offset_2g[path][rate_idx] -= base_2g;
		hal->tx_pwr_by_rate_offset_5g[path][rate_idx] -= base_5g;
	}
}
2098
2099void rtw_phy_tx_power_by_rate_config(struct rtw_hal *hal)
2100{
2101 u8 path;
2102
2103 for (path = 0; path < RTW_RF_PATH_MAX; path++) {
2104 rtw_phy_tx_power_by_rate_config_by_path(hal, path,
2105 RTW_RATE_SECTION_CCK,
2106 rtw_cck_size, rtw_cck_rates);
2107 rtw_phy_tx_power_by_rate_config_by_path(hal, path,
2108 RTW_RATE_SECTION_OFDM,
2109 rtw_ofdm_size, rtw_ofdm_rates);
2110 rtw_phy_tx_power_by_rate_config_by_path(hal, path,
2111 RTW_RATE_SECTION_HT_1S,
2112 rtw_ht_1s_size, rtw_ht_1s_rates);
2113 rtw_phy_tx_power_by_rate_config_by_path(hal, path,
2114 RTW_RATE_SECTION_HT_2S,
2115 rtw_ht_2s_size, rtw_ht_2s_rates);
2116 rtw_phy_tx_power_by_rate_config_by_path(hal, path,
2117 RTW_RATE_SECTION_VHT_1S,
2118 rtw_vht_1s_size, rtw_vht_1s_rates);
2119 rtw_phy_tx_power_by_rate_config_by_path(hal, path,
2120 RTW_RATE_SECTION_VHT_2S,
2121 rtw_vht_2s_size, rtw_vht_2s_rates);
2122 }
2123}
2124
2125static void
2126__rtw_phy_tx_power_limit_config(struct rtw_hal *hal, u8 regd, u8 bw, u8 rs)
2127{
2128 s8 base;
2129 u8 ch;
2130
2131 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_2G; ch++) {
2132 base = hal->tx_pwr_by_rate_base_2g[0][rs];
2133 hal->tx_pwr_limit_2g[regd][bw][rs][ch] -= base;
2134 }
2135
2136 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_5G; ch++) {
2137 base = hal->tx_pwr_by_rate_base_5g[0][rs];
2138 hal->tx_pwr_limit_5g[regd][bw][rs][ch] -= base;
2139 }
2140}
2141
2142void rtw_phy_tx_power_limit_config(struct rtw_hal *hal)
2143{
2144 u8 regd, bw, rs;
2145
2146
2147 hal->cch_by_bw[RTW_CHANNEL_WIDTH_20] = 1;
2148
2149 for (regd = 0; regd < RTW_REGD_MAX; regd++)
2150 for (bw = 0; bw < RTW_CHANNEL_WIDTH_MAX; bw++)
2151 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++)
2152 __rtw_phy_tx_power_limit_config(hal, regd, bw, rs);
2153}
2154
2155static void rtw_phy_init_tx_power_limit(struct rtw_dev *rtwdev,
2156 u8 regd, u8 bw, u8 rs)
2157{
2158 struct rtw_hal *hal = &rtwdev->hal;
2159 s8 max_power_index = (s8)rtwdev->chip->max_power_index;
2160 u8 ch;
2161
2162
2163 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_2G; ch++)
2164 hal->tx_pwr_limit_2g[regd][bw][rs][ch] = max_power_index;
2165
2166
2167 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_5G; ch++)
2168 hal->tx_pwr_limit_5g[regd][bw][rs][ch] = max_power_index;
2169}
2170
2171void rtw_phy_init_tx_power(struct rtw_dev *rtwdev)
2172{
2173 struct rtw_hal *hal = &rtwdev->hal;
2174 u8 regd, path, rate, rs, bw;
2175
2176
2177 for (path = 0; path < RTW_RF_PATH_MAX; path++) {
2178 for (rate = 0; rate < DESC_RATE_MAX; rate++) {
2179 hal->tx_pwr_by_rate_offset_2g[path][rate] = 0;
2180 hal->tx_pwr_by_rate_offset_5g[path][rate] = 0;
2181 }
2182 }
2183
2184
2185 for (regd = 0; regd < RTW_REGD_MAX; regd++)
2186 for (bw = 0; bw < RTW_CHANNEL_WIDTH_MAX; bw++)
2187 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++)
2188 rtw_phy_init_tx_power_limit(rtwdev, regd, bw,
2189 rs);
2190}
2191
/* Select the positive/negative power-tracking delta tables for both RF
 * paths based on the current band and channel (and CCK vs non-CCK rate
 * on 2.4 GHz).  The final else falls back to the 2G tables; presumably
 * it is only reached with an out-of-range channel -- confirm.
 */
void rtw_phy_config_swing_table(struct rtw_dev *rtwdev,
				struct rtw_swing_table *swing_table)
{
	const struct rtw_pwr_track_tbl *tbl = rtwdev->chip->pwr_track_tbl;
	u8 channel = rtwdev->hal.current_channel;

	if (IS_CH_2G_BAND(channel)) {
		/* CCK rates have dedicated 2G tracking tables */
		if (rtwdev->dm_info.tx_rate <= DESC_RATE11M) {
			swing_table->p[RF_PATH_A] = tbl->pwrtrk_2g_ccka_p;
			swing_table->n[RF_PATH_A] = tbl->pwrtrk_2g_ccka_n;
			swing_table->p[RF_PATH_B] = tbl->pwrtrk_2g_cckb_p;
			swing_table->n[RF_PATH_B] = tbl->pwrtrk_2g_cckb_n;
		} else {
			swing_table->p[RF_PATH_A] = tbl->pwrtrk_2ga_p;
			swing_table->n[RF_PATH_A] = tbl->pwrtrk_2ga_n;
			swing_table->p[RF_PATH_B] = tbl->pwrtrk_2gb_p;
			swing_table->n[RF_PATH_B] = tbl->pwrtrk_2gb_n;
		}
	} else if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
		swing_table->p[RF_PATH_A] = tbl->pwrtrk_5ga_p[RTW_PWR_TRK_5G_1];
		swing_table->n[RF_PATH_A] = tbl->pwrtrk_5ga_n[RTW_PWR_TRK_5G_1];
		swing_table->p[RF_PATH_B] = tbl->pwrtrk_5gb_p[RTW_PWR_TRK_5G_1];
		swing_table->n[RF_PATH_B] = tbl->pwrtrk_5gb_n[RTW_PWR_TRK_5G_1];
	} else if (IS_CH_5G_BAND_3(channel)) {
		swing_table->p[RF_PATH_A] = tbl->pwrtrk_5ga_p[RTW_PWR_TRK_5G_2];
		swing_table->n[RF_PATH_A] = tbl->pwrtrk_5ga_n[RTW_PWR_TRK_5G_2];
		swing_table->p[RF_PATH_B] = tbl->pwrtrk_5gb_p[RTW_PWR_TRK_5G_2];
		swing_table->n[RF_PATH_B] = tbl->pwrtrk_5gb_n[RTW_PWR_TRK_5G_2];
	} else if (IS_CH_5G_BAND_4(channel)) {
		swing_table->p[RF_PATH_A] = tbl->pwrtrk_5ga_p[RTW_PWR_TRK_5G_3];
		swing_table->n[RF_PATH_A] = tbl->pwrtrk_5ga_n[RTW_PWR_TRK_5G_3];
		swing_table->p[RF_PATH_B] = tbl->pwrtrk_5gb_p[RTW_PWR_TRK_5G_3];
		swing_table->n[RF_PATH_B] = tbl->pwrtrk_5gb_n[RTW_PWR_TRK_5G_3];
	} else {
		swing_table->p[RF_PATH_A] = tbl->pwrtrk_2ga_p;
		swing_table->n[RF_PATH_A] = tbl->pwrtrk_2ga_n;
		swing_table->p[RF_PATH_B] = tbl->pwrtrk_2gb_p;
		swing_table->n[RF_PATH_B] = tbl->pwrtrk_2gb_n;
	}
}
EXPORT_SYMBOL(rtw_phy_config_swing_table);
2233
/* Feed one raw thermal sensor sample into the per-path EWMA and cache
 * the new average in dm_info->thermal_avg for cheap later reads.
 */
void rtw_phy_pwrtrack_avg(struct rtw_dev *rtwdev, u8 thermal, u8 path)
{
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;

	ewma_thermal_add(&dm_info->avg_thermal[path], thermal);
	dm_info->thermal_avg[path] =
		ewma_thermal_read(&dm_info->avg_thermal[path]);
}
EXPORT_SYMBOL(rtw_phy_pwrtrack_avg);
2243
2244bool rtw_phy_pwrtrack_thermal_changed(struct rtw_dev *rtwdev, u8 thermal,
2245 u8 path)
2246{
2247 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2248 u8 avg = ewma_thermal_read(&dm_info->avg_thermal[path]);
2249
2250 if (avg == thermal)
2251 return false;
2252
2253 return true;
2254}
2255EXPORT_SYMBOL(rtw_phy_pwrtrack_thermal_changed);
2256
2257u8 rtw_phy_pwrtrack_get_delta(struct rtw_dev *rtwdev, u8 path)
2258{
2259 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2260 u8 therm_avg, therm_efuse, therm_delta;
2261
2262 therm_avg = dm_info->thermal_avg[path];
2263 therm_efuse = rtwdev->efuse.thermal_meter[path];
2264 therm_delta = abs(therm_avg - therm_efuse);
2265
2266 return min_t(u8, therm_delta, RTW_PWR_TRK_TBL_SZ - 1);
2267}
2268EXPORT_SYMBOL(rtw_phy_pwrtrack_get_delta);
2269
2270s8 rtw_phy_pwrtrack_get_pwridx(struct rtw_dev *rtwdev,
2271 struct rtw_swing_table *swing_table,
2272 u8 tbl_path, u8 therm_path, u8 delta)
2273{
2274 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2275 const u8 *delta_swing_table_idx_pos;
2276 const u8 *delta_swing_table_idx_neg;
2277
2278 if (delta >= RTW_PWR_TRK_TBL_SZ) {
2279 rtw_warn(rtwdev, "power track table overflow\n");
2280 return 0;
2281 }
2282
2283 if (!swing_table) {
2284 rtw_warn(rtwdev, "swing table not configured\n");
2285 return 0;
2286 }
2287
2288 delta_swing_table_idx_pos = swing_table->p[tbl_path];
2289 delta_swing_table_idx_neg = swing_table->n[tbl_path];
2290
2291 if (!delta_swing_table_idx_pos || !delta_swing_table_idx_neg) {
2292 rtw_warn(rtwdev, "invalid swing table index\n");
2293 return 0;
2294 }
2295
2296 if (dm_info->thermal_avg[therm_path] >
2297 rtwdev->efuse.thermal_meter[therm_path])
2298 return delta_swing_table_idx_pos[delta];
2299 else
2300 return -delta_swing_table_idx_neg[delta];
2301}
2302EXPORT_SYMBOL(rtw_phy_pwrtrack_get_pwridx);
2303
2304bool rtw_phy_pwrtrack_need_lck(struct rtw_dev *rtwdev)
2305{
2306 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2307 u8 delta_lck;
2308
2309 delta_lck = abs(dm_info->thermal_avg[0] - dm_info->thermal_meter_lck);
2310 if (delta_lck >= rtwdev->chip->lck_threshold) {
2311 dm_info->thermal_meter_lck = dm_info->thermal_avg[0];
2312 return true;
2313 }
2314 return false;
2315}
2316EXPORT_SYMBOL(rtw_phy_pwrtrack_need_lck);
2317
2318bool rtw_phy_pwrtrack_need_iqk(struct rtw_dev *rtwdev)
2319{
2320 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2321 u8 delta_iqk;
2322
2323 delta_iqk = abs(dm_info->thermal_avg[0] - dm_info->thermal_meter_k);
2324 if (delta_iqk >= rtwdev->chip->iqk_threshold) {
2325 dm_info->thermal_meter_k = dm_info->thermal_avg[0];
2326 return true;
2327 }
2328 return false;
2329}
2330EXPORT_SYMBOL(rtw_phy_pwrtrack_need_iqk);
2331
/* Program the 1SS TX path selection if it differs from the cached one;
 * CCK follows the same path as the 1SS selection.
 */
static void rtw_phy_set_tx_path_by_reg(struct rtw_dev *rtwdev,
				       enum rtw_bb_path tx_path_sel_1ss)
{
	struct rtw_path_div *path_div = &rtwdev->dm_path_div;
	enum rtw_bb_path tx_path_sel_cck = tx_path_sel_1ss;
	struct rtw_chip_info *chip = rtwdev->chip;

	/* avoid redundant register writes when the path is unchanged */
	if (tx_path_sel_1ss == path_div->current_tx_path)
		return;

	path_div->current_tx_path = tx_path_sel_1ss;
	rtw_dbg(rtwdev, RTW_DBG_PATH_DIV, "Switch TX path=%s\n",
		tx_path_sel_1ss == BB_PATH_A ? "A" : "B");
	chip->ops->config_tx_path(rtwdev, rtwdev->hal.antenna_tx,
				  tx_path_sel_1ss, tx_path_sel_cck, false);
}
2348
2349static void rtw_phy_tx_path_div_select(struct rtw_dev *rtwdev)
2350{
2351 struct rtw_path_div *path_div = &rtwdev->dm_path_div;
2352 enum rtw_bb_path path = path_div->current_tx_path;
2353 s32 rssi_a = 0, rssi_b = 0;
2354
2355 if (path_div->path_a_cnt)
2356 rssi_a = path_div->path_a_sum / path_div->path_a_cnt;
2357 else
2358 rssi_a = 0;
2359 if (path_div->path_b_cnt)
2360 rssi_b = path_div->path_b_sum / path_div->path_b_cnt;
2361 else
2362 rssi_b = 0;
2363
2364 if (rssi_a != rssi_b)
2365 path = (rssi_a > rssi_b) ? BB_PATH_A : BB_PATH_B;
2366
2367 path_div->path_a_cnt = 0;
2368 path_div->path_a_sum = 0;
2369 path_div->path_b_cnt = 0;
2370 path_div->path_b_sum = 0;
2371 rtw_phy_set_tx_path_by_reg(rtwdev, path);
2372}
2373
2374static void rtw_phy_tx_path_diversity_2ss(struct rtw_dev *rtwdev)
2375{
2376 if (rtwdev->hal.antenna_rx != BB_PATH_AB) {
2377 rtw_dbg(rtwdev, RTW_DBG_PATH_DIV,
2378 "[Return] tx_Path_en=%d, rx_Path_en=%d\n",
2379 rtwdev->hal.antenna_tx, rtwdev->hal.antenna_rx);
2380 return;
2381 }
2382 if (rtwdev->sta_cnt == 0) {
2383 rtw_dbg(rtwdev, RTW_DBG_PATH_DIV, "No Link\n");
2384 return;
2385 }
2386
2387 rtw_phy_tx_path_div_select(rtwdev);
2388}
2389
/* Periodic entry point for TX path diversity; no-op on chips that do
 * not support it.
 */
void rtw_phy_tx_path_diversity(struct rtw_dev *rtwdev)
{
	struct rtw_chip_info *chip = rtwdev->chip;

	if (!chip->path_div_supported)
		return;

	rtw_phy_tx_path_diversity_2ss(rtwdev);
}
2399