// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 MediaTek Inc. */

#include "mt7915.h"
#include "mac.h"
#include "mcu.h"
#include "testmode.h"

enum {
	TM_CHANGED_TXPOWER,
	TM_CHANGED_FREQ_OFFSET,

	/* must be last */
	NUM_TM_CHANGED
};

static const u8 tm_change_map[] = {
	[TM_CHANGED_TXPOWER] = MT76_TM_ATTR_TX_POWER,
	[TM_CHANGED_FREQ_OFFSET] = MT76_TM_ATTR_FREQ_OFFSET,
};

struct reg_band {
	u32 band[2];
};

#define REG_BAND(_reg) \
	{ .band[0] = MT_##_reg(0), .band[1] = MT_##_reg(1) }
#define REG_BAND_IDX(_reg, _idx) \
	{ .band[0] = MT_##_reg(0, _idx), .band[1] = MT_##_reg(1, _idx) }

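/*
 * MAC registers saved when test mode is enabled and written back by
 * mt7915_tm_reg_backup_restore() when it is switched off again.
 */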
static const struct reg_band reg_backup_list[] = {
	REG_BAND_IDX(AGG_PCR0, 0),
	REG_BAND_IDX(AGG_PCR0, 1),
	REG_BAND_IDX(AGG_AWSCR0, 0),
	REG_BAND_IDX(AGG_AWSCR0, 1),
	REG_BAND_IDX(AGG_AWSCR0, 2),
	REG_BAND_IDX(AGG_AWSCR0, 3),
	REG_BAND(AGG_MRCR),
	REG_BAND(TMAC_TFCR0),
	REG_BAND(TMAC_TCR0),
	REG_BAND(AGG_ATCR1),
	REG_BAND(AGG_ATCR3),
	REG_BAND(TMAC_TRCR0),
	REG_BAND(TMAC_ICR0),
	REG_BAND_IDX(ARB_DRNGR0, 0),
	REG_BAND_IDX(ARB_DRNGR0, 1),
	REG_BAND(WF_RFCR),
	REG_BAND(WF_RFCR1),
};

static int
mt7915_tm_set_tx_power(struct mt7915_phy *phy)
{
	struct mt7915_dev *dev = phy->dev;
	struct mt76_phy *mphy = phy->mt76;
	struct cfg80211_chan_def *chandef = &mphy->chandef;
	int freq = chandef->center_freq1;
	int ret;
	struct {
		u8 format_id;
		u8 dbdc_idx;
		s8 tx_power;
		u8 ant_idx;
		u8 center_chan;
		u8 rsv[3];
	} __packed req = {
		.format_id = 0xf,
		.dbdc_idx = phy != &dev->phy,
		.center_chan = ieee80211_frequency_to_channel(freq),
	};
	u8 *tx_power = NULL;

	if (phy->mt76->test.state != MT76_TM_STATE_OFF)
		tx_power = phy->mt76->test.tx_power;

	/* Tx power of the other antennas is the same as antenna 0 */
	if (tx_power && tx_power[0])
		req.tx_power = tx_power[0];

	ret = mt76_mcu_send_msg(&dev->mt76,
				MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
				&req, sizeof(req), false);

	return ret;
}

static int
mt7915_tm_set_freq_offset(struct mt7915_phy *phy, bool en, u32 val)
{
	struct mt7915_dev *dev = phy->dev;
	struct mt7915_tm_cmd req = {
		.testmode_en = en,
		.param_idx = MCU_ATE_SET_FREQ_OFFSET,
		.param.freq.band = phy != &dev->phy,
		.param.freq.freq_offset = cpu_to_le32(val),
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
mt7915_tm_mode_ctrl(struct mt7915_dev *dev, bool enable)
{
	struct {
		u8 format_id;
		bool enable;
		u8 rsv[2];
	} __packed req = {
		.format_id = 0x6,
		.enable = enable,
	};

	return mt76_mcu_send_msg(&dev->mt76,
				 MCU_EXT_CMD(TX_POWER_FEATURE_CTRL),
				 &req, sizeof(req), false);
}

static int
mt7915_tm_set_trx(struct mt7915_phy *phy, int type, bool en)
{
	struct mt7915_dev *dev = phy->dev;
	struct mt7915_tm_cmd req = {
		.testmode_en = 1,
		.param_idx = MCU_ATE_SET_TRX,
		.param.trx.type = type,
		.param.trx.enable = en,
		.param.trx.band = phy != &dev->phy,
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
mt7915_tm_clean_hwq(struct mt7915_phy *phy, u8 wcid)
{
	struct mt7915_dev *dev = phy->dev;
	struct mt7915_tm_cmd req = {
		.testmode_en = 1,
		.param_idx = MCU_ATE_CLEAN_TXQUEUE,
		.param.clean.wcid = wcid,
		.param.clean.band = phy != &dev->phy,
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
mt7915_tm_set_slot_time(struct mt7915_phy *phy, u8 slot_time, u8 sifs)
{
	struct mt7915_dev *dev = phy->dev;
	struct mt7915_tm_cmd req = {
		.testmode_en = !(phy->mt76->test.state == MT76_TM_STATE_OFF),
		.param_idx = MCU_ATE_SET_SLOT_TIME,
		.param.slot.slot_time = slot_time,
		.param.slot.sifs = sifs,
		.param.slot.rifs = 2,
		.param.slot.eifs = cpu_to_le16(60),
		.param.slot.band = phy != &dev->phy,
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(ATE_CTRL), &req,
				 sizeof(req), false);
}

static int
mt7915_tm_set_wmm_qid(struct mt7915_dev *dev, u8 qid, u8 aifs, u8 cw_min,
		      u16 cw_max, u16 txop)
{
	struct mt7915_mcu_tx req = { .total = 1 };
	struct edca *e = &req.edca[0];

	e->queue = qid;
	e->set = WMM_PARAM_SET;

	e->aifs = aifs;
	e->cw_min = cw_min;
	e->cw_max = cpu_to_le16(cw_max);
	e->txop = cpu_to_le16(txop);

	return mt7915_mcu_update_edca(dev, &req);
}

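/*
 * Translate the requested inter-packet gap (usec) into MAC timing
 * parameters: signal extension, slot time, SIFS, AIFSN and a fixed
 * contention window, chosen so that their sum approximates the gap.
 */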
static int
mt7915_tm_set_ipg_params(struct mt7915_phy *phy, u32 ipg, u8 mode)
{
#define TM_DEFAULT_SIFS	10
#define TM_MAX_SIFS	127
#define TM_MAX_AIFSN	0xf
#define TM_MIN_AIFSN	0x1
#define BBP_PROC_TIME	1500
	struct mt7915_dev *dev = phy->dev;
	u8 sig_ext = (mode == MT76_TM_TX_MODE_CCK) ? 0 : 6;
	u8 slot_time = 9, sifs = TM_DEFAULT_SIFS;
	u8 aifsn = TM_MIN_AIFSN;
	u32 i2t_time, tr2t_time, txv_time;
	bool ext_phy = phy != &dev->phy;
	u16 cw = 0;

	if (ipg < sig_ext + slot_time + sifs)
		ipg = 0;

	if (!ipg)
		goto done;

	ipg -= sig_ext;

	if (ipg <= (TM_MAX_SIFS + slot_time)) {
		sifs = ipg - slot_time;
	} else {
		u32 val = (ipg + slot_time) / slot_time;

		while (val >>= 1)
			cw++;

		if (cw > 16)
			cw = 16;

		ipg -= ((1 << cw) - 1) * slot_time;

		aifsn = ipg / slot_time;
		if (aifsn > TM_MAX_AIFSN)
			aifsn = TM_MAX_AIFSN;

		ipg -= aifsn * slot_time;

		if (ipg > TM_DEFAULT_SIFS) {
			if (ipg < TM_MAX_SIFS)
				sifs = ipg;
			else
				sifs = TM_MAX_SIFS;
		}
	}
done:
	txv_time = mt76_get_field(dev, MT_TMAC_ATCR(ext_phy),
				  MT_TMAC_ATCR_TXV_TOUT);
	txv_time *= 50;

	i2t_time = (slot_time * 1000 - txv_time - BBP_PROC_TIME) / 50;
	tr2t_time = (sifs * 1000 - txv_time - BBP_PROC_TIME) / 50;

	mt76_set(dev, MT_TMAC_TRCR0(ext_phy),
		 FIELD_PREP(MT_TMAC_TRCR0_TR2T_CHK, tr2t_time) |
		 FIELD_PREP(MT_TMAC_TRCR0_I2T_CHK, i2t_time));

	mt7915_tm_set_slot_time(phy, slot_time, sifs);

	return mt7915_tm_set_wmm_qid(dev,
				     mt7915_lmac_mapping(dev, IEEE80211_AC_BE),
				     aifsn, cw, cw, 0);
}

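/*
 * Work out how long a frame needs to be to keep the PHY busy for roughly
 * tx_time microseconds at the configured rate, then allocate the test-mode
 * template skb of that length.
 */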
static int
mt7915_tm_set_tx_len(struct mt7915_phy *phy, u32 tx_time)
{
	struct mt76_phy *mphy = phy->mt76;
	struct mt76_testmode_data *td = &mphy->test;
	struct ieee80211_supported_band *sband;
	struct rate_info rate = {};
	u16 flags = 0, tx_len;
	u32 bitrate;
	int ret;

	if (!tx_time)
		return 0;

	rate.mcs = td->tx_rate_idx;
	rate.nss = td->tx_rate_nss;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
		if (mphy->chandef.chan->band == NL80211_BAND_5GHZ)
			sband = &mphy->sband_5g.sband;
		else
			sband = &mphy->sband_2g.sband;

		rate.legacy = sband->bitrates[rate.mcs].bitrate;
		break;
	case MT76_TM_TX_MODE_HT:
		rate.mcs += rate.nss * 8;
		flags |= RATE_INFO_FLAGS_MCS;

		if (td->tx_rate_sgi)
			flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT76_TM_TX_MODE_VHT:
		flags |= RATE_INFO_FLAGS_VHT_MCS;

		if (td->tx_rate_sgi)
			flags |= RATE_INFO_FLAGS_SHORT_GI;
		break;
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		rate.he_gi = td->tx_rate_sgi;
		flags |= RATE_INFO_FLAGS_HE_MCS;
		break;
	default:
		break;
	}
	rate.flags = flags;

	switch (mphy->chandef.width) {
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_80P80:
		rate.bw = RATE_INFO_BW_160;
		break;
	case NL80211_CHAN_WIDTH_80:
		rate.bw = RATE_INFO_BW_80;
		break;
	case NL80211_CHAN_WIDTH_40:
		rate.bw = RATE_INFO_BW_40;
		break;
	default:
		rate.bw = RATE_INFO_BW_20;
		break;
	}

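	/* cfg80211_calculate_bitrate() reports the rate in 100 kbit/s units */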
	bitrate = cfg80211_calculate_bitrate(&rate);
	tx_len = bitrate * tx_time / 10 / 8;

	ret = mt76_testmode_alloc_skb(phy->mt76, tx_len);
	if (ret)
		return ret;

	return 0;
}

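/*
 * On the first transition into test mode, save the registers listed in
 * reg_backup_list, then program test-mode friendly values (frame
 * protection disabled, Rx filter opened up). When test mode is switched
 * off again, the saved values are written back.
 */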
static void
mt7915_tm_reg_backup_restore(struct mt7915_phy *phy)
{
	int n_regs = ARRAY_SIZE(reg_backup_list);
	struct mt7915_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;
	u32 *b = phy->test.reg_backup;
	int i;

	if (phy->mt76->test.state == MT76_TM_STATE_OFF) {
		for (i = 0; i < n_regs; i++)
			mt76_wr(dev, reg_backup_list[i].band[ext_phy], b[i]);
		return;
	}

	if (b)
		return;

	b = devm_kzalloc(dev->mt76.dev, 4 * n_regs, GFP_KERNEL);
	if (!b)
		return;

	phy->test.reg_backup = b;
	for (i = 0; i < n_regs; i++)
		b[i] = mt76_rr(dev, reg_backup_list[i].band[ext_phy]);

	mt76_clear(dev, MT_AGG_PCR0(ext_phy, 0), MT_AGG_PCR0_MM_PROT |
		   MT_AGG_PCR0_GF_PROT | MT_AGG_PCR0_ERP_PROT |
		   MT_AGG_PCR0_VHT_PROT | MT_AGG_PCR0_BW20_PROT |
		   MT_AGG_PCR0_BW40_PROT | MT_AGG_PCR0_BW80_PROT);
	mt76_set(dev, MT_AGG_PCR0(ext_phy, 0), MT_AGG_PCR0_PTA_WIN_DIS);

	mt76_wr(dev, MT_AGG_PCR0(ext_phy, 1), MT_AGG_PCR1_RTS0_NUM_THRES |
		MT_AGG_PCR1_RTS0_LEN_THRES);

	mt76_clear(dev, MT_AGG_MRCR(ext_phy), MT_AGG_MRCR_BAR_CNT_LIMIT |
		   MT_AGG_MRCR_LAST_RTS_CTS_RN | MT_AGG_MRCR_RTS_FAIL_LIMIT |
		   MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT);

	mt76_rmw(dev, MT_AGG_MRCR(ext_phy), MT_AGG_MRCR_RTS_FAIL_LIMIT |
		 MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT,
		 FIELD_PREP(MT_AGG_MRCR_RTS_FAIL_LIMIT, 1) |
		 FIELD_PREP(MT_AGG_MRCR_TXCMD_RTS_FAIL_LIMIT, 1));

	mt76_wr(dev, MT_TMAC_TFCR0(ext_phy), 0);
	mt76_clear(dev, MT_TMAC_TCR0(ext_phy), MT_TMAC_TCR0_TBTT_STOP_CTRL);

	/* config rx filter for testmode rx */
	mt76_wr(dev, MT_WF_RFCR(ext_phy), 0xcf70a);
	mt76_wr(dev, MT_WF_RFCR1(ext_phy), 0);
}

static void
mt7915_tm_init(struct mt7915_phy *phy, bool en)
{
	struct mt7915_dev *dev = phy->dev;

	if (!test_bit(MT76_STATE_RUNNING, &phy->mt76->state))
		return;

	mt7915_mcu_set_sku_en(phy, !en);

	mt7915_tm_mode_ctrl(dev, en);
	mt7915_tm_reg_backup_restore(phy);
	mt7915_tm_set_trx(phy, TM_MAC_TXRX, !en);

	mt7915_mcu_add_bss_info(phy, phy->monitor_vif, en);
}

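/*
 * mt7915_set_channel() takes dev->mt76.mutex itself, so drop the lock
 * around the call before reprogramming the Rx path.
 */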
static void
mt7915_tm_update_channel(struct mt7915_phy *phy)
{
	mutex_unlock(&phy->dev->mt76.mutex);
	mt7915_set_channel(phy);
	mutex_lock(&phy->dev->mt76.mutex);

	mt7915_mcu_set_chan_info(phy, MCU_EXT_CMD(SET_RX_PATH));
}

static void
mt7915_tm_set_tx_frames(struct mt7915_phy *phy, bool en)
{
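	/* spatial extension index, looked up by the Tx antenna bitmap */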
	static const u8 spe_idx_map[] = {0, 0, 1, 0, 3, 2, 4, 0,
					 9, 8, 6, 10, 16, 12, 18, 0};
	struct mt76_testmode_data *td = &phy->mt76->test;
	struct mt7915_dev *dev = phy->dev;
	struct ieee80211_tx_info *info;
	u8 duty_cycle = td->tx_duty_cycle;
	u32 tx_time = td->tx_time;
	u32 ipg = td->tx_ipg;

	mt7915_tm_set_trx(phy, TM_MAC_RX_RXV, false);
	mt7915_tm_clean_hwq(phy, dev->mt76.global_wcid.idx);

	if (en) {
		mt7915_tm_update_channel(phy);

		if (td->tx_spe_idx) {
			phy->test.spe_idx = td->tx_spe_idx;
		} else {
			u8 tx_ant = td->tx_antenna_mask;

			if (phy != &dev->phy)
				tx_ant >>= 2;
			phy->test.spe_idx = spe_idx_map[tx_ant];
		}
	}

	/* if both tx_time and tx_ipg are set, duty_cycle is ignored */
	if (duty_cycle && tx_time && !ipg) {
		ipg = tx_time * 100 / duty_cycle - tx_time;
	} else if (duty_cycle && !tx_time && ipg) {
		if (duty_cycle < 100)
			tx_time = duty_cycle * ipg / (100 - duty_cycle);
	}

	mt7915_tm_set_ipg_params(phy, ipg, td->tx_rate_mode);
	mt7915_tm_set_tx_len(phy, tx_time);

	if (ipg)
		td->tx_queued_limit = MT76_TM_TIMEOUT * 1000000 / ipg / 2;

	if (!en || !td->tx_skb)
		return;

	info = IEEE80211_SKB_CB(td->tx_skb);
	info->control.vif = phy->monitor_vif;

	mt7915_tm_set_trx(phy, TM_MAC_TX, en);
}

static void
mt7915_tm_set_rx_frames(struct mt7915_phy *phy, bool en)
{
	mt7915_tm_set_trx(phy, TM_MAC_RX_RXV, false);

	if (en) {
		struct mt7915_dev *dev = phy->dev;

		mt7915_tm_update_channel(phy);

		/* read-clear the FCS error counter */
		mt76_rr(dev, MT_MIB_SDR3(phy != &dev->phy));
		mt7915_tm_set_trx(phy, TM_MAC_RX_RXV, en);
	}
}

static int
mt7915_tm_rf_switch_mode(struct mt7915_dev *dev, u32 oper)
{
	struct mt7915_tm_rf_test req = {
		.op.op_mode = cpu_to_le32(oper),
	};

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
				 sizeof(req), true);
}

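/*
 * Start or stop continuous Tx via the RF_TEST MCU command, encoding the
 * control/center channel, bandwidth, antenna mask and rate into the
 * tx_cont request.
 */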
static int
mt7915_tm_set_tx_cont(struct mt7915_phy *phy, bool en)
{
#define TX_CONT_START	0x05
#define TX_CONT_STOP	0x06
	struct mt7915_dev *dev = phy->dev;
	struct cfg80211_chan_def *chandef = &phy->mt76->chandef;
	int freq1 = ieee80211_frequency_to_channel(chandef->center_freq1);
	struct mt76_testmode_data *td = &phy->mt76->test;
	u32 func_idx = en ? TX_CONT_START : TX_CONT_STOP;
	u8 rate_idx = td->tx_rate_idx, mode;
	u16 rateval;
	struct mt7915_tm_rf_test req = {
		.action = 1,
		.icap_len = 120,
		.op.rf.func_idx = cpu_to_le32(func_idx),
	};
	struct tm_tx_cont *tx_cont = &req.op.rf.param.tx_cont;

	tx_cont->control_ch = chandef->chan->hw_value;
	tx_cont->center_ch = freq1;
	tx_cont->tx_ant = td->tx_antenna_mask;
	tx_cont->band = phy != &dev->phy;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		tx_cont->bw = CMD_CBW_40MHZ;
		break;
	case NL80211_CHAN_WIDTH_80:
		tx_cont->bw = CMD_CBW_80MHZ;
		break;
	case NL80211_CHAN_WIDTH_80P80:
		tx_cont->bw = CMD_CBW_8080MHZ;
		break;
	case NL80211_CHAN_WIDTH_160:
		tx_cont->bw = CMD_CBW_160MHZ;
		break;
	case NL80211_CHAN_WIDTH_5:
		tx_cont->bw = CMD_CBW_5MHZ;
		break;
	case NL80211_CHAN_WIDTH_10:
		tx_cont->bw = CMD_CBW_10MHZ;
		break;
	case NL80211_CHAN_WIDTH_20:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	case NL80211_CHAN_WIDTH_20_NOHT:
		tx_cont->bw = CMD_CBW_20MHZ;
		break;
	default:
		return -EINVAL;
	}

	if (!en) {
		req.op.rf.param.func_data = cpu_to_le32(phy != &dev->phy);
		goto out;
	}

	if (td->tx_rate_mode <= MT76_TM_TX_MODE_OFDM) {
		struct ieee80211_supported_band *sband;
		u8 idx = rate_idx;

		if (chandef->chan->band == NL80211_BAND_5GHZ)
			sband = &phy->mt76->sband_5g.sband;
		else
			sband = &phy->mt76->sband_2g.sband;

		if (td->tx_rate_mode == MT76_TM_TX_MODE_OFDM)
			idx += 4;
		rate_idx = sband->bitrates[idx].hw_value & 0xff;
	}

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		mode = MT_PHY_TYPE_CCK;
		break;
	case MT76_TM_TX_MODE_OFDM:
		mode = MT_PHY_TYPE_OFDM;
		break;
	case MT76_TM_TX_MODE_HT:
		mode = MT_PHY_TYPE_HT;
		break;
	case MT76_TM_TX_MODE_VHT:
		mode = MT_PHY_TYPE_VHT;
		break;
	case MT76_TM_TX_MODE_HE_SU:
		mode = MT_PHY_TYPE_HE_SU;
		break;
	case MT76_TM_TX_MODE_HE_EXT_SU:
		mode = MT_PHY_TYPE_HE_EXT_SU;
		break;
	case MT76_TM_TX_MODE_HE_TB:
		mode = MT_PHY_TYPE_HE_TB;
		break;
	case MT76_TM_TX_MODE_HE_MU:
		mode = MT_PHY_TYPE_HE_MU;
		break;
	default:
		return -EINVAL;
	}

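	/* rateval packs the PHY mode into bits 6+ and the rate index below */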
	rateval = mode << 6 | rate_idx;
	tx_cont->rateval = cpu_to_le16(rateval);

out:
	if (!en) {
		int ret;

		ret = mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
					sizeof(req), true);
		if (ret)
			return ret;

		return mt7915_tm_rf_switch_mode(dev, RF_OPER_NORMAL);
	}

	mt7915_tm_rf_switch_mode(dev, RF_OPER_RF_TEST);
	mt7915_tm_update_channel(phy);

	return mt76_mcu_send_msg(&dev->mt76, MCU_EXT_CMD(RF_TEST), &req,
				 sizeof(req), true);
}

static void
mt7915_tm_update_params(struct mt7915_phy *phy, u32 changed)
{
	struct mt76_testmode_data *td = &phy->mt76->test;
	bool en = phy->mt76->test.state != MT76_TM_STATE_OFF;

	if (changed & BIT(TM_CHANGED_FREQ_OFFSET))
		mt7915_tm_set_freq_offset(phy, en, en ? td->freq_offset : 0);
	if (changed & BIT(TM_CHANGED_TXPOWER))
		mt7915_tm_set_tx_power(phy);
}

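/*
 * Dispatch test-mode state transitions; when toggling between OFF and
 * IDLE, replay any parameters that have been configured so that they
 * take effect in the new state.
 */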
static int
mt7915_tm_set_state(struct mt76_phy *mphy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &mphy->test;
	struct mt7915_phy *phy = mphy->priv;
	enum mt76_testmode_state prev_state = td->state;

	mphy->test.state = state;

	if (prev_state == MT76_TM_STATE_TX_FRAMES ||
	    state == MT76_TM_STATE_TX_FRAMES)
		mt7915_tm_set_tx_frames(phy, state == MT76_TM_STATE_TX_FRAMES);
	else if (prev_state == MT76_TM_STATE_RX_FRAMES ||
		 state == MT76_TM_STATE_RX_FRAMES)
		mt7915_tm_set_rx_frames(phy, state == MT76_TM_STATE_RX_FRAMES);
	else if (prev_state == MT76_TM_STATE_TX_CONT ||
		 state == MT76_TM_STATE_TX_CONT)
		mt7915_tm_set_tx_cont(phy, state == MT76_TM_STATE_TX_CONT);
	else if (prev_state == MT76_TM_STATE_OFF ||
		 state == MT76_TM_STATE_OFF)
		mt7915_tm_init(phy, !(state == MT76_TM_STATE_OFF));

	if ((state == MT76_TM_STATE_IDLE &&
	     prev_state == MT76_TM_STATE_OFF) ||
	    (state == MT76_TM_STATE_OFF &&
	     prev_state == MT76_TM_STATE_IDLE)) {
		u32 changed = 0;
		int i;

		for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
			u16 cur = tm_change_map[i];

			if (td->param_set[cur / 32] & BIT(cur % 32))
				changed |= BIT(i);
		}

		mt7915_tm_update_params(phy, changed);
	}

	return 0;
}

static int
mt7915_tm_set_params(struct mt76_phy *mphy, struct nlattr **tb,
		     enum mt76_testmode_state new_state)
{
	struct mt76_testmode_data *td = &mphy->test;
	struct mt7915_phy *phy = mphy->priv;
	u32 changed = 0;
	int i;

	BUILD_BUG_ON(NUM_TM_CHANGED >= 32);

	if (new_state == MT76_TM_STATE_OFF ||
	    td->state == MT76_TM_STATE_OFF)
		return 0;

	if (td->tx_antenna_mask & ~mphy->chainmask)
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(tm_change_map); i++) {
		if (tb[tm_change_map[i]])
			changed |= BIT(i);
	}

	mt7915_tm_update_params(phy, changed);

	return 0;
}

static int
mt7915_tm_dump_stats(struct mt76_phy *mphy, struct sk_buff *msg)
{
	struct mt7915_phy *phy = mphy->priv;
	struct mt7915_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;
	enum mt76_rxq_id q;
	void *rx, *rssi;
	u16 fcs_err;
	int i;

	rx = nla_nest_start(msg, MT76_TM_STATS_ATTR_LAST_RX);
	if (!rx)
		return -ENOMEM;

	if (nla_put_s32(msg, MT76_TM_RX_ATTR_FREQ_OFFSET, phy->test.last_freq_offset))
		return -ENOMEM;

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_RCPI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_rcpi); i++)
		if (nla_put_u8(msg, i, phy->test.last_rcpi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_IB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_ib_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_ib_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	rssi = nla_nest_start(msg, MT76_TM_RX_ATTR_WB_RSSI);
	if (!rssi)
		return -ENOMEM;

	for (i = 0; i < ARRAY_SIZE(phy->test.last_wb_rssi); i++)
		if (nla_put_s8(msg, i, phy->test.last_wb_rssi[i]))
			return -ENOMEM;

	nla_nest_end(msg, rssi);

	if (nla_put_u8(msg, MT76_TM_RX_ATTR_SNR, phy->test.last_snr))
		return -ENOMEM;

	nla_nest_end(msg, rx);

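	/* MIB_SDR3 is read-cleared; count the errors as received packets too */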
	fcs_err = mt76_get_field(dev, MT_MIB_SDR3(ext_phy),
				 MT_MIB_SDR3_FCS_ERR_MASK);
	q = ext_phy ? MT_RXQ_EXT : MT_RXQ_MAIN;
	mphy->test.rx_stats.packets[q] += fcs_err;
	mphy->test.rx_stats.fcs_error[q] += fcs_err;

	return 0;
}

const struct mt76_testmode_ops mt7915_testmode_ops = {
	.set_state = mt7915_tm_set_state,
	.set_params = mt7915_tm_set_params,
	.dump_stats = mt7915_tm_dump_stats,
};