linux/drivers/net/wireless/mediatek/mt76/testmode.c
// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

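/* Netlink attribute policy used to validate the MT76_TM_ATTR_* testmode
 * parameters parsed in mt76_testmode_cmd() and mt76_testmode_dump().
 */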
static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

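/* Push pending test frames to the hardware queue. Called from the TX worker:
 * references to td->tx_skb are queued until the per-run budget
 * (td->tx_pending) is exhausted, the queued-but-not-completed limit is hit,
 * or the hardware queue is half full.
 */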
void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, skb_get(skb), wcid,
						   NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

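/* Maximum MPDU length for the selected TX mode: HT is capped at 7935 bytes,
 * VHT/HE at 7991 or 11454 bytes depending on the advertised VHT capability,
 * and legacy CCK/OFDM at the normal 802.11 frame limit.
 */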
static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}

static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

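/* Allocate the template frame used for TX testing: a data frame addressed to
 * the PHY's own MAC address (marked as QoS data when the length exceeds the
 * legacy frame limit). Lengths above MT_TXP_MAX_LEN are split into a head skb
 * plus a frag_list chain of MT_TXP_MAX_LEN-sized fragments. Any previously
 * allocated template is freed and replaced.
 */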
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	bool ext_phy = phy != &phy->dev->phy;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, head_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr2, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr3, phy->macaddr, sizeof(phy->macaddr));
	skb_set_queue_mapping(head, IEEE80211_AC_BE);

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	if (ext_phy)
		info->hw_queue |= MT_TX_HW_QUEUE_EXT_PHY;

	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		__skb_put_zero(frag, frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);

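/* Prepare a TX test run: allocate the template frame and, for CCK/OFDM/HT/VHT
 * modes, validate the requested rate against the channel band, bandwidth and
 * antenna mask and encode it into the skb's ieee80211_tx_rate. Modes above
 * VHT skip the rate setup here.
 */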
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}

static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

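/* Stop a TX test run: clear the pending budget with the TX worker disabled,
 * then wait (up to MT76_TM_TIMEOUT seconds) for already queued frames to
 * complete before freeing the template skb.
 */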
static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}

static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

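/* Apply default TX parameters the first time testmode is configured;
 * a non-zero tx_mpdu_len indicates the defaults were already set.
 */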
static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

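/* Switch the testmode state machine: stop a running TX test first, prepare
 * the template frame when entering TX_FRAMES, forward the state change to the
 * driver via test_ops->set_state(), then start TX or clear the RX counters as
 * appropriate.
 */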
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));

	phy->test.state = state;

	return 0;
}

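/* Public entry point for state changes. Requires the PHY to be running in
 * monitor mode, and transitions through IDLE when moving directly between
 * two active states.
 */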
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

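/* Netlink testmode command handler: parse the MT76_TM_ATTR_* attributes,
 * update the per-PHY test parameters (optionally resetting them first), let
 * the driver consume its own attributes via test_ops->set_params(), and
 * finally apply a requested state change.
 */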
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	bool ext_phy = phy != &dev->phy;
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask,
			   1 << (ext_phy * 2), phy->antenna_mask << (ext_phy * 2)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

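/* Append TX/RX statistics (plus any driver-specific stats) to a dump message. */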
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	if (dev->test_ops->dump_stats) {
		int ret;

		ret = dev->test_ops->dump_stats(phy, msg);
		if (ret)
			return ret;
	}

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	return 0;
}

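/* Netlink testmode dump handler: emits either the statistics nest (when
 * MT76_TM_ATTR_STATS is requested) or the current test configuration, one
 * message per dump.
 */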
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u8(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);