#include "mt76.h"

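/*
 * Netlink attribute policy for the testmode interface, indexed by
 * enum mt76_testmode_attr and exported for use by the individual
 * mt76 drivers.
 */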
const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
	[MT76_TM_ATTR_DRV_DATA] = { .type = NLA_NESTED },
};
EXPORT_SYMBOL_GPL(mt76_tm_policy);

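/*
 * Push references to the preallocated test frame to the hardware queue
 * until the pending count is drained, the queued-vs-done window reaches
 * the configured limit, or the queue is half full, then kick the queue.
 */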
void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, qid, skb_get(skb),
						   wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

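/* Maximum MPDU length supported for the given TX rate mode. */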
static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}

static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

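/*
 * Allocate the template test frame: a zeroed data frame carrying the
 * configured addresses, built as a head skb plus frag_list chunks of at
 * most MT_TXP_MAX_LEN bytes. Any previously allocated frame is replaced.
 */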
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, head_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, td->addr[0], ETH_ALEN);
	memcpy(hdr->addr2, td->addr[1], ETH_ALEN);
	memcpy(hdr->addr3, td->addr[2], ETH_ALEN);
	skb_set_queue_mapping(head, IEEE80211_AC_BE);

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		__skb_put_zero(frag, frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);

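/*
 * Build the test frame and fill in the first ieee80211_tx_rate entry for
 * CCK/OFDM/HT/VHT; rate setup for modes beyond VHT is skipped here.
 */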
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}

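/* Reset the tx counters and let the tx worker start pushing frames. */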
static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

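/*
 * Stop a running tx test: clear the pending count with the tx worker
 * disabled, wait (with timeout) for queued frames to complete and free
 * the template frame.
 */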
static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}

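/* Track which testmode attributes have been explicitly configured. */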
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

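/*
 * Populate default tx parameters the first time testmode is used on this
 * phy; a non-zero tx_mpdu_len marks the defaults as already initialized.
 */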
static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;

	memcpy(td->addr[0], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[1], phy->macaddr, ETH_ALEN);
	memcpy(td->addr[2], phy->macaddr, ETH_ALEN);
}

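/*
 * Switch the testmode state machine: stop any running tx test, prepare
 * the new tx frame if needed, and hand the state change to the driver.
 */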
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES) {
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));
	}

	phy->test.state = state;

	return 0;
}

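/*
 * Public state setter: requires the phy to be running in monitor mode and
 * goes through MT76_TM_STATE_IDLE when switching between active states.
 */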
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

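/* Read an optional u8 attribute and range-check it. */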
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

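/*
 * Testmode command handler: parse the attributes, update the per-phy
 * test configuration and optionally change the testmode state.
 */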
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA],
			   &td->tx_antenna_mask, 0, 0xff) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (tb[MT76_TM_ATTR_MAC_ADDRS]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_MAC_ADDRS], rem) {
			if (nla_len(cur) != ETH_ALEN || idx >= 3)
				goto out;

			memcpy(td->addr[idx], nla_data(cur), ETH_ALEN);
			idx++;
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

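/* Append tx/rx statistics (plus driver-specific stats) to the netlink message. */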
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	if (dev->test_ops->dump_stats) {
		int ret;

		ret = dev->test_ops->dump_stats(phy, msg);
		if (ret)
			return ret;
	}

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	return 0;
}

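/*
 * Testmode dump handler: return either the statistics nest or the current
 * test configuration in a single dump round.
 */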
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u8(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_MAC_ADDRS)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
		if (!a)
			goto out;

		for (i = 0; i < 3; i++)
			if (nla_put(msg, i, ETH_ALEN, td->addr[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);