// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
#include "mt76.h"

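/* netlink policy used to validate incoming testmode attributes */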
static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};

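/*
 * Feed pending test frames to the hardware tx queue. Called from the tx
 * worker: queues references (skb_get) to the preallocated template frame
 * until the configured frame budget is exhausted, the in-flight count
 * reaches tx_queued_limit (default 1000), or the hw queue is half full.
 */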
void mt76_testmode_tx_pending(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	u16 tx_queued_limit;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = phy->q_tx[qid];

	tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 &&
	       td->tx_queued - td->tx_done < tx_queued_limit &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, q, qid, skb_get(skb),
						   wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}

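/*
 * Maximum MPDU length for the given tx rate mode: the HT limit for HT,
 * the VHT limit (7991 or 11454 bytes, depending on the advertised VHT
 * capability) for VHT and HE modes, and the legacy frame length for
 * CCK/OFDM.
 */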
static u32
mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
{
	switch (tx_rate_mode) {
	case MT76_TM_TX_MODE_HT:
		return IEEE80211_MAX_MPDU_LEN_HT_7935;
	case MT76_TM_TX_MODE_VHT:
	case MT76_TM_TX_MODE_HE_SU:
	case MT76_TM_TX_MODE_HE_EXT_SU:
	case MT76_TM_TX_MODE_HE_TB:
	case MT76_TM_TX_MODE_HE_MU:
		if (phy->sband_5g.sband.vht_cap.cap &
		    IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
			return IEEE80211_MAX_MPDU_LEN_VHT_7991;
		return IEEE80211_MAX_MPDU_LEN_VHT_11454;
	case MT76_TM_TX_MODE_CCK:
	case MT76_TM_TX_MODE_OFDM:
	default:
		return IEEE80211_MAX_FRAME_LEN;
	}
}

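/* Free the preallocated template frame, if any */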
static void
mt76_testmode_free_skb(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}

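/*
 * Allocate the template test frame: a FromDS data frame addressed from/to
 * the phy's own MAC address, clamped to the per-mode maximum MPDU length.
 * Payloads beyond MT_TXP_MAX_LEN are chained as fragments so that no
 * single skb segment exceeds that limit (presumably a hardware tx
 * descriptor constraint).
 */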
int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
{
#define MT_TXP_MAX_LEN	4095
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct mt76_testmode_data *td = &phy->test;
	bool ext_phy = phy != &phy->dev->phy;
	struct sk_buff **frag_tail, *head;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	u32 max_len, head_len;
	int nfrags, i;

	max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
	if (len > max_len)
		len = max_len;
	else if (len < sizeof(struct ieee80211_hdr))
		len = sizeof(struct ieee80211_hdr);

	nfrags = len / MT_TXP_MAX_LEN;
	head_len = nfrags ? MT_TXP_MAX_LEN : len;

	if (len > IEEE80211_MAX_FRAME_LEN)
		fc |= IEEE80211_STYPE_QOS_DATA;

	head = alloc_skb(head_len, GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	hdr = __skb_put_zero(head, head_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr2, phy->macaddr, sizeof(phy->macaddr));
	memcpy(hdr->addr3, phy->macaddr, sizeof(phy->macaddr));
	skb_set_queue_mapping(head, IEEE80211_AC_BE);

	info = IEEE80211_SKB_CB(head);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;

	if (ext_phy)
		info->hw_queue |= MT_TX_HW_QUEUE_EXT_PHY;

	frag_tail = &skb_shinfo(head)->frag_list;

	for (i = 0; i < nfrags; i++) {
		struct sk_buff *frag;
		u16 frag_len;

		if (i == nfrags - 1)
			frag_len = len % MT_TXP_MAX_LEN;
		else
			frag_len = MT_TXP_MAX_LEN;

		frag = alloc_skb(frag_len, GFP_KERNEL);
		if (!frag) {
			mt76_testmode_free_skb(phy);
			dev_kfree_skb(head);
			return -ENOMEM;
		}

		__skb_put_zero(frag, frag_len);
		head->len += frag->len;
		head->data_len += frag->len;

		*frag_tail = frag;
		frag_tail = &(*frag_tail)->next;
	}

	mt76_testmode_free_skb(phy);
	td->tx_skb = head;

	return 0;
}
EXPORT_SYMBOL(mt76_testmode_alloc_skb);

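/*
 * Prepare tx state for MT76_TM_STATE_TX_FRAMES: allocate the template
 * frame and, for CCK/OFDM/HT/VHT, validate the configured rate against
 * the current channel/antenna setup and encode it in the skb's
 * rate-control info. Modes above VHT (HE) skip this legacy rate setup.
 */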
static int
mt76_testmode_tx_init(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(phy->antenna_mask);
	int ret;

	ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
	if (ret)
		return ret;

	if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
		goto out;

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	info = IEEE80211_SKB_CB(td->tx_skb);
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      phy->chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_stbc)
		info->flags |= IEEE80211_TX_CTL_STBC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (phy->chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}
out:
	return 0;
}

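/* Arm the frame budget and kick the tx worker to start transmitting */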
static void
mt76_testmode_tx_start(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}

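/*
 * Stop a running tx test: clear the pending budget with the tx worker
 * disabled, wait up to MT76_TM_TIMEOUT seconds for already-queued frames
 * to complete, then free the template frame.
 */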
static void
mt76_testmode_tx_stop(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
			   MT76_TM_TIMEOUT * HZ);

	mt76_testmode_free_skb(phy);
}

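/* Track which attributes were explicitly configured, one bit per attribute */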
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}

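/* Apply default tx parameters on first use (tx_mpdu_len still zero) */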
static void
mt76_testmode_init_defaults(struct mt76_phy *phy)
{
	struct mt76_testmode_data *td = &phy->test;

	if (td->tx_mpdu_len > 0)
		return;

	td->tx_mpdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}

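/*
 * Perform a state transition: stop any running tx test, set up tx state
 * when entering TX_FRAMES, hand the new state to the driver, then start
 * transmission or reset the rx counters as appropriate.
 */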
static int
__mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = phy->test.state;
	struct mt76_dev *dev = phy->dev;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(phy);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(phy);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(phy, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(phy);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(phy);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));

	phy->test.state = state;

	return 0;
}

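/*
 * Request a testmode state change. The phy must be running in monitor
 * mode; a transition between two non-idle states goes through
 * MT76_TM_STATE_IDLE first.
 */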
int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &phy->test;
	struct ieee80211_hw *hw = phy->hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(phy, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);

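/*
 * Read an optional u8 attribute into *dest, range-checked against
 * [min, max]; a missing attribute leaves *dest untouched.
 */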
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}

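/*
 * Testmode command handler (wired up through mac80211's testmode_cmd
 * hook): parse and validate the attributes, update the testmode
 * parameters under dev->mutex, forward them to the driver's set_params
 * hook and apply a requested state change.
 */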
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	bool ext_phy = phy != &dev->phy;
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(phy);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask,
			   1 << (ext_phy * 2), phy->antenna_mask << (ext_phy * 2)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
			   &td->tx_duty_cycle, 0, 99) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_mpdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_IPG])
		td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);

	if (tb[MT76_TM_ATTR_TX_TIME])
		td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		state = nla_get_u8(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(phy, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(phy, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);

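/*
 * Append tx/rx statistics to a dump message: driver-specific stats
 * first, then the generic tx counters and the summed rx packet/FCS-error
 * counts.
 */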
static int
mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &phy->test;
	struct mt76_dev *dev = phy->dev;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	if (dev->test_ops->dump_stats) {
		int ret;

		ret = dev->test_ops->dump_stats(phy, msg);
		if (ret)
			return ret;
	}

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	return 0;
}

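/*
 * Testmode dump handler (wired up through mac80211's testmode_dump
 * hook): emits either the statistics nest, when MT76_TM_ATTR_STATS is
 * requested, or the current configuration, skipping parameters that were
 * never explicitly set.
 */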
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &phy->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(phy, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(phy);

	err = -EMSGSIZE;
	if (nla_put_u8(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (dev->test_mtd.name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
	     nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);