// SPDX-License-Identifier: ISC
/*
 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
 */
#include <linux/sched.h>
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) {			\
	.band = NL80211_BAND_2GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

#define CHAN5G(_idx, _freq) {			\
	.band = NL80211_BAND_5GHZ,		\
	.center_freq = (_freq),			\
	.hw_value = (_idx),			\
	.max_power = 30,			\
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	CHAN5G(36, 5180),
	CHAN5G(40, 5200),
	CHAN5G(44, 5220),
	CHAN5G(48, 5240),

	CHAN5G(52, 5260),
	CHAN5G(56, 5280),
	CHAN5G(60, 5300),
	CHAN5G(64, 5320),

	CHAN5G(100, 5500),
	CHAN5G(104, 5520),
	CHAN5G(108, 5540),
	CHAN5G(112, 5560),
	CHAN5G(116, 5580),
	CHAN5G(120, 5600),
	CHAN5G(124, 5620),
	CHAN5G(128, 5640),
	CHAN5G(132, 5660),
	CHAN5G(136, 5680),
	CHAN5G(140, 5700),
	CHAN5G(144, 5720),

	CHAN5G(149, 5745),
	CHAN5G(153, 5765),
	CHAN5G(157, 5785),
	CHAN5G(161, 5805),
	CHAN5G(165, 5825),
	CHAN5G(169, 5845),
	CHAN5G(173, 5865),
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput =   0 * 1024, .blink_time = 334 },
	{ .throughput =   1 * 1024, .blink_time = 260 },
	{ .throughput =   5 * 1024, .blink_time = 220 },
	{ .throughput =  10 * 1024, .blink_time = 190 },
	{ .throughput =  20 * 1024, .blink_time = 170 },
	{ .throughput =  50 * 1024, .blink_time = 150 },
	{ .throughput =  70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time =  80 },
	{ .throughput = 300 * 1024, .blink_time =  50 },
};

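/*
 * Register an LED class device for the radio. The blink rate follows the
 * mac80211 throughput trigger table above, and the "led" child node in the
 * device tree can override the LED pin and polarity.
 */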
static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	int led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
		of_node_put(np);
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

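/*
 * Derive HT/VHT spatial stream capabilities from the PHY antenna mask:
 * advertise TX STBC only when more than one chain is available and build
 * the MCS maps for the supported number of streams.
 */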
static void mt76_init_stream_cap(struct mt76_phy *phy,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(phy->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
{
	if (phy->dev->cap.has_2ghz)
		mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
	if (phy->dev->cap.has_5ghz)
		mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

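/*
 * Initialize one supported band: duplicate the constant channel list into
 * devm-managed memory, allocate per-channel state and fill in the default
 * HT (and optionally VHT) capabilities.
 */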
static int
mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct ieee80211_sta_vht_cap *vht_cap;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;

	mt76_init_stream_cap(&dev->phy, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates)
{
	dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->phy.sband_2g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_2g,
			       mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz),
			       rates, n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->phy.sband_5g.sband;

	return mt76_init_sband(dev, &dev->phy.sband_5g,
			       mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz),
			       rates, n_rates, vht);
}

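/*
 * If a band has at least one enabled channel, use its first channel as the
 * default chandef; otherwise drop the band from the wiphy entirely.
 */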
static void
mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		 enum nl80211_band band)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found) {
		phy->chandef.chan = &sband->channels[0];
		phy->chan_state = &msband->chan[0];
		return;
	}

	sband->n_channels = 0;
	phy->hw->wiphy->bands[band] = NULL;
}

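/*
 * Set up the hardware and wiphy capabilities that are common to all mt76
 * drivers: interface modes, feature flags and queue/aggregation defaults.
 */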
static void
mt76_phy_init(struct mt76_dev *dev, struct ieee80211_hw *hw)
{
	struct wiphy *wiphy = hw->wiphy;

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
			WIPHY_FLAG_SUPPORTS_TDLS |
			WIPHY_FLAG_AP_UAPSD;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);

	wiphy->available_antennas_tx = dev->phy.antenna_mask;
	wiphy->available_antennas_rx = dev->phy.antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, SUPPORTS_REORDERING_BUFFER);

	if (!(dev->drv->drv_flags & MT_DRV_AMSDU_OFFLOAD)) {
		ieee80211_hw_set(hw, TX_AMSDU);
		ieee80211_hw_set(hw, TX_FRAG_LIST);
	}

	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
	ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_P2P_CLIENT) |
		BIT(NL80211_IFTYPE_P2P_GO) |
		BIT(NL80211_IFTYPE_ADHOC);
}

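/*
 * Allocate an extra (secondary) PHY. The driver private area and the
 * per-channel state for both bands are carved out of the same ieee80211_hw
 * private allocation, right after the mt76_phy struct itself.
 */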
struct mt76_phy *
mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
	       const struct ieee80211_ops *ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	unsigned int phy_size, chan_size;
	unsigned int size_2g, size_5g;
	void *priv;

	phy_size = ALIGN(sizeof(*phy), 8);
	chan_size = sizeof(dev->phy.sband_2g.chan[0]);
	size_2g = ALIGN(ARRAY_SIZE(mt76_channels_2ghz) * chan_size, 8);
	size_5g = ALIGN(ARRAY_SIZE(mt76_channels_5ghz) * chan_size, 8);

	size += phy_size + size_2g + size_5g;
	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;

	mt76_phy_init(dev, hw);

	priv = hw->priv + phy_size;

	phy->sband_2g = dev->phy.sband_2g;
	phy->sband_2g.chan = priv;
	priv += size_2g;

	phy->sband_5g = dev->phy.sband_5g;
	phy->sband_5g.chan = priv;
	priv += size_5g;

	phy->priv = priv;

	hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
	hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	return phy;
}
EXPORT_SYMBOL_GPL(mt76_alloc_phy);

int
mt76_register_phy(struct mt76_phy *phy)
{
	int ret;

	ret = ieee80211_register_hw(phy->hw);
	if (ret)
		return ret;

	phy->dev->phy2 = phy;
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_phy);

void
mt76_unregister_phy(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;

	dev->phy2 = NULL;
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(phy->hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_phy);

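/*
 * Allocate the main mt76_dev together with its ieee80211_hw and initialize
 * the locks, queues and worker state shared by all mt76 drivers.
 */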
struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	struct mt76_dev *dev;
	int i;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);
	dev->tx_worker.fn = mt76_tx_worker;

	INIT_LIST_HEAD(&dev->txwi_cache);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	dev->wq = alloc_ordered_workqueue("mt76", 0);
	if (!dev->wq) {
		ieee80211_free_hw(hw);
		return NULL;
	}

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

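/*
 * Register the main PHY with mac80211: set up the supported bands and rate
 * tables, apply device-tree frequency limits, optionally register the LED
 * and finally start the TX worker.
 */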
int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	int ret;

	dev_set_drvdata(dev->dev, dev);
	mt76_phy_init(dev, hw);

	if (dev->cap.has_2ghz) {
		ret = mt76_init_sband_2g(dev, rates, n_rates);
		if (ret)
			return ret;
	}

	if (dev->cap.has_5ghz) {
		ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	ret = ieee80211_register_hw(hw);
	if (ret)
		return ret;

	WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx"));
	sched_set_fifo_low(dev->tx_worker.task);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_worker_teardown(&dev->tx_worker);
	if (dev->wq) {
		destroy_workqueue(dev->wq);
		dev->wq = NULL;
	}
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

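/*
 * Queue a received frame for deferred processing. Frames are only queued
 * (and counted in testmode) here; mt76_rx_poll_complete() later runs the
 * reorder and delivery path from NAPI context.
 */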
void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
		dev_kfree_skb(skb);
		return;
	}

#ifdef CONFIG_NL80211_TESTMODE
	if (dev->test.state == MT76_TM_STATE_RX_FRAMES) {
		dev->test.rx_stats.packets[q]++;
		if (status->flag & RX_FLAG_FAILED_FCS_CRC)
			dev->test.rx_stats.fcs_error[q]++;
	}
#endif
	__skb_queue_tail(&dev->rx_skb[q], skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct mt76_queue *q;
	int i, offset;

	offset = __MT_TXQ_MAX * (phy != &dev->phy);

	for (i = 0; i < __MT_TXQ_MAX; i++) {
		q = dev->q_tx[offset + i];
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

static struct mt76_channel_state *
mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

void mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
{
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;
}
EXPORT_SYMBOL_GPL(mt76_update_survey_active_time);

void mt76_update_survey(struct mt76_dev *dev)
{
	ktime_t cur_time;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	mt76_update_survey_active_time(&dev->phy, cur_time);
	if (dev->phy2)
		mt76_update_survey_active_time(dev->phy2, cur_time);

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = dev->phy.chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

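/*
 * Switch the PHY to the channel configured in hw->conf: wait briefly for
 * pending TX to drain, fold the elapsed time into the survey counters and
 * reset the channel statistics when tuning away from the main channel.
 */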
void mt76_set_channel(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	mt76_update_survey(dev);

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);

	if (!offchannel)
		phy->main_chan = chandef->chan;

	if (chandef->chan != phy->main_chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &phy->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &phy->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(phy, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static void
mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
		struct ieee80211_hw **hw,
		struct ieee80211_sta **sta)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->he_ru = mstat.he_ru;
	status->he_gi = mstat.he_gi;
	status->he_dcm = mstat.he_dcm;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	*sta = wcid_to_sta(mstat.wcid);
	*hw = mt76_phy_hw(dev, mstat.ext_phy);
}

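/*
 * Software CCMP replay check: compare the received packet number against
 * the last one stored per TID and reject frames whose PN did not increase.
 */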
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211.
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = mt76_skb_get_hdr(skb);
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_rx_status info = {
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	};
	struct ieee80211_sta *sta;
	u32 airtime;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

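/*
 * Software RX airtime accounting: frames belonging to the same A-MPDU are
 * accumulated and reported in one go once a frame from a different A-MPDU
 * (or a non-aggregated frame) arrives.
 */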
static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		if (!ether_addr_equal(hdr->addr1, dev->macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

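/*
 * Per-frame station bookkeeping on RX: resolve the wcid for PS-poll frames,
 * update airtime and RSSI statistics, and track powersave transitions so
 * that the driver and mac80211 stay in sync.
 */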
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct mt76_wcid *wcid = status->wcid;
	bool ps;

	hw = mt76_phy_hw(dev, status->ext_phy);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, status->tid);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct sk_buff *skb;

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		mt76_rx_convert(dev, skb, &hw, &sta);
		ieee80211_rx_napi(hw, sta, skb, napi);
	}
	spin_unlock(&dev->rx_lock);
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta, bool ext_phy)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;
	}

	ewma_signal_init(&wcid->rssi);
	if (ext_phy)
		mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
	wcid->ext_phy = ext_phy;
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

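/*
 * Map mac80211 station state transitions onto the driver callbacks:
 * NOTEXIST->NONE adds the station, AUTH->ASSOC notifies association and
 * NONE->NOTEXIST tears it down again.
 */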
int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta, ext_phy);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
			     struct ieee80211_sta *sta)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	mutex_unlock(&dev->mutex);
}
EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);

int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_phy *phy = hw->priv;
	int n_chains = hweight8(phy->antenna_mask);
	int delta = mt76_tx_power_nss_delta(n_chains);

	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_beacon_cntdwn_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

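/*
 * Rebuild the CCMP header in front of the payload from the packet number
 * saved in the RX status, for frames where the hardware stripped the IV.
 */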
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

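/*
 * Translate a hardware rate index into an index into the band's bitrate
 * table, skipping the four leading CCK entries for OFDM rates on 2.4 GHz.
 */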
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->phy.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->phy.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	*tx_ant = phy->antenna_mask;
	*rx_ant = phy->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);