1 // SPDX-License-Identifier: ISC
2 /*
3 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
4 */
5 #include <linux/sched.h>
6 #include <linux/of.h>
7 #include "mt76.h"
8
9 #define CHAN2G(_idx, _freq) { \
10 .band = NL80211_BAND_2GHZ, \
11 .center_freq = (_freq), \
12 .hw_value = (_idx), \
13 .max_power = 30, \
14 }
15
16 #define CHAN5G(_idx, _freq) { \
17 .band = NL80211_BAND_5GHZ, \
18 .center_freq = (_freq), \
19 .hw_value = (_idx), \
20 .max_power = 30, \
21 }
22
23 static const struct ieee80211_channel mt76_channels_2ghz[] = {
24 CHAN2G(1, 2412),
25 CHAN2G(2, 2417),
26 CHAN2G(3, 2422),
27 CHAN2G(4, 2427),
28 CHAN2G(5, 2432),
29 CHAN2G(6, 2437),
30 CHAN2G(7, 2442),
31 CHAN2G(8, 2447),
32 CHAN2G(9, 2452),
33 CHAN2G(10, 2457),
34 CHAN2G(11, 2462),
35 CHAN2G(12, 2467),
36 CHAN2G(13, 2472),
37 CHAN2G(14, 2484),
38 };
39
40 static const struct ieee80211_channel mt76_channels_5ghz[] = {
41 CHAN5G(36, 5180),
42 CHAN5G(40, 5200),
43 CHAN5G(44, 5220),
44 CHAN5G(48, 5240),
45
46 CHAN5G(52, 5260),
47 CHAN5G(56, 5280),
48 CHAN5G(60, 5300),
49 CHAN5G(64, 5320),
50
51 CHAN5G(100, 5500),
52 CHAN5G(104, 5520),
53 CHAN5G(108, 5540),
54 CHAN5G(112, 5560),
55 CHAN5G(116, 5580),
56 CHAN5G(120, 5600),
57 CHAN5G(124, 5620),
58 CHAN5G(128, 5640),
59 CHAN5G(132, 5660),
60 CHAN5G(136, 5680),
61 CHAN5G(140, 5700),
62 CHAN5G(144, 5720),
63
64 CHAN5G(149, 5745),
65 CHAN5G(153, 5765),
66 CHAN5G(157, 5785),
67 CHAN5G(161, 5805),
68 CHAN5G(165, 5825),
69 CHAN5G(169, 5845),
70 CHAN5G(173, 5865),
71 };
72
73 static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
74 { .throughput = 0 * 1024, .blink_time = 334 },
75 { .throughput = 1 * 1024, .blink_time = 260 },
76 { .throughput = 5 * 1024, .blink_time = 220 },
77 { .throughput = 10 * 1024, .blink_time = 190 },
78 { .throughput = 20 * 1024, .blink_time = 170 },
79 { .throughput = 50 * 1024, .blink_time = 150 },
80 { .throughput = 70 * 1024, .blink_time = 130 },
81 { .throughput = 100 * 1024, .blink_time = 110 },
82 { .throughput = 200 * 1024, .blink_time = 80 },
83 { .throughput = 300 * 1024, .blink_time = 50 },
84 };
85
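/*
 * Register an LED class device for the radio. The throughput table above
 * drives the default blink trigger; an optional "led" child node in the
 * device tree may supply the LED pin ("led-sources") and its polarity
 * ("led-active-low").
 */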
86 static int mt76_led_init(struct mt76_dev *dev)
87 {
88 struct device_node *np = dev->dev->of_node;
89 struct ieee80211_hw *hw = dev->hw;
90 int led_pin;
91
92 if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
93 return 0;
94
95 snprintf(dev->led_name, sizeof(dev->led_name),
96 "mt76-%s", wiphy_name(hw->wiphy));
97
98 dev->led_cdev.name = dev->led_name;
99 dev->led_cdev.default_trigger =
100 ieee80211_create_tpt_led_trigger(hw,
101 IEEE80211_TPT_LEDTRIG_FL_RADIO,
102 mt76_tpt_blink,
103 ARRAY_SIZE(mt76_tpt_blink));
104
105 np = of_get_child_by_name(np, "led");
106 if (np) {
107 if (!of_property_read_u32(np, "led-sources", &led_pin))
108 dev->led_pin = led_pin;
109 dev->led_al = of_property_read_bool(np, "led-active-low");
110 }
111
112 return led_classdev_register(dev->dev, &dev->led_cdev);
113 }
114
115 static void mt76_led_cleanup(struct mt76_dev *dev)
116 {
117 if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
118 return;
119
120 led_classdev_unregister(&dev->led_cdev);
121 }
122
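/*
 * Derive HT/VHT stream capabilities from the PHY antenna mask: TX STBC is
 * advertised only with more than one spatial stream, and the HT rx_mask /
 * VHT MCS maps are built for the supported stream count.
 */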
123 static void mt76_init_stream_cap(struct mt76_phy *phy,
124 struct ieee80211_supported_band *sband,
125 bool vht)
126 {
127 struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
128 int i, nstream = hweight8(phy->antenna_mask);
129 struct ieee80211_sta_vht_cap *vht_cap;
130 u16 mcs_map = 0;
131
132 if (nstream > 1)
133 ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
134 else
135 ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;
136
137 for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
138 ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;
139
140 if (!vht)
141 return;
142
143 vht_cap = &sband->vht_cap;
144 if (nstream > 1)
145 vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
146 else
147 vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;
148
149 for (i = 0; i < 8; i++) {
150 if (i < nstream)
151 mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
152 else
153 mcs_map |=
154 (IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
155 }
156 vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
157 vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
158 }
159
160 void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
161 {
162 if (phy->dev->cap.has_2ghz)
163 mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
164 if (phy->dev->cap.has_5ghz)
165 mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
166 }
167 EXPORT_SYMBOL_GPL(mt76_set_stream_caps);
168
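/*
 * Populate a supported band: duplicate the constant channel list, allocate
 * per-channel survey state and fill in the common default HT (and
 * optionally VHT) capabilities.
 */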
169 static int
170 mt76_init_sband(struct mt76_dev *dev, struct mt76_sband *msband,
171 const struct ieee80211_channel *chan, int n_chan,
172 struct ieee80211_rate *rates, int n_rates, bool vht)
173 {
174 struct ieee80211_supported_band *sband = &msband->sband;
175 struct ieee80211_sta_ht_cap *ht_cap;
176 struct ieee80211_sta_vht_cap *vht_cap;
177 void *chanlist;
178 int size;
179
180 size = n_chan * sizeof(*chan);
181 chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
182 if (!chanlist)
183 return -ENOMEM;
184
185 msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
186 GFP_KERNEL);
187 if (!msband->chan)
188 return -ENOMEM;
189
190 sband->channels = chanlist;
191 sband->n_channels = n_chan;
192 sband->bitrates = rates;
193 sband->n_bitrates = n_rates;
194
195 ht_cap = &sband->ht_cap;
196 ht_cap->ht_supported = true;
197 ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
198 IEEE80211_HT_CAP_GRN_FLD |
199 IEEE80211_HT_CAP_SGI_20 |
200 IEEE80211_HT_CAP_SGI_40 |
201 (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);
202
203 ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
204 ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
205
206 mt76_init_stream_cap(&dev->phy, sband, vht);
207
208 if (!vht)
209 return 0;
210
211 vht_cap = &sband->vht_cap;
212 vht_cap->vht_supported = true;
213 vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
214 IEEE80211_VHT_CAP_RXSTBC_1 |
215 IEEE80211_VHT_CAP_SHORT_GI_80 |
216 IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
217 IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
218 (3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);
219
220 return 0;
221 }
222
223 static int
224 mt76_init_sband_2g(struct mt76_dev *dev, struct ieee80211_rate *rates,
225 int n_rates)
226 {
227 dev->hw->wiphy->bands[NL80211_BAND_2GHZ] = &dev->phy.sband_2g.sband;
228
229 return mt76_init_sband(dev, &dev->phy.sband_2g,
230 mt76_channels_2ghz,
231 ARRAY_SIZE(mt76_channels_2ghz),
232 rates, n_rates, false);
233 }
234
235 static int
236 mt76_init_sband_5g(struct mt76_dev *dev, struct ieee80211_rate *rates,
237 int n_rates, bool vht)
238 {
239 dev->hw->wiphy->bands[NL80211_BAND_5GHZ] = &dev->phy.sband_5g.sband;
240
241 return mt76_init_sband(dev, &dev->phy.sband_5g,
242 mt76_channels_5ghz,
243 ARRAY_SIZE(mt76_channels_5ghz),
244 rates, n_rates, vht);
245 }
246
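/*
 * If every channel in the band is flagged as disabled (e.g. by regulatory
 * rules or DT frequency limits), drop the band from the wiphy; otherwise
 * pick its first channel as the initial channel and survey state.
 */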
247 static void
248 mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
249 enum nl80211_band band)
250 {
251 struct ieee80211_supported_band *sband = &msband->sband;
252 bool found = false;
253 int i;
254
255 if (!sband)
256 return;
257
258 for (i = 0; i < sband->n_channels; i++) {
259 if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
260 continue;
261
262 found = true;
263 break;
264 }
265
266 if (found) {
267 phy->chandef.chan = &sband->channels[0];
268 phy->chan_state = &msband->chan[0];
269 return;
270 }
271
272 sband->n_channels = 0;
273 phy->hw->wiphy->bands[band] = NULL;
274 }
275
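/*
 * Set up the wiphy and ieee80211_hw capability flags, antenna info and
 * interface modes common to every mt76 PHY.
 */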
276 static void
277 mt76_phy_init(struct mt76_dev *dev, struct ieee80211_hw *hw)
278 {
279 struct wiphy *wiphy = hw->wiphy;
280
281 SET_IEEE80211_DEV(hw, dev->dev);
282 SET_IEEE80211_PERM_ADDR(hw, dev->macaddr);
283
284 wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
285 wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
286 WIPHY_FLAG_SUPPORTS_TDLS |
287 WIPHY_FLAG_AP_UAPSD;
288
289 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
290 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
291 wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);
292
293 wiphy->available_antennas_tx = dev->phy.antenna_mask;
294 wiphy->available_antennas_rx = dev->phy.antenna_mask;
295
296 hw->txq_data_size = sizeof(struct mt76_txq);
297 hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;
298
299 if (!hw->max_tx_fragments)
300 hw->max_tx_fragments = 16;
301
302 ieee80211_hw_set(hw, SIGNAL_DBM);
303 ieee80211_hw_set(hw, AMPDU_AGGREGATION);
304 ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
305 ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
306 ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
307 ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
308 ieee80211_hw_set(hw, SUPPORTS_REORDERING_BUFFER);
309
310 if (!(dev->drv->drv_flags & MT_DRV_AMSDU_OFFLOAD)) {
311 ieee80211_hw_set(hw, TX_AMSDU);
312 ieee80211_hw_set(hw, TX_FRAG_LIST);
313 }
314
315 ieee80211_hw_set(hw, MFP_CAPABLE);
316 ieee80211_hw_set(hw, AP_LINK_PS);
317 ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);
318 ieee80211_hw_set(hw, NEEDS_UNIQUE_STA_ADDR);
319
320 wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
321 wiphy->interface_modes =
322 BIT(NL80211_IFTYPE_STATION) |
323 BIT(NL80211_IFTYPE_AP) |
324 #ifdef CONFIG_MAC80211_MESH
325 BIT(NL80211_IFTYPE_MESH_POINT) |
326 #endif
327 BIT(NL80211_IFTYPE_P2P_CLIENT) |
328 BIT(NL80211_IFTYPE_P2P_GO) |
329 BIT(NL80211_IFTYPE_ADHOC);
330 }
331
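/*
 * Allocate a secondary PHY. The mac80211 private area holds the mt76_phy
 * struct, the per-channel state for both bands and the driver's private
 * data, laid out back to back with 8-byte alignment.
 */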
332 struct mt76_phy *
333 mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
334 const struct ieee80211_ops *ops)
335 {
336 struct ieee80211_hw *hw;
337 struct mt76_phy *phy;
338 unsigned int phy_size, chan_size;
339 unsigned int size_2g, size_5g;
340 void *priv;
341
342 phy_size = ALIGN(sizeof(*phy), 8);
343 chan_size = sizeof(dev->phy.sband_2g.chan[0]);
344 size_2g = ALIGN(ARRAY_SIZE(mt76_channels_2ghz) * chan_size, 8);
345 size_5g = ALIGN(ARRAY_SIZE(mt76_channels_5ghz) * chan_size, 8);
346
347 size += phy_size + size_2g + size_5g;
348 hw = ieee80211_alloc_hw(size, ops);
349 if (!hw)
350 return NULL;
351
352 phy = hw->priv;
353 phy->dev = dev;
354 phy->hw = hw;
355
356 mt76_phy_init(dev, hw);
357
358 priv = hw->priv + phy_size;
359
360 phy->sband_2g = dev->phy.sband_2g;
361 phy->sband_2g.chan = priv;
362 priv += size_2g;
363
364 phy->sband_5g = dev->phy.sband_5g;
365 phy->sband_5g.chan = priv;
366 priv += size_5g;
367
368 phy->priv = priv;
369
370 hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;
371 hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;
372
373 mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
374 mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);
375
376 return phy;
377 }
378 EXPORT_SYMBOL_GPL(mt76_alloc_phy);
379
380 int
381 mt76_register_phy(struct mt76_phy *phy)
382 {
383 int ret;
384
385 ret = ieee80211_register_hw(phy->hw);
386 if (ret)
387 return ret;
388
389 phy->dev->phy2 = phy;
390 return 0;
391 }
392 EXPORT_SYMBOL_GPL(mt76_register_phy);
393
394 void
395 mt76_unregister_phy(struct mt76_phy *phy)
396 {
397 struct mt76_dev *dev = phy->dev;
398
399 dev->phy2 = NULL;
400 mt76_tx_status_check(dev, NULL, true);
401 ieee80211_unregister_hw(phy->hw);
402 }
403 EXPORT_SYMBOL_GPL(mt76_unregister_phy);
404
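/*
 * Allocate the primary mt76 device together with its ieee80211_hw, and
 * initialize the locks, queues, TX worker hook and ordered workqueue used
 * by the core.
 */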
405 struct mt76_dev *
406 mt76_alloc_device(struct device *pdev, unsigned int size,
407 const struct ieee80211_ops *ops,
408 const struct mt76_driver_ops *drv_ops)
409 {
410 struct ieee80211_hw *hw;
411 struct mt76_phy *phy;
412 struct mt76_dev *dev;
413 int i;
414
415 hw = ieee80211_alloc_hw(size, ops);
416 if (!hw)
417 return NULL;
418
419 dev = hw->priv;
420 dev->hw = hw;
421 dev->dev = pdev;
422 dev->drv = drv_ops;
423
424 phy = &dev->phy;
425 phy->dev = dev;
426 phy->hw = hw;
427
428 spin_lock_init(&dev->rx_lock);
429 spin_lock_init(&dev->lock);
430 spin_lock_init(&dev->cc_lock);
431 mutex_init(&dev->mutex);
432 init_waitqueue_head(&dev->tx_wait);
433 skb_queue_head_init(&dev->status_list);
434
435 skb_queue_head_init(&dev->mcu.res_q);
436 init_waitqueue_head(&dev->mcu.wait);
437 mutex_init(&dev->mcu.mutex);
438 dev->tx_worker.fn = mt76_tx_worker;
439
440 INIT_LIST_HEAD(&dev->txwi_cache);
441
442 for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
443 skb_queue_head_init(&dev->rx_skb[i]);
444
445 dev->wq = alloc_ordered_workqueue("mt76", 0);
446 if (!dev->wq) {
447 ieee80211_free_hw(hw);
448 return NULL;
449 }
450
451 return dev;
452 }
453 EXPORT_SYMBOL_GPL(mt76_alloc_device);
454
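/*
 * Final registration step for the primary PHY: build the supported bands
 * (the 5 GHz band skips the first four CCK entries of the rate table),
 * apply DT frequency limits, optionally register the LED, register with
 * mac80211 and start the TX worker at low FIFO priority.
 */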
455 int mt76_register_device(struct mt76_dev *dev, bool vht,
456 struct ieee80211_rate *rates, int n_rates)
457 {
458 struct ieee80211_hw *hw = dev->hw;
459 struct mt76_phy *phy = &dev->phy;
460 int ret;
461
462 dev_set_drvdata(dev->dev, dev);
463 mt76_phy_init(dev, hw);
464
465 if (dev->cap.has_2ghz) {
466 ret = mt76_init_sband_2g(dev, rates, n_rates);
467 if (ret)
468 return ret;
469 }
470
471 if (dev->cap.has_5ghz) {
472 ret = mt76_init_sband_5g(dev, rates + 4, n_rates - 4, vht);
473 if (ret)
474 return ret;
475 }
476
477 wiphy_read_of_freq_limits(hw->wiphy);
478 mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
479 mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);
480
481 if (IS_ENABLED(CONFIG_MT76_LEDS)) {
482 ret = mt76_led_init(dev);
483 if (ret)
484 return ret;
485 }
486
487 ret = ieee80211_register_hw(hw);
488 if (ret)
489 return ret;
490
491 WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx"));
492 sched_set_fifo_low(dev->tx_worker.task);
493
494 return 0;
495 }
496 EXPORT_SYMBOL_GPL(mt76_register_device);
497
498 void mt76_unregister_device(struct mt76_dev *dev)
499 {
500 struct ieee80211_hw *hw = dev->hw;
501
502 if (IS_ENABLED(CONFIG_MT76_LEDS))
503 mt76_led_cleanup(dev);
504 mt76_tx_status_check(dev, NULL, true);
505 ieee80211_unregister_hw(hw);
506 }
507 EXPORT_SYMBOL_GPL(mt76_unregister_device);
508
509 void mt76_free_device(struct mt76_dev *dev)
510 {
511 mt76_worker_teardown(&dev->tx_worker);
512 if (dev->wq) {
513 destroy_workqueue(dev->wq);
514 dev->wq = NULL;
515 }
516 ieee80211_free_hw(dev->hw);
517 }
518 EXPORT_SYMBOL_GPL(mt76_free_device);
519
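/*
 * Queue a received frame for deferred processing; frames are dropped while
 * the PHY is not running. Test-mode RX statistics are updated here as well.
 */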
520 void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
521 {
522 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
523 struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);
524
525 if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
526 dev_kfree_skb(skb);
527 return;
528 }
529
530 #ifdef CONFIG_NL80211_TESTMODE
531 if (dev->test.state == MT76_TM_STATE_RX_FRAMES) {
532 dev->test.rx_stats.packets[q]++;
533 if (status->flag & RX_FLAG_FAILED_FCS_CRC)
534 dev->test.rx_stats.fcs_error[q]++;
535 }
536 #endif
537 __skb_queue_tail(&dev->rx_skb[q], skb);
538 }
539 EXPORT_SYMBOL_GPL(mt76_rx);
540
541 bool mt76_has_tx_pending(struct mt76_phy *phy)
542 {
543 struct mt76_dev *dev = phy->dev;
544 struct mt76_queue *q;
545 int i, offset;
546
547 offset = __MT_TXQ_MAX * (phy != &dev->phy);
548
549 for (i = 0; i < __MT_TXQ_MAX; i++) {
550 q = dev->q_tx[offset + i];
551 if (q && q->queued)
552 return true;
553 }
554
555 return false;
556 }
557 EXPORT_SYMBOL_GPL(mt76_has_tx_pending);
558
559 static struct mt76_channel_state *
560 mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
561 {
562 struct mt76_sband *msband;
563 int idx;
564
565 if (c->band == NL80211_BAND_2GHZ)
566 msband = &phy->sband_2g;
567 else
568 msband = &phy->sband_5g;
569
570 idx = c - &msband->sband.channels[0];
571 return &msband->chan[idx];
572 }
573
574 void mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
575 {
576 struct mt76_channel_state *state = phy->chan_state;
577
578 state->cc_active += ktime_to_us(ktime_sub(time,
579 phy->survey_time));
580 phy->survey_time = time;
581 }
582 EXPORT_SYMBOL_GPL(mt76_update_survey_active_time);
583
584 void mt76_update_survey(struct mt76_dev *dev)
585 {
586 ktime_t cur_time;
587
588 if (dev->drv->update_survey)
589 dev->drv->update_survey(dev);
590
591 cur_time = ktime_get_boottime();
592 mt76_update_survey_active_time(&dev->phy, cur_time);
593 if (dev->phy2)
594 mt76_update_survey_active_time(dev->phy2, cur_time);
595
596 if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
597 struct mt76_channel_state *state = dev->phy.chan_state;
598
599 spin_lock_bh(&dev->cc_lock);
600 state->cc_bss_rx += dev->cur_cc_bss_rx;
601 dev->cur_cc_bss_rx = 0;
602 spin_unlock_bh(&dev->cc_lock);
603 }
604 }
605 EXPORT_SYMBOL_GPL(mt76_update_survey);
606
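/*
 * Switch to the channel currently configured in mac80211: wait briefly for
 * pending TX to drain, close out the running survey period, and reset the
 * channel state when tuning to a channel other than the main operating
 * channel (i.e. when going off-channel).
 */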
607 void mt76_set_channel(struct mt76_phy *phy)
608 {
609 struct mt76_dev *dev = phy->dev;
610 struct ieee80211_hw *hw = phy->hw;
611 struct cfg80211_chan_def *chandef = &hw->conf.chandef;
612 bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
613 int timeout = HZ / 5;
614
615 wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
616 mt76_update_survey(dev);
617
618 phy->chandef = *chandef;
619 phy->chan_state = mt76_channel_state(phy, chandef->chan);
620
621 if (!offchannel)
622 phy->main_chan = chandef->chan;
623
624 if (chandef->chan != phy->main_chan)
625 memset(phy->chan_state, 0, sizeof(*phy->chan_state));
626 }
627 EXPORT_SYMBOL_GPL(mt76_set_channel);
628
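/*
 * cfg80211 survey handler: walk the 2.4 GHz channels first, then 5 GHz,
 * and convert the accumulated channel-state counters (microseconds) to the
 * milliseconds expected in struct survey_info.
 */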
629 int mt76_get_survey(struct ieee80211_hw *hw, int idx,
630 struct survey_info *survey)
631 {
632 struct mt76_phy *phy = hw->priv;
633 struct mt76_dev *dev = phy->dev;
634 struct mt76_sband *sband;
635 struct ieee80211_channel *chan;
636 struct mt76_channel_state *state;
637 int ret = 0;
638
639 mutex_lock(&dev->mutex);
640 if (idx == 0 && dev->drv->update_survey)
641 mt76_update_survey(dev);
642
643 sband = &phy->sband_2g;
644 if (idx >= sband->sband.n_channels) {
645 idx -= sband->sband.n_channels;
646 sband = &phy->sband_5g;
647 }
648
649 if (idx >= sband->sband.n_channels) {
650 ret = -ENOENT;
651 goto out;
652 }
653
654 chan = &sband->sband.channels[idx];
655 state = mt76_channel_state(phy, chan);
656
657 memset(survey, 0, sizeof(*survey));
658 survey->channel = chan;
659 survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
660 survey->filled |= dev->drv->survey_flags;
661 if (state->noise)
662 survey->filled |= SURVEY_INFO_NOISE_DBM;
663
664 if (chan == phy->main_chan) {
665 survey->filled |= SURVEY_INFO_IN_USE;
666
667 if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
668 survey->filled |= SURVEY_INFO_TIME_BSS_RX;
669 }
670
671 survey->time_busy = div_u64(state->cc_busy, 1000);
672 survey->time_rx = div_u64(state->cc_rx, 1000);
673 survey->time = div_u64(state->cc_active, 1000);
674 survey->noise = state->noise;
675
676 spin_lock_bh(&dev->cc_lock);
677 survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
678 survey->time_tx = div_u64(state->cc_tx, 1000);
679 spin_unlock_bh(&dev->cc_lock);
680
681 out:
682 mutex_unlock(&dev->mutex);
683
684 return ret;
685 }
686 EXPORT_SYMBOL_GPL(mt76_get_survey);
687
688 void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
689 struct ieee80211_key_conf *key)
690 {
691 struct ieee80211_key_seq seq;
692 int i;
693
694 wcid->rx_check_pn = false;
695
696 if (!key)
697 return;
698
699 if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
700 return;
701
702 wcid->rx_check_pn = true;
703 for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
704 ieee80211_get_key_rx_seq(key, i, &seq);
705 memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
706 }
707 }
708 EXPORT_SYMBOL(mt76_wcid_key_setup);
709
710 static void
711 mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
712 struct ieee80211_hw **hw,
713 struct ieee80211_sta **sta)
714 {
715 struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
716 struct mt76_rx_status mstat;
717
718 mstat = *((struct mt76_rx_status *)skb->cb);
719 memset(status, 0, sizeof(*status));
720
721 status->flag = mstat.flag;
722 status->freq = mstat.freq;
723 status->enc_flags = mstat.enc_flags;
724 status->encoding = mstat.encoding;
725 status->bw = mstat.bw;
726 status->he_ru = mstat.he_ru;
727 status->he_gi = mstat.he_gi;
728 status->he_dcm = mstat.he_dcm;
729 status->rate_idx = mstat.rate_idx;
730 status->nss = mstat.nss;
731 status->band = mstat.band;
732 status->signal = mstat.signal;
733 status->chains = mstat.chains;
734 status->ampdu_reference = mstat.ampdu_ref;
735
736 BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
737 BUILD_BUG_ON(sizeof(status->chain_signal) !=
738 sizeof(mstat.chain_signal));
739 memcpy(status->chain_signal, mstat.chain_signal,
740 sizeof(mstat.chain_signal));
741
742 *sta = wcid_to_sta(mstat.wcid);
743 *hw = mt76_phy_hw(dev, mstat.ext_phy);
744 }
745
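/*
 * CCMP replay check: compare the received PN against the last PN stored in
 * the WCID and reject the frame if it did not increase. Only the first
 * fragment is checked here; mac80211 validates the remaining fragments.
 */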
746 static int
747 mt76_check_ccmp_pn(struct sk_buff *skb)
748 {
749 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
750 struct mt76_wcid *wcid = status->wcid;
751 struct ieee80211_hdr *hdr;
752 int ret;
753
754 if (!(status->flag & RX_FLAG_DECRYPTED))
755 return 0;
756
757 if (!wcid || !wcid->rx_check_pn)
758 return 0;
759
760 if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
761 /*
762 * Validate the first fragment both here and in mac80211.
763 * All further fragments will be validated by mac80211 only.
764 */
765 hdr = mt76_skb_get_hdr(skb);
766 if (ieee80211_is_frag(hdr) &&
767 !ieee80211_is_first_frag(hdr->frame_control))
768 return 0;
769 }
770
771 BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
772 ret = memcmp(status->iv, wcid->rx_key_pn[status->tid],
773 sizeof(status->iv));
774 if (ret <= 0)
775 return -EINVAL; /* replay */
776
777 memcpy(wcid->rx_key_pn[status->tid], status->iv, sizeof(status->iv));
778
779 if (status->flag & RX_FLAG_IV_STRIPPED)
780 status->flag |= RX_FLAG_PN_VALIDATED;
781
782 return 0;
783 }
784
785 static void
786 mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
787 int len)
788 {
789 struct mt76_wcid *wcid = status->wcid;
790 struct ieee80211_rx_status info = {
791 .enc_flags = status->enc_flags,
792 .rate_idx = status->rate_idx,
793 .encoding = status->encoding,
794 .band = status->band,
795 .nss = status->nss,
796 .bw = status->bw,
797 };
798 struct ieee80211_sta *sta;
799 u32 airtime;
800
801 airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
802 spin_lock(&dev->cc_lock);
803 dev->cur_cc_bss_rx += airtime;
804 spin_unlock(&dev->cc_lock);
805
806 if (!wcid || !wcid->sta)
807 return;
808
809 sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
810 ieee80211_sta_register_airtime(sta, status->tid, 0, airtime);
811 }
812
813 static void
814 mt76_airtime_flush_ampdu(struct mt76_dev *dev)
815 {
816 struct mt76_wcid *wcid;
817 int wcid_idx;
818
819 if (!dev->rx_ampdu_len)
820 return;
821
822 wcid_idx = dev->rx_ampdu_status.wcid_idx;
823 if (wcid_idx < ARRAY_SIZE(dev->wcid))
824 wcid = rcu_dereference(dev->wcid[wcid_idx]);
825 else
826 wcid = NULL;
827 dev->rx_ampdu_status.wcid = wcid;
828
829 mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);
830
831 dev->rx_ampdu_len = 0;
832 dev->rx_ampdu_ref = 0;
833 }
834
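/*
 * Software RX airtime accounting (only when the driver sets
 * MT_DRV_SW_RX_AIRTIME): frames belonging to an A-MPDU are accumulated and
 * reported once per aggregate; everything else is reported immediately.
 */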
835 static void
836 mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
837 {
838 struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
839 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
840 struct mt76_wcid *wcid = status->wcid;
841
842 if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
843 return;
844
845 if (!wcid || !wcid->sta) {
846 if (!ether_addr_equal(hdr->addr1, dev->macaddr))
847 return;
848
849 wcid = NULL;
850 }
851
852 if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
853 status->ampdu_ref != dev->rx_ampdu_ref)
854 mt76_airtime_flush_ampdu(dev);
855
856 if (status->flag & RX_FLAG_AMPDU_DETAILS) {
857 if (!dev->rx_ampdu_len ||
858 status->ampdu_ref != dev->rx_ampdu_ref) {
859 dev->rx_ampdu_status = *status;
860 dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
861 dev->rx_ampdu_ref = status->ampdu_ref;
862 }
863
864 dev->rx_ampdu_len += skb->len;
865 return;
866 }
867
868 mt76_airtime_report(dev, status, skb->len);
869 }
870
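/*
 * Per-frame station bookkeeping: update the RSSI average, handle PS-Poll
 * and U-APSD triggers, and propagate power-save transitions derived from
 * the frame's PM bit to the driver and to mac80211.
 */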
871 static void
872 mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
873 {
874 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
875 struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
876 struct ieee80211_sta *sta;
877 struct ieee80211_hw *hw;
878 struct mt76_wcid *wcid = status->wcid;
879 bool ps;
880
881 hw = mt76_phy_hw(dev, status->ext_phy);
882 if (ieee80211_is_pspoll(hdr->frame_control) && !wcid) {
883 sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
884 if (sta)
885 wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
886 }
887
888 mt76_airtime_check(dev, skb);
889
890 if (!wcid || !wcid->sta)
891 return;
892
893 sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
894
895 if (status->signal <= 0)
896 ewma_signal_add(&wcid->rssi, -status->signal);
897
898 wcid->inactive_count = 0;
899
900 if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
901 return;
902
903 if (ieee80211_is_pspoll(hdr->frame_control)) {
904 ieee80211_sta_pspoll(sta);
905 return;
906 }
907
908 if (ieee80211_has_morefrags(hdr->frame_control) ||
909 !(ieee80211_is_mgmt(hdr->frame_control) ||
910 ieee80211_is_data(hdr->frame_control)))
911 return;
912
913 ps = ieee80211_has_pm(hdr->frame_control);
914
915 if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
916 ieee80211_is_qos_nullfunc(hdr->frame_control)))
917 ieee80211_sta_uapsd_trigger(sta, status->tid);
918
919 if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
920 return;
921
922 if (ps)
923 set_bit(MT_WCID_FLAG_PS, &wcid->flags);
924 else
925 clear_bit(MT_WCID_FLAG_PS, &wcid->flags);
926
927 dev->drv->sta_ps(dev, sta, ps);
928 ieee80211_sta_ps_transition(sta, ps);
929 }
930
931 void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
932 struct napi_struct *napi)
933 {
934 struct ieee80211_sta *sta;
935 struct ieee80211_hw *hw;
936 struct sk_buff *skb;
937
938 spin_lock(&dev->rx_lock);
939 while ((skb = __skb_dequeue(frames)) != NULL) {
940 if (mt76_check_ccmp_pn(skb)) {
941 dev_kfree_skb(skb);
942 continue;
943 }
944
945 mt76_rx_convert(dev, skb, &hw, &sta);
946 ieee80211_rx_napi(hw, sta, skb, napi);
947 }
948 spin_unlock(&dev->rx_lock);
949 }
950
951 void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
952 struct napi_struct *napi)
953 {
954 struct sk_buff_head frames;
955 struct sk_buff *skb;
956
957 __skb_queue_head_init(&frames);
958
959 while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
960 mt76_check_sta(dev, skb);
961 mt76_rx_aggr_reorder(skb, &frames);
962 }
963
964 mt76_rx_complete(dev, &frames, napi);
965 }
966 EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);
967
968 static int
969 mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
970 struct ieee80211_sta *sta, bool ext_phy)
971 {
972 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
973 int ret;
974 int i;
975
976 mutex_lock(&dev->mutex);
977
978 ret = dev->drv->sta_add(dev, vif, sta);
979 if (ret)
980 goto out;
981
982 for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
983 struct mt76_txq *mtxq;
984
985 if (!sta->txq[i])
986 continue;
987
988 mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
989 mtxq->wcid = wcid;
990 }
991
992 ewma_signal_init(&wcid->rssi);
993 if (ext_phy)
994 mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
995 wcid->ext_phy = ext_phy;
996 rcu_assign_pointer(dev->wcid[wcid->idx], wcid);
997
998 out:
999 mutex_unlock(&dev->mutex);
1000
1001 return ret;
1002 }
1003
1004 void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
1005 struct ieee80211_sta *sta)
1006 {
1007 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
1008 int i, idx = wcid->idx;
1009
1010 for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
1011 mt76_rx_aggr_stop(dev, wcid, i);
1012
1013 if (dev->drv->sta_remove)
1014 dev->drv->sta_remove(dev, vif, sta);
1015
1016 mt76_tx_status_check(dev, wcid, true);
1017 mt76_wcid_mask_clear(dev->wcid_mask, idx);
1018 mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
1019 }
1020 EXPORT_SYMBOL_GPL(__mt76_sta_remove);
1021
1022 static void
1023 mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
1024 struct ieee80211_sta *sta)
1025 {
1026 mutex_lock(&dev->mutex);
1027 __mt76_sta_remove(dev, vif, sta);
1028 mutex_unlock(&dev->mutex);
1029 }
1030
1031 int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1032 struct ieee80211_sta *sta,
1033 enum ieee80211_sta_state old_state,
1034 enum ieee80211_sta_state new_state)
1035 {
1036 struct mt76_phy *phy = hw->priv;
1037 struct mt76_dev *dev = phy->dev;
1038 bool ext_phy = phy != &dev->phy;
1039
1040 if (old_state == IEEE80211_STA_NOTEXIST &&
1041 new_state == IEEE80211_STA_NONE)
1042 return mt76_sta_add(dev, vif, sta, ext_phy);
1043
1044 if (old_state == IEEE80211_STA_AUTH &&
1045 new_state == IEEE80211_STA_ASSOC &&
1046 dev->drv->sta_assoc)
1047 dev->drv->sta_assoc(dev, vif, sta);
1048
1049 if (old_state == IEEE80211_STA_NONE &&
1050 new_state == IEEE80211_STA_NOTEXIST)
1051 mt76_sta_remove(dev, vif, sta);
1052
1053 return 0;
1054 }
1055 EXPORT_SYMBOL_GPL(mt76_sta_state);
1056
1057 void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1058 struct ieee80211_sta *sta)
1059 {
1060 struct mt76_phy *phy = hw->priv;
1061 struct mt76_dev *dev = phy->dev;
1062 struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
1063
1064 mutex_lock(&dev->mutex);
1065 rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
1066 mutex_unlock(&dev->mutex);
1067 }
1068 EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);
1069
1070 int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1071 int *dbm)
1072 {
1073 struct mt76_phy *phy = hw->priv;
1074 int n_chains = hweight8(phy->antenna_mask);
1075 int delta = mt76_tx_power_nss_delta(n_chains);
1076
1077 *dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);
1078
1079 return 0;
1080 }
1081 EXPORT_SYMBOL_GPL(mt76_get_txpower);
1082
1083 static void
1084 __mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
1085 {
1086 if (vif->csa_active && ieee80211_beacon_cntdwn_is_complete(vif))
1087 ieee80211_csa_finish(vif);
1088 }
1089
1090 void mt76_csa_finish(struct mt76_dev *dev)
1091 {
1092 if (!dev->csa_complete)
1093 return;
1094
1095 ieee80211_iterate_active_interfaces_atomic(dev->hw,
1096 IEEE80211_IFACE_ITER_RESUME_ALL,
1097 __mt76_csa_finish, dev);
1098
1099 dev->csa_complete = 0;
1100 }
1101 EXPORT_SYMBOL_GPL(mt76_csa_finish);
1102
1103 static void
1104 __mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
1105 {
1106 struct mt76_dev *dev = priv;
1107
1108 if (!vif->csa_active)
1109 return;
1110
1111 dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif);
1112 }
1113
1114 void mt76_csa_check(struct mt76_dev *dev)
1115 {
1116 ieee80211_iterate_active_interfaces_atomic(dev->hw,
1117 IEEE80211_IFACE_ITER_RESUME_ALL,
1118 __mt76_csa_check, dev);
1119 }
1120 EXPORT_SYMBOL_GPL(mt76_csa_check);
1121
1122 int
1123 mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
1124 {
1125 return 0;
1126 }
1127 EXPORT_SYMBOL_GPL(mt76_set_tim);
1128
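/*
 * Re-insert the 8-byte CCMP header, rebuilt from the PN saved in the RX
 * status, between the 802.11 header and the payload of a frame whose IV
 * was stripped by hardware, and clear RX_FLAG_IV_STRIPPED accordingly.
 */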
1129 void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
1130 {
1131 struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
1132 int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
1133 u8 *hdr, *pn = status->iv;
1134
1135 __skb_push(skb, 8);
1136 memmove(skb->data, skb->data + 8, hdr_len);
1137 hdr = skb->data + hdr_len;
1138
1139 hdr[0] = pn[5];
1140 hdr[1] = pn[4];
1141 hdr[2] = 0;
1142 hdr[3] = 0x20 | (key_id << 6);
1143 hdr[4] = pn[3];
1144 hdr[5] = pn[2];
1145 hdr[6] = pn[1];
1146 hdr[7] = pn[0];
1147
1148 status->flag &= ~RX_FLAG_IV_STRIPPED;
1149 }
1150 EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);
1151
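/*
 * Map a hardware rate index back to an index into the band's bitrate
 * table. CCK rates only exist on 2.4 GHz (the short-preamble bit is masked
 * off); OFDM lookups on 2.4 GHz skip the four CCK entries.
 */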
1152 int mt76_get_rate(struct mt76_dev *dev,
1153 struct ieee80211_supported_band *sband,
1154 int idx, bool cck)
1155 {
1156 int i, offset = 0, len = sband->n_bitrates;
1157
1158 if (cck) {
1159 if (sband == &dev->phy.sband_5g.sband)
1160 return 0;
1161
1162 idx &= ~BIT(2); /* short preamble */
1163 } else if (sband == &dev->phy.sband_2g.sband) {
1164 offset = 4;
1165 }
1166
1167 for (i = offset; i < len; i++) {
1168 if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
1169 return i;
1170 }
1171
1172 return 0;
1173 }
1174 EXPORT_SYMBOL_GPL(mt76_get_rate);
1175
1176 void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
1177 const u8 *mac)
1178 {
1179 struct mt76_phy *phy = hw->priv;
1180
1181 set_bit(MT76_SCANNING, &phy->state);
1182 }
1183 EXPORT_SYMBOL_GPL(mt76_sw_scan);
1184
1185 void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
1186 {
1187 struct mt76_phy *phy = hw->priv;
1188
1189 clear_bit(MT76_SCANNING, &phy->state);
1190 }
1191 EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);
1192
1193 int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
1194 {
1195 struct mt76_phy *phy = hw->priv;
1196 struct mt76_dev *dev = phy->dev;
1197
1198 mutex_lock(&dev->mutex);
1199 *tx_ant = phy->antenna_mask;
1200 *rx_ant = phy->antenna_mask;
1201 mutex_unlock(&dev->mutex);
1202
1203 return 0;
1204 }
1205 EXPORT_SYMBOL_GPL(mt76_get_antenna);
1206