Lines Matching refs:rt2x00dev

All of the hits below come from the rt2x00 wireless driver's queue-management code (the rt2x00queue_* functions); each entry gives the source line number, the matching statement, and the enclosing function, with a trailing "local" or "argument" marking how rt2x00dev enters that function's scope.

37 struct rt2x00_dev *rt2x00dev = queue->rt2x00dev; in rt2x00queue_alloc_rxskb() local
62 if (rt2x00_has_cap_hw_crypto(rt2x00dev)) { in rt2x00queue_alloc_rxskb()
88 if (rt2x00_has_cap_flag(rt2x00dev, REQUIRE_DMA)) { in rt2x00queue_alloc_rxskb()
91 skb_dma = dma_map_single(rt2x00dev->dev, skb->data, skb->len, in rt2x00queue_alloc_rxskb()
93 if (unlikely(dma_mapping_error(rt2x00dev->dev, skb_dma))) { in rt2x00queue_alloc_rxskb()
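Lines 37-93 cover rt2x00queue_alloc_rxskb(), which sizes the RX buffer with extra head (and tail) room before mapping it for DMA. A close sketch of the allocation half, assuming the 4/8-byte reservations implied by the hardware-crypto branch at line 62:

#include <linux/skbuff.h>

static struct sk_buff *example_alloc_rxskb(unsigned int frame_size,
					   bool hw_crypto, gfp_t gfp)
{
	unsigned int head_size = 4;	/* room to realign the frame */
	unsigned int tail_size = 0;
	struct sk_buff *skb;

	if (hw_crypto) {
		head_size += 8;		/* restore a stripped IV/EIV here */
		tail_size += 8;		/* and the ICV here */
	}

	skb = __dev_alloc_skb(frame_size + head_size + tail_size, gfp);
	if (!skb)
		return NULL;

	skb_reserve(skb, head_size);	/* expose only the frame area */
	skb_put(skb, frame_size);
	return skb;
}
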
107 struct device *dev = entry->queue->rt2x00dev->dev; in rt2x00queue_map_txskb()
123 struct device *dev = entry->queue->rt2x00dev->dev; in rt2x00queue_unmap_skb()
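Lines 91-123 show the streaming-DMA discipline shared by the RX and TX paths: map the skb's buffer, check dma_mapping_error() before trusting the handle, and unmap once the device is done. A minimal sketch against the same kernel DMA API (the direction argument is DMA_FROM_DEVICE for RX, DMA_TO_DEVICE for TX):

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

static int example_map_skb(struct device *dev, struct sk_buff *skb,
			   dma_addr_t *skb_dma, enum dma_data_direction dir)
{
	*skb_dma = dma_map_single(dev, skb->data, skb->len, dir);
	if (unlikely(dma_mapping_error(dev, *skb_dma)))
		return -ENOMEM;		/* caller must free the skb */
	return 0;
}

static void example_unmap_skb(struct device *dev, dma_addr_t skb_dma,
			      unsigned int len, enum dma_data_direction dir)
{
	dma_unmap_single(dev, skb_dma, len, dir);
}
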
187 static void rt2x00queue_create_tx_descriptor_seq(struct rt2x00_dev *rt2x00dev, in rt2x00queue_create_tx_descriptor_seq() argument
201 if (!rt2x00_has_cap_flag(rt2x00dev, REQUIRE_SW_SEQNO)) { in rt2x00queue_create_tx_descriptor_seq()
208 if (test_bit(CONFIG_QOS_DISABLED, &rt2x00dev->flags)) in rt2x00queue_create_tx_descriptor_seq()
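rt2x00queue_create_tx_descriptor_seq() only takes its software path when the REQUIRE_SW_SEQNO capability is set (line 201), and consults CONFIG_QOS_DISABLED (line 208) to decide whether the driver or the hardware advances the counter. A sketch of the software case, with my_seqno standing in for the driver's per-interface atomic counter:

#include <linux/ieee80211.h>
#include <linux/atomic.h>

static void example_assign_seqno(struct ieee80211_hdr *hdr, atomic_t *my_seqno)
{
	/* The sequence number occupies bits 4-15, hence the 0x10 step. */
	u16 seqno = atomic_add_return(0x10, my_seqno);

	hdr->seq_ctrl &= cpu_to_le16(IEEE80211_SCTL_FRAG);
	hdr->seq_ctrl |= cpu_to_le16(seqno & IEEE80211_SCTL_SEQ);
}
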
235 static void rt2x00queue_create_tx_descriptor_plcp(struct rt2x00_dev *rt2x00dev, in rt2x00queue_create_tx_descriptor_plcp() argument
258 data_length += rt2x00crypto_tx_overhead(rt2x00dev, skb); in rt2x00queue_create_tx_descriptor_plcp()
299 static void rt2x00queue_create_tx_descriptor_ht(struct rt2x00_dev *rt2x00dev, in rt2x00queue_create_tx_descriptor_ht() argument
338 if (test_bit(CONFIG_HT_DISABLED, &rt2x00dev->flags)) { in rt2x00queue_create_tx_descriptor_ht()
392 static void rt2x00queue_create_tx_descriptor(struct rt2x00_dev *rt2x00dev, in rt2x00queue_create_tx_descriptor() argument
429 ieee80211_get_rts_cts_rate(rt2x00dev->hw, tx_info); in rt2x00queue_create_tx_descriptor()
436 if (txdesc->retry_limit >= rt2x00dev->long_retry) in rt2x00queue_create_tx_descriptor()
473 rate = ieee80211_get_tx_rate(rt2x00dev->hw, tx_info); in rt2x00queue_create_tx_descriptor()
484 rt2x00crypto_create_tx_descriptor(rt2x00dev, skb, txdesc); in rt2x00queue_create_tx_descriptor()
485 rt2x00queue_create_tx_descriptor_seq(rt2x00dev, skb, txdesc); in rt2x00queue_create_tx_descriptor()
487 if (rt2x00_has_cap_flag(rt2x00dev, REQUIRE_HT_TX_DESC)) in rt2x00queue_create_tx_descriptor()
488 rt2x00queue_create_tx_descriptor_ht(rt2x00dev, skb, txdesc, in rt2x00queue_create_tx_descriptor()
491 rt2x00queue_create_tx_descriptor_plcp(rt2x00dev, skb, txdesc, in rt2x00queue_create_tx_descriptor()
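rt2x00queue_create_tx_descriptor() (lines 392-491) resolves rates and retries through mac80211 helpers, switching to the long-retry limit once the frame's budget reaches rt2x00dev->long_retry (line 436), and then delegates to the crypto, sequence, and HT-or-PLCP sub-builders (lines 484-491). A sketch of the lookups, with pr_debug standing in for real consumption of the results:

#include <net/mac80211.h>

static void example_tx_rates(struct ieee80211_hw *hw,
			     struct ieee80211_tx_info *tx_info,
			     u8 retry_limit, u8 long_retry)
{
	const struct ieee80211_rate *rts_rate =
		ieee80211_get_rts_cts_rate(hw, tx_info);
	const struct ieee80211_rate *rate =
		ieee80211_get_tx_rate(hw, tx_info);
	bool use_long = retry_limit >= long_retry;	/* line 436's test */

	pr_debug("rts %d data %d long-retry %d\n",
		 rts_rate ? rts_rate->bitrate : -1,
		 rate ? rate->bitrate : -1, use_long);
}
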
498 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev; in rt2x00queue_write_tx_data() local
505 if (unlikely(rt2x00dev->ops->lib->get_entry_state && in rt2x00queue_write_tx_data()
506 rt2x00dev->ops->lib->get_entry_state(entry))) { in rt2x00queue_write_tx_data()
507 rt2x00_err(rt2x00dev, in rt2x00queue_write_tx_data()
517 skb_push(entry->skb, rt2x00dev->extra_tx_headroom); in rt2x00queue_write_tx_data()
518 memset(entry->skb->data, 0, rt2x00dev->extra_tx_headroom); in rt2x00queue_write_tx_data()
523 if (rt2x00dev->ops->lib->write_tx_data) in rt2x00queue_write_tx_data()
524 rt2x00dev->ops->lib->write_tx_data(entry, txdesc); in rt2x00queue_write_tx_data()
529 if (rt2x00_has_cap_flag(rt2x00dev, REQUIRE_DMA) && in rt2x00queue_write_tx_data()
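rt2x00queue_write_tx_data() (lines 498-529) first sanity-checks the entry's hardware state, then opens up extra_tx_headroom in front of the frame and zeroes it so the chipset callback writes the descriptor into clean memory. The headroom step in isolation:

#include <linux/skbuff.h>
#include <linux/string.h>

static void example_reserve_txdesc(struct sk_buff *skb,
				   unsigned int extra_tx_headroom)
{
	/* The device-specific TX descriptor sits in front of the 802.11 frame. */
	skb_push(skb, extra_tx_headroom);
	memset(skb->data, 0, extra_tx_headroom);
}
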
541 queue->rt2x00dev->ops->lib->write_tx_desc(entry, txdesc); in rt2x00queue_write_tx_descriptor()
547 rt2x00debug_dump_frame(queue->rt2x00dev, DUMP_FRAME_TX, entry->skb); in rt2x00queue_write_tx_descriptor()
564 queue->rt2x00dev->ops->lib->kick_queue(queue); in rt2x00queue_kick_tx_queue()
569 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev; in rt2x00queue_bar_check() local
571 rt2x00dev->extra_tx_headroom); in rt2x00queue_bar_check()
604 spin_lock_bh(&rt2x00dev->bar_list_lock); in rt2x00queue_bar_check()
605 list_add_tail_rcu(&bar_entry->list, &rt2x00dev->bar_list); in rt2x00queue_bar_check()
606 spin_unlock_bh(&rt2x00dev->bar_list_lock); in rt2x00queue_bar_check()
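The BAR bookkeeping at lines 604-606 appends an entry to an RCU-protected list under a BH-disabling spinlock, so the TX-done path can walk the list with only rcu_read_lock(). The same pattern in miniature, with struct bar_entry as an illustrative stand-in:

#include <linux/spinlock.h>
#include <linux/rculist.h>

struct bar_entry {
	struct list_head list;
	/* ... BAR frame details ... */
};

static LIST_HEAD(bar_list);
static DEFINE_SPINLOCK(bar_list_lock);

static void example_bar_add(struct bar_entry *e)
{
	spin_lock_bh(&bar_list_lock);		/* writers exclude each other */
	list_add_tail_rcu(&e->list, &bar_list);	/* readers need only RCU */
	spin_unlock_bh(&bar_list_lock);
}
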
624 rt2x00queue_create_tx_descriptor(queue->rt2x00dev, skb, &txdesc, sta); in rt2x00queue_write_tx_frame()
649 if (rt2x00_has_cap_flag(queue->rt2x00dev, REQUIRE_COPY_IV)) in rt2x00queue_write_tx_frame()
663 if (rt2x00_has_cap_flag(queue->rt2x00dev, REQUIRE_L2PAD)) in rt2x00queue_write_tx_frame()
665 else if (rt2x00_has_cap_flag(queue->rt2x00dev, REQUIRE_DMA)) in rt2x00queue_write_tx_frame()
674 rt2x00_err(queue->rt2x00dev, "Dropping frame due to full tx queue %d\n", in rt2x00queue_write_tx_frame()
684 rt2x00_err(queue->rt2x00dev, in rt2x00queue_write_tx_frame()
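rt2x00queue_write_tx_frame() (lines 624-684) prepares the payload per capability: it saves the IV for REQUIRE_COPY_IV hardware, inserts L2 padding for REQUIRE_L2PAD, or merely 4-byte-aligns the buffer for plain REQUIRE_DMA devices (lines 649-665). The alignment case, sketched with standard skb primitives (it relies on headroom reserved at allocation time):

#include <linux/skbuff.h>
#include <linux/string.h>

static void example_align_frame(struct sk_buff *skb)
{
	unsigned int frame_length = skb->len;
	unsigned int align = (unsigned long)skb->data & 3;

	if (!align)
		return;		/* already on a 4-byte boundary */

	skb_push(skb, align);	/* back data up to the aligned address */
	memmove(skb->data, skb->data + align, frame_length);
	skb_trim(skb, frame_length);
}
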
723 int rt2x00queue_clear_beacon(struct rt2x00_dev *rt2x00dev, in rt2x00queue_clear_beacon() argument
740 if (rt2x00dev->ops->lib->clear_beacon) in rt2x00queue_clear_beacon()
741 rt2x00dev->ops->lib->clear_beacon(intf->beacon); in rt2x00queue_clear_beacon()
746 int rt2x00queue_update_beacon(struct rt2x00_dev *rt2x00dev, in rt2x00queue_update_beacon() argument
761 intf->beacon->skb = ieee80211_beacon_get(rt2x00dev->hw, vif); in rt2x00queue_update_beacon()
770 rt2x00queue_create_tx_descriptor(rt2x00dev, intf->beacon->skb, &txdesc, NULL); in rt2x00queue_update_beacon()
782 rt2x00dev->ops->lib->write_beacon(intf->beacon, &txdesc); in rt2x00queue_update_beacon()
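The beacon paths (lines 723-782) mirror each other: rt2x00queue_clear_beacon() asks the chipset to stop beaconing, while rt2x00queue_update_beacon() pulls a fresh template from mac80211, builds a descriptor for it, and hands both to the driver's write_beacon() callback. A sketch of the fetch step, using the two-argument ieee80211_beacon_get() of this code's vintage:

#include <net/mac80211.h>

static int example_fetch_beacon(struct ieee80211_hw *hw,
				struct ieee80211_vif *vif,
				struct sk_buff **beacon_skb)
{
	struct sk_buff *skb = ieee80211_beacon_get(hw, vif);

	if (!skb)
		return -ENOMEM;
	*beacon_skb = skb;
	/* ...build the TX descriptor and call the chipset's write_beacon()... */
	return 0;
}
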
801 rt2x00_err(queue->rt2x00dev, in rt2x00queue_for_each_entry()
850 rt2x00_err(queue->rt2x00dev, "Entry requested from invalid index type (%d)\n", in rt2x00queue_get_entry()
871 rt2x00_err(queue->rt2x00dev, in rt2x00queue_index_inc()
905 ieee80211_stop_queue(queue->rt2x00dev->hw, queue->qid); in rt2x00queue_pause_queue_nocheck()
913 if (!test_bit(DEVICE_STATE_PRESENT, &queue->rt2x00dev->flags) || in rt2x00queue_pause_queue()
924 if (!test_bit(DEVICE_STATE_PRESENT, &queue->rt2x00dev->flags) || in rt2x00queue_unpause_queue()
938 ieee80211_wake_queue(queue->rt2x00dev->hw, queue->qid); in rt2x00queue_unpause_queue()
945 queue->rt2x00dev->ops->lib->kick_queue(queue); in rt2x00queue_unpause_queue()
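Pause and unpause (lines 905-945) are gated on DEVICE_STATE_PRESENT and translate into mac80211 stop/wake calls; unpausing either wakes the matching mac80211 queue or, for RX, kicks the chipset queue directly (line 945). A condensed sketch; the bit index is illustrative, and the real driver also tracks QUEUE_STARTED/QUEUE_PAUSED state:

#include <net/mac80211.h>
#include <linux/bitops.h>

enum { EXAMPLE_DEVICE_STATE_PRESENT = 0 };	/* stand-in bit index */

static void example_pause_queue(struct ieee80211_hw *hw, unsigned long *flags,
				int qid, bool pause)
{
	if (!test_bit(EXAMPLE_DEVICE_STATE_PRESENT, flags))
		return;		/* device gone: nothing to (un)pause */

	if (pause)
		ieee80211_stop_queue(hw, qid);
	else
		ieee80211_wake_queue(hw, qid);
}
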
956 if (!test_bit(DEVICE_STATE_PRESENT, &queue->rt2x00dev->flags) || in rt2x00queue_start_queue()
964 queue->rt2x00dev->ops->lib->start_queue(queue); in rt2x00queue_start_queue()
983 queue->rt2x00dev->ops->lib->stop_queue(queue); in rt2x00queue_stop_queue()
1005 queue->rt2x00dev->ops->lib->kick_queue(queue); in rt2x00queue_flush_queue()
1012 if (likely(queue->rt2x00dev->ops->lib->flush_queue)) in rt2x00queue_flush_queue()
1013 queue->rt2x00dev->ops->lib->flush_queue(queue, drop); in rt2x00queue_flush_queue()
1019 rt2x00_warn(queue->rt2x00dev, "Queue %d failed to flush\n", in rt2x00queue_flush_queue()
1024 void rt2x00queue_start_queues(struct rt2x00_dev *rt2x00dev) in rt2x00queue_start_queues() argument
1032 tx_queue_for_each(rt2x00dev, queue) in rt2x00queue_start_queues()
1035 rt2x00queue_start_queue(rt2x00dev->rx); in rt2x00queue_start_queues()
1039 void rt2x00queue_stop_queues(struct rt2x00_dev *rt2x00dev) in rt2x00queue_stop_queues() argument
1049 ieee80211_stop_queues(rt2x00dev->hw); in rt2x00queue_stop_queues()
1051 tx_queue_for_each(rt2x00dev, queue) in rt2x00queue_stop_queues()
1054 rt2x00queue_stop_queue(rt2x00dev->rx); in rt2x00queue_stop_queues()
1058 void rt2x00queue_flush_queues(struct rt2x00_dev *rt2x00dev, bool drop) in rt2x00queue_flush_queues() argument
1062 tx_queue_for_each(rt2x00dev, queue) in rt2x00queue_flush_queues()
1065 rt2x00queue_flush_queue(rt2x00dev->rx, drop); in rt2x00queue_flush_queues()
1085 void rt2x00queue_init_queues(struct rt2x00_dev *rt2x00dev) in rt2x00queue_init_queues() argument
1090 queue_for_each(rt2x00dev, queue) { in rt2x00queue_init_queues()
1094 rt2x00dev->ops->lib->clear_entry(&queue->entries[i]); in rt2x00queue_init_queues()
1162 int rt2x00queue_initialize(struct rt2x00_dev *rt2x00dev) in rt2x00queue_initialize() argument
1167 status = rt2x00queue_alloc_entries(rt2x00dev->rx); in rt2x00queue_initialize()
1171 tx_queue_for_each(rt2x00dev, queue) { in rt2x00queue_initialize()
1177 status = rt2x00queue_alloc_entries(rt2x00dev->bcn); in rt2x00queue_initialize()
1181 if (rt2x00_has_cap_flag(rt2x00dev, REQUIRE_ATIM_QUEUE)) { in rt2x00queue_initialize()
1182 status = rt2x00queue_alloc_entries(rt2x00dev->atim); in rt2x00queue_initialize()
1187 status = rt2x00queue_alloc_rxskbs(rt2x00dev->rx); in rt2x00queue_initialize()
1194 rt2x00_err(rt2x00dev, "Queue entries allocation failed\n"); in rt2x00queue_initialize()
1196 rt2x00queue_uninitialize(rt2x00dev); in rt2x00queue_initialize()
1201 void rt2x00queue_uninitialize(struct rt2x00_dev *rt2x00dev) in rt2x00queue_uninitialize() argument
1205 rt2x00queue_free_skbs(rt2x00dev->rx); in rt2x00queue_uninitialize()
1207 queue_for_each(rt2x00dev, queue) { in rt2x00queue_uninitialize()
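rt2x00queue_initialize() (lines 1162-1196) allocates entries for RX, every TX queue, the beacon queue, the optional ATIM queue, and finally the RX skbs, funneling any failure into a single exit path that logs and calls rt2x00queue_uninitialize(). The same shape, reduced to a runnable plain-C demo with stand-in allocations:

#include <stdio.h>
#include <stdlib.h>

static void *alloc_step(void)
{
	return malloc(16);	/* stand-in for a per-queue entry allocation */
}

int main(void)
{
	void *rx = NULL, *tx = NULL, *bcn = NULL;
	int status = -1;

	rx = alloc_step();
	if (!rx)
		goto exit;
	tx = alloc_step();
	if (!tx)
		goto exit;
	bcn = alloc_step();
	if (!bcn)
		goto exit;
	status = 0;

exit:
	if (status)	/* mirror the driver: log once, then unwind everything */
		fprintf(stderr, "Queue entries allocation failed\n");
	/* a real driver keeps these on success; freed here so the demo is leak-free */
	free(bcn);
	free(tx);
	free(rx);
	return status ? 1 : 0;
}
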
1213 static void rt2x00queue_init(struct rt2x00_dev *rt2x00dev, in rt2x00queue_init() argument
1220 queue->rt2x00dev = rt2x00dev; in rt2x00queue_init()
1227 rt2x00dev->ops->queue_init(queue); in rt2x00queue_init()
1232 int rt2x00queue_allocate(struct rt2x00_dev *rt2x00dev) in rt2x00queue_allocate() argument
1237 rt2x00_has_cap_flag(rt2x00dev, REQUIRE_ATIM_QUEUE); in rt2x00queue_allocate()
1246 rt2x00dev->data_queues = 2 + rt2x00dev->ops->tx_queues + req_atim; in rt2x00queue_allocate()
1248 queue = kcalloc(rt2x00dev->data_queues, sizeof(*queue), GFP_KERNEL); in rt2x00queue_allocate()
1250 rt2x00_err(rt2x00dev, "Queue allocation failed\n"); in rt2x00queue_allocate()
1257 rt2x00dev->rx = queue; in rt2x00queue_allocate()
1258 rt2x00dev->tx = &queue[1]; in rt2x00queue_allocate()
1259 rt2x00dev->bcn = &queue[1 + rt2x00dev->ops->tx_queues]; in rt2x00queue_allocate()
1260 rt2x00dev->atim = req_atim ? &queue[2 + rt2x00dev->ops->tx_queues] : NULL; in rt2x00queue_allocate()
1271 rt2x00queue_init(rt2x00dev, rt2x00dev->rx, QID_RX); in rt2x00queue_allocate()
1274 tx_queue_for_each(rt2x00dev, queue) in rt2x00queue_allocate()
1275 rt2x00queue_init(rt2x00dev, queue, qid++); in rt2x00queue_allocate()
1277 rt2x00queue_init(rt2x00dev, rt2x00dev->bcn, QID_BEACON); in rt2x00queue_allocate()
1279 rt2x00queue_init(rt2x00dev, rt2x00dev->atim, QID_ATIM); in rt2x00queue_allocate()
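rt2x00queue_allocate() (lines 1232-1279) carves every queue out of one kcalloc'd array: RX first, then ops->tx_queues TX queues, the beacon queue, and, when REQUIRE_ATIM_QUEUE is set, the ATIM queue, which is exactly why data_queues = 2 + tx_queues + req_atim on line 1246. The indexing as a self-contained demo:

#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <stddef.h>

struct data_queue { int qid; };

int main(void)
{
	unsigned int tx_queues = 4;	/* e.g. the four AC queues */
	bool req_atim = true;		/* REQUIRE_ATIM_QUEUE set */
	unsigned int data_queues = 2 + tx_queues + req_atim;

	struct data_queue *queue = calloc(data_queues, sizeof(*queue));
	if (!queue)
		return 1;

	struct data_queue *rx	= &queue[0];
	struct data_queue *tx	= &queue[1];
	struct data_queue *bcn	= &queue[1 + tx_queues];
	struct data_queue *atim = req_atim ? &queue[2 + tx_queues] : NULL;

	printf("queues=%u rx=%td tx=%td bcn=%td atim=%td\n", data_queues,
	       rx - queue, tx - queue, bcn - queue,
	       atim ? atim - queue : (ptrdiff_t)-1);

	free(queue);
	return 0;
}
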
1284 void rt2x00queue_free(struct rt2x00_dev *rt2x00dev) in rt2x00queue_free() argument
1286 kfree(rt2x00dev->rx); in rt2x00queue_free()
1287 rt2x00dev->rx = NULL; in rt2x00queue_free()
1288 rt2x00dev->tx = NULL; in rt2x00queue_free()
1289 rt2x00dev->bcn = NULL; in rt2x00queue_free()