
Lines Matching refs:queue

46 frame_size = entry->queue->data_size + entry->queue->desc_size; in rt2x00queue_alloc_rxskb()
154 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev; in rt2x00queue_create_tx_descriptor()
171 txdesc->queue = entry->queue->qid; in rt2x00queue_create_tx_descriptor()
172 txdesc->cw_min = entry->queue->cw_min; in rt2x00queue_create_tx_descriptor()
173 txdesc->cw_max = entry->queue->cw_max; in rt2x00queue_create_tx_descriptor()
174 txdesc->aifs = entry->queue->aifs; in rt2x00queue_create_tx_descriptor()
325 struct data_queue *queue = entry->queue; in rt2x00queue_write_tx_descriptor() local
326 struct rt2x00_dev *rt2x00dev = queue->rt2x00dev; in rt2x00queue_write_tx_descriptor()
346 if (entry->queue->qid == QID_BEACON) in rt2x00queue_write_tx_descriptor()
349 if (rt2x00queue_threshold(queue) || in rt2x00queue_write_tx_descriptor()
351 rt2x00dev->ops->lib->kick_tx_queue(rt2x00dev, queue->qid); in rt2x00queue_write_tx_descriptor()
354 int rt2x00queue_write_tx_frame(struct data_queue *queue, struct sk_buff *skb) in rt2x00queue_write_tx_frame() argument
357 struct queue_entry *entry = rt2x00queue_get_entry(queue, Q_INDEX); in rt2x00queue_write_tx_frame()
363 if (unlikely(rt2x00queue_full(queue))) in rt2x00queue_write_tx_frame()
367 ERROR(queue->rt2x00dev, in rt2x00queue_write_tx_frame()
370 queue->qid, DRV_PROJECT); in rt2x00queue_write_tx_frame()
406 if (test_bit(CONFIG_CRYPTO_COPY_IV, &queue->rt2x00dev->flags)) in rt2x00queue_write_tx_frame()
417 if (unlikely(queue->rt2x00dev->ops->lib->write_tx_data(entry))) { in rt2x00queue_write_tx_frame()
423 if (test_bit(DRIVER_REQUIRE_DMA, &queue->rt2x00dev->flags)) in rt2x00queue_write_tx_frame()
424 rt2x00queue_map_txskb(queue->rt2x00dev, skb); in rt2x00queue_write_tx_frame()
428 rt2x00queue_index_inc(queue, Q_INDEX); in rt2x00queue_write_tx_frame()
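
Read together, the rt2x00queue_write_tx_frame() fragments above (source lines 354-428) outline the TX enqueue path: fetch the entry at the Q_INDEX write pointer, reject the frame if the queue is already full, hand descriptor and payload to the chipset driver through its write_tx_data() callback, map the skb for DMA when the driver requires it, and only then advance Q_INDEX. Below is a minimal user-space sketch of that flow, not the driver code itself; tx_entry, tx_queue, driver_ops and demo_write_tx_data are made-up stand-ins for the kernel structures, and locking, descriptor contents and DMA mapping are left out.

#include <stdbool.h>
#include <stdio.h>

#define QUEUE_LIMIT 8

struct tx_entry {                         /* stand-in for struct queue_entry */
    int frame_id;
    bool owned_by_device;
};

struct tx_queue {                         /* stand-in for struct data_queue */
    struct tx_entry entries[QUEUE_LIMIT];
    unsigned int write;                   /* Q_INDEX: next entry to fill */
    unsigned int length;                  /* frames currently queued */
};

struct driver_ops {                       /* stand-in for rt2x00dev->ops->lib */
    int (*write_tx_data)(struct tx_entry *entry);
};

static int demo_write_tx_data(struct tx_entry *entry)
{
    printf("descriptor + payload written for frame %d\n", entry->frame_id);
    return 0;
}

static int write_tx_frame(struct tx_queue *q, const struct driver_ops *ops,
                          int frame_id)
{
    if (q->length == QUEUE_LIMIT)         /* rt2x00queue_full() check */
        return -1;

    struct tx_entry *entry = &q->entries[q->write];
    entry->frame_id = frame_id;

    if (ops->write_tx_data(entry))        /* chipset-specific TX write */
        return -1;

    entry->owned_by_device = true;
    q->write = (q->write + 1) % QUEUE_LIMIT;   /* advance Q_INDEX */
    q->length++;
    return 0;
}

int main(void)
{
    struct driver_ops ops = { .write_tx_data = demo_write_tx_data };
    struct tx_queue q = { .write = 0, .length = 0 };

    for (int i = 0; i < 10; i++)
        if (write_tx_frame(&q, &ops, i))
            printf("frame %d dropped: queue full\n", i);
    return 0;
}

Keeping the full-queue check ahead of the driver callback mirrors the early bail-out visible at source line 363, before any per-entry state has been touched.
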
469 skbdesc->desc_len = intf->beacon->queue->desc_size; in rt2x00queue_update_beacon()
489 const enum data_queue_qid queue) in rt2x00queue_get_queue() argument
493 if (queue < rt2x00dev->ops->tx_queues && rt2x00dev->tx) in rt2x00queue_get_queue()
494 return &rt2x00dev->tx[queue]; in rt2x00queue_get_queue()
499 if (queue == QID_BEACON) in rt2x00queue_get_queue()
501 else if (queue == QID_ATIM && atim) in rt2x00queue_get_queue()
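
The rt2x00queue_get_queue() lines above show how a queue ID is resolved to a data_queue: IDs below ops->tx_queues index the TX array directly, while QID_BEACON and, on hardware with an ATIM queue, QID_ATIM fall through to the beacon array. A sketch of that dispatch follows; the qid values, the has_atim flag and the bcn[0]/bcn[1] placement are illustrative assumptions, since only parts of the function appear in the listing.

#include <stddef.h>
#include <stdio.h>

/* qid values here are arbitrary sketch values; the only property the
 * lookup relies on is that data qids are smaller than ops->tx_queues. */
enum qid { QID_AC0, QID_AC1, QID_AC2, QID_AC3, QID_BEACON = 16, QID_ATIM };

struct data_queue { enum qid qid; };

struct dev {
    unsigned int tx_queues;      /* rt2x00dev->ops->tx_queues */
    int has_atim;                /* stands in for the ATIM-queue flag */
    struct data_queue *tx;       /* tx_queues data queues */
    struct data_queue *bcn;      /* beacon (and, if present, ATIM) queues */
};

static struct data_queue *get_queue(struct dev *dev, enum qid qid)
{
    if ((unsigned int)qid < dev->tx_queues && dev->tx)
        return &dev->tx[qid];               /* data qids index tx[] directly */

    if (qid == QID_BEACON)
        return &dev->bcn[0];                /* assumed beacon slot */
    if (qid == QID_ATIM && dev->has_atim)
        return &dev->bcn[1];                /* assumed ATIM slot */

    return NULL;                            /* unknown or unsupported qid */
}

int main(void)
{
    struct data_queue tx[4], bcn[2];
    struct dev dev = { .tx_queues = 4, .has_atim = 0, .tx = tx, .bcn = bcn };

    printf("QID_AC2 -> tx[%ld]\n", (long)(get_queue(&dev, QID_AC2) - tx));
    printf("QID_ATIM -> %s\n", get_queue(&dev, QID_ATIM) ? "bcn[1]" : "NULL");
    return 0;
}
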
508 struct queue_entry *rt2x00queue_get_entry(struct data_queue *queue, in rt2x00queue_get_entry() argument
515 ERROR(queue->rt2x00dev, in rt2x00queue_get_entry()
520 spin_lock_irqsave(&queue->lock, irqflags); in rt2x00queue_get_entry()
522 entry = &queue->entries[queue->index[index]]; in rt2x00queue_get_entry()
524 spin_unlock_irqrestore(&queue->lock, irqflags); in rt2x00queue_get_entry()
530 void rt2x00queue_index_inc(struct data_queue *queue, enum queue_index index) in rt2x00queue_index_inc() argument
535 ERROR(queue->rt2x00dev, in rt2x00queue_index_inc()
540 spin_lock_irqsave(&queue->lock, irqflags); in rt2x00queue_index_inc()
542 queue->index[index]++; in rt2x00queue_index_inc()
543 if (queue->index[index] >= queue->limit) in rt2x00queue_index_inc()
544 queue->index[index] = 0; in rt2x00queue_index_inc()
547 queue->length++; in rt2x00queue_index_inc()
549 queue->length--; in rt2x00queue_index_inc()
550 queue->count++; in rt2x00queue_index_inc()
553 spin_unlock_irqrestore(&queue->lock, irqflags); in rt2x00queue_index_inc()
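
The rt2x00queue_get_entry() and rt2x00queue_index_inc() fragments above describe the ring bookkeeping each queue carries: an array of index pointers (a write index and a done index, Q_INDEX and Q_INDEX_DONE) that wrap at queue->limit, with length presumably growing as the write index advances and shrinking, while count tallies handled frames, as the done index advances, all under queue->lock. Below is a user-space sketch of that scheme with a pthread mutex standing in for the kernel spinlock; the struct layout and the exact index set are assumptions.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

enum queue_index { Q_INDEX, Q_INDEX_DONE, Q_INDEX_MAX };

struct entry { int busy; };

struct data_queue {
    pthread_mutex_t lock;              /* plays the role of queue->lock */
    struct entry *entries;
    unsigned int limit;                /* ring size */
    unsigned int length;               /* entries between DONE and INDEX */
    unsigned int count;                /* total frames completed so far */
    unsigned short index[Q_INDEX_MAX];
};

/* Read the entry a given index pointer currently refers to, under the
 * lock, as rt2x00queue_get_entry() does. */
static struct entry *queue_get_entry(struct data_queue *q, enum queue_index idx)
{
    pthread_mutex_lock(&q->lock);
    struct entry *e = &q->entries[q->index[idx]];
    pthread_mutex_unlock(&q->lock);
    return e;
}

/* Advance one index pointer, wrap it at the ring limit, and keep the
 * length/count bookkeeping consistent, as rt2x00queue_index_inc() does. */
static void queue_index_inc(struct data_queue *q, enum queue_index idx)
{
    pthread_mutex_lock(&q->lock);

    q->index[idx]++;
    if (q->index[idx] >= q->limit)
        q->index[idx] = 0;

    if (idx == Q_INDEX) {
        q->length++;                   /* a new frame entered the queue */
    } else if (idx == Q_INDEX_DONE) {
        q->length--;                   /* a frame left the queue ... */
        q->count++;                    /* ... and counts as handled */
    }

    pthread_mutex_unlock(&q->lock);
}

int main(void)
{
    struct data_queue q = { .lock = PTHREAD_MUTEX_INITIALIZER, .limit = 4 };
    q.entries = calloc(q.limit, sizeof(*q.entries));
    if (!q.entries)
        return 1;

    queue_get_entry(&q, Q_INDEX)->busy = 1;   /* "fill" the current entry */
    queue_index_inc(&q, Q_INDEX);             /* frame queued */
    queue_index_inc(&q, Q_INDEX_DONE);        /* frame completed */

    printf("length=%u count=%u\n", q.length, q.count);   /* 0 and 1 */
    free(q.entries);
    return 0;
}

Only the index arithmetic happens under the lock; per-entry state is tracked separately (the entries[i].flags cleared at source line 578), which this sketch ignores.
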
556 static void rt2x00queue_reset(struct data_queue *queue) in rt2x00queue_reset() argument
560 spin_lock_irqsave(&queue->lock, irqflags); in rt2x00queue_reset()
562 queue->count = 0; in rt2x00queue_reset()
563 queue->length = 0; in rt2x00queue_reset()
564 memset(queue->index, 0, sizeof(queue->index)); in rt2x00queue_reset()
566 spin_unlock_irqrestore(&queue->lock, irqflags); in rt2x00queue_reset()
571 struct data_queue *queue; in rt2x00queue_init_queues() local
574 queue_for_each(rt2x00dev, queue) { in rt2x00queue_init_queues()
575 rt2x00queue_reset(queue); in rt2x00queue_init_queues()
577 for (i = 0; i < queue->limit; i++) { in rt2x00queue_init_queues()
578 queue->entries[i].flags = 0; in rt2x00queue_init_queues()
580 rt2x00dev->ops->lib->clear_entry(&queue->entries[i]); in rt2x00queue_init_queues()
585 static int rt2x00queue_alloc_entries(struct data_queue *queue, in rt2x00queue_alloc_entries() argument
592 rt2x00queue_reset(queue); in rt2x00queue_alloc_entries()
594 queue->limit = qdesc->entry_num; in rt2x00queue_alloc_entries()
595 queue->threshold = DIV_ROUND_UP(qdesc->entry_num, 10); in rt2x00queue_alloc_entries()
596 queue->data_size = qdesc->data_size; in rt2x00queue_alloc_entries()
597 queue->desc_size = qdesc->desc_size; in rt2x00queue_alloc_entries()
603 entries = kzalloc(queue->limit * entry_size, GFP_KERNEL); in rt2x00queue_alloc_entries()
611 for (i = 0; i < queue->limit; i++) { in rt2x00queue_alloc_entries()
613 entries[i].queue = queue; in rt2x00queue_alloc_entries()
617 QUEUE_ENTRY_PRIV_OFFSET(entries, i, queue->limit, in rt2x00queue_alloc_entries()
623 queue->entries = entries; in rt2x00queue_alloc_entries()
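
rt2x00queue_alloc_entries() takes its limits from the queue descriptor (entry_num, data_size, desc_size, with the kick threshold set to a tenth of the entries via DIV_ROUND_UP), then allocates every entry in a single kzalloc() of limit * entry_size and points each entry's private data into that block through QUEUE_ENTRY_PRIV_OFFSET(). The macro body is not part of the listing, so the sketch below assumes the simplest layout, the entry array first and the per-entry private blocks after it, to show how such a co-allocation can be wired up.

#include <stdio.h>
#include <stdlib.h>

struct queue_entry {
    void *priv_data;          /* points into the tail of the same allocation */
    unsigned int entry_idx;
};

/* Assumed layout, modelled on QUEUE_ENTRY_PRIV_OFFSET(): the array of
 * queue_entry structures comes first, followed by 'limit' private blocks
 * of 'priv_size' bytes each (alignment of that area is glossed over). */
static void *entry_priv(struct queue_entry *base, unsigned int i,
                        unsigned int limit, size_t priv_size)
{
    char *priv_area = (char *)&base[limit];
    return priv_area + i * priv_size;
}

static struct queue_entry *alloc_entries(unsigned int limit, size_t priv_size)
{
    size_t entry_size = sizeof(struct queue_entry) + priv_size;

    /* One zeroed allocation for everything, as in the kzalloc() at
     * source line 603. */
    struct queue_entry *entries = calloc(limit, entry_size);
    if (!entries)
        return NULL;

    for (unsigned int i = 0; i < limit; i++) {
        entries[i].entry_idx = i;
        entries[i].priv_data = entry_priv(entries, i, limit, priv_size);
    }
    return entries;
}

int main(void)
{
    unsigned int limit = 24;
    unsigned int threshold = (limit + 9) / 10;   /* DIV_ROUND_UP(limit, 10) */

    struct queue_entry *entries = alloc_entries(limit, 32);
    if (!entries)
        return 1;

    printf("threshold=%u, entry[3] priv at %p\n",
           threshold, entries[3].priv_data);
    free(entries);
    return 0;
}

Co-allocating the entry array and the driver-private area keeps setup and teardown to a single allocation, which matches the lone kfree(queue->entries) in rt2x00queue_uninitialize() further down the listing.
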
629 struct data_queue *queue) in rt2x00queue_free_skbs() argument
633 if (!queue->entries) in rt2x00queue_free_skbs()
636 for (i = 0; i < queue->limit; i++) { in rt2x00queue_free_skbs()
637 if (queue->entries[i].skb) in rt2x00queue_free_skbs()
638 rt2x00queue_free_skb(rt2x00dev, queue->entries[i].skb); in rt2x00queue_free_skbs()
643 struct data_queue *queue) in rt2x00queue_alloc_rxskbs() argument
648 for (i = 0; i < queue->limit; i++) { in rt2x00queue_alloc_rxskbs()
649 skb = rt2x00queue_alloc_rxskb(rt2x00dev, &queue->entries[i]); in rt2x00queue_alloc_rxskbs()
652 queue->entries[i].skb = skb; in rt2x00queue_alloc_rxskbs()
660 struct data_queue *queue; in rt2x00queue_initialize() local
667 tx_queue_for_each(rt2x00dev, queue) { in rt2x00queue_initialize()
668 status = rt2x00queue_alloc_entries(queue, rt2x00dev->ops->tx); in rt2x00queue_initialize()
700 struct data_queue *queue; in rt2x00queue_uninitialize() local
704 queue_for_each(rt2x00dev, queue) { in rt2x00queue_uninitialize()
705 kfree(queue->entries); in rt2x00queue_uninitialize()
706 queue->entries = NULL; in rt2x00queue_uninitialize()
711 struct data_queue *queue, enum data_queue_qid qid) in rt2x00queue_init() argument
713 spin_lock_init(&queue->lock); in rt2x00queue_init()
715 queue->rt2x00dev = rt2x00dev; in rt2x00queue_init()
716 queue->qid = qid; in rt2x00queue_init()
717 queue->txop = 0; in rt2x00queue_init()
718 queue->aifs = 2; in rt2x00queue_init()
719 queue->cw_min = 5; in rt2x00queue_init()
720 queue->cw_max = 10; in rt2x00queue_init()
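
rt2x00queue_init() seeds every queue with default medium-access parameters: txop = 0, aifs = 2, cw_min = 5, cw_max = 10, which rt2x00queue_create_tx_descriptor() later copies into each TX descriptor (source lines 171-174). If, as is usual for EDCA parameters, the cw values are exponents rather than slot counts, these defaults correspond to contention windows of 31 and 1023 slots; a tiny example of that conversion (the exponent interpretation is an assumption, not something the listing states):

#include <stdio.h>

/* Assumes cw_min/cw_max are stored as exponents (the usual EDCA
 * convention); the listing itself only shows the raw values 5 and 10. */
static unsigned int cw_from_exponent(unsigned int exponent)
{
    return (1u << exponent) - 1;
}

int main(void)
{
    printf("cw_min = 5  -> %u slots\n", cw_from_exponent(5));    /* 31 */
    printf("cw_max = 10 -> %u slots\n", cw_from_exponent(10));   /* 1023 */
    return 0;
}
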
725 struct data_queue *queue; in rt2x00queue_allocate() local
739 queue = kzalloc(rt2x00dev->data_queues * sizeof(*queue), GFP_KERNEL); in rt2x00queue_allocate()
740 if (!queue) { in rt2x00queue_allocate()
748 rt2x00dev->rx = queue; in rt2x00queue_allocate()
749 rt2x00dev->tx = &queue[1]; in rt2x00queue_allocate()
750 rt2x00dev->bcn = &queue[1 + rt2x00dev->ops->tx_queues]; in rt2x00queue_allocate()
764 tx_queue_for_each(rt2x00dev, queue) in rt2x00queue_allocate()
765 rt2x00queue_init(rt2x00dev, queue, qid++); in rt2x00queue_allocate()
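
Finally, rt2x00queue_allocate() carves all queues out of one zeroed allocation: queue[0] becomes the RX queue, queue[1] through queue[tx_queues] the TX queues, and the beacon (plus, where required, ATIM) queue starts at queue[1 + tx_queues]; the closing tx_queue_for_each() loop then hands consecutive qids to rt2x00queue_init(). The sketch below reproduces that layout in user space; the data_queues count, the starting qid and the ATIM handling are assumptions, since those parts of the function are not in the listing.

#include <stdio.h>
#include <stdlib.h>

struct data_queue {
    unsigned int qid;
    unsigned int aifs, cw_min, cw_max, txop;
};

struct rt2x00_dev_sketch {                 /* stand-in for struct rt2x00_dev */
    unsigned int tx_queues;                /* from ops->tx_queues */
    unsigned int data_queues;              /* RX + TX + beacon (+ ATIM) */
    struct data_queue *rx, *tx, *bcn;
};

static int queue_allocate(struct rt2x00_dev_sketch *dev, int has_atim)
{
    /* Assumed count: 1 RX + tx_queues TX + 1 beacon (+ 1 ATIM); the real
     * function computes data_queues before the kzalloc() at line 739. */
    dev->data_queues = 1 + dev->tx_queues + 1 + (has_atim ? 1 : 0);

    struct data_queue *queue = calloc(dev->data_queues, sizeof(*queue));
    if (!queue)
        return -1;

    dev->rx  = queue;                          /* queue[0]                 */
    dev->tx  = &queue[1];                      /* queue[1 .. tx_queues]    */
    dev->bcn = &queue[1 + dev->tx_queues];     /* beacon (and ATIM) queues */

    /* Mirrors the tx_queue_for_each() loop at the end of the listing:
     * consecutive qids, default parameters as set by rt2x00queue_init().
     * (The starting qid is established earlier in the real function.) */
    unsigned int qid = 0;
    for (unsigned int i = 0; i < dev->tx_queues; i++) {
        dev->tx[i].qid = qid++;
        dev->tx[i].aifs = 2;
        dev->tx[i].cw_min = 5;
        dev->tx[i].cw_max = 10;
        dev->tx[i].txop = 0;
    }
    return 0;
}

int main(void)
{
    struct rt2x00_dev_sketch dev = { .tx_queues = 4 };

    if (queue_allocate(&dev, 0))
        return 1;

    printf("rx at [0], tx at [1..%u], bcn at [%u]\n",
           dev.tx_queues, 1 + dev.tx_queues);
    free(dev.rx);                              /* rx is the base pointer */
    return 0;
}

Because rx points at the base of the allocation, freeing it releases the TX and beacon queues as well, which is why the sketch calls free() only once.
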