Lines matching refs: txq
(Identifier cross-reference for txq in the ath9k TX path, drivers/net/wireless/ath/ath9k/xmit.c. The leading number on each line is the source line number; "argument" and "local" mark lines where txq is defined as a function parameter or local variable.)
50 static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq,
53 int tx_flags, struct ath_txq *txq,
56 struct ath_txq *txq, struct list_head *bf_q,
59 static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
67 struct ath_txq *txq,
98 void ath_txq_lock(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_lock() argument
99 __acquires(&txq->axq_lock) in ath_txq_lock()
101 spin_lock_bh(&txq->axq_lock); in ath_txq_lock()
104 void ath_txq_unlock(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_unlock() argument
105 __releases(&txq->axq_lock) in ath_txq_unlock()
107 spin_unlock_bh(&txq->axq_lock); in ath_txq_unlock()
110 void ath_txq_unlock_complete(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_unlock_complete() argument
111 __releases(&txq->axq_lock) in ath_txq_unlock_complete()
118 skb_queue_splice_init(&txq->complete_q, &q); in ath_txq_unlock_complete()
119 spin_unlock_bh(&txq->axq_lock); in ath_txq_unlock_complete()
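
The three helpers above are the locking pattern used throughout this file: frame completions are parked on txq->complete_q while axq_lock is held and are only reported to mac80211 after the lock is dropped, so status reporting never runs under the TX path's spinlock. A minimal sketch reconstructed from the lines listed here; the flush loop and the ieee80211_tx_status() call are assumptions based on the complete_q splice, not shown in the listing:

    void ath_txq_lock(struct ath_softc *sc, struct ath_txq *txq)
        __acquires(&txq->axq_lock)
    {
        spin_lock_bh(&txq->axq_lock);
    }

    void ath_txq_unlock(struct ath_softc *sc, struct ath_txq *txq)
        __releases(&txq->axq_lock)
    {
        spin_unlock_bh(&txq->axq_lock);
    }

    void ath_txq_unlock_complete(struct ath_softc *sc, struct ath_txq *txq)
        __releases(&txq->axq_lock)
    {
        struct sk_buff_head q;
        struct sk_buff *skb;

        __skb_queue_head_init(&q);
        skb_queue_splice_init(&txq->complete_q, &q);  /* line 118 */
        spin_unlock_bh(&txq->axq_lock);               /* line 119 */

        /* Completion reporting runs lock-free; the exact callback is
         * an assumption (mainline ath9k uses ieee80211_tx_status()). */
        while ((skb = __skb_dequeue(&q)) != NULL)
            ieee80211_tx_status(sc->hw, skb);
    }
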
125 static void ath_tx_queue_tid(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_queue_tid() argument
164 static void ath_txq_skb_done(struct ath_softc *sc, struct ath_txq *txq, in ath_txq_skb_done() argument
169 int q = fi->txq; in ath_txq_skb_done()
174 txq = sc->tx.txq_map[q]; in ath_txq_skb_done()
175 if (WARN_ON(--txq->pending_frames < 0)) in ath_txq_skb_done()
176 txq->pending_frames = 0; in ath_txq_skb_done()
178 if (txq->stopped && in ath_txq_skb_done()
179 txq->pending_frames < sc->tx.txq_max_pending[q]) { in ath_txq_skb_done()
184 txq->stopped = false; in ath_txq_skb_done()
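
Lines 164-184 are the completion half of the driver's software flow control: each completed skb credits pending_frames on the queue it was charged to at submit time (fi->txq, set in ath_tx_start() at line 2348), and a queue stopped for back-pressure is restarted once it drains below txq_max_pending. A hedged sketch; the early return for unaccounted frames and the ieee80211_wake_queue() call are assumptions filled in around the listed lines:

    static void ath_txq_skb_done(struct ath_softc *sc, struct ath_txq *txq,
                                 struct sk_buff *skb)
    {
        struct ath_frame_info *fi = get_frame_info(skb);
        int q = fi->txq;                              /* line 169 */

        if (q < 0)  /* frame was never charged to a queue */
            return;

        txq = sc->tx.txq_map[q];                      /* line 174 */
        if (WARN_ON(--txq->pending_frames < 0))
            txq->pending_frames = 0;

        if (txq->stopped &&
            txq->pending_frames < sc->tx.txq_max_pending[q]) {
            /* Restarting the mac80211 queue here is an assumption
             * matching the stop side in ath_tx_start(). */
            ieee80211_wake_queue(sc->hw, q);
            txq->stopped = false;                     /* line 184 */
        }
    }
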
219 struct ath_txq *txq = tid->txq; in ath_tx_tid_change_state() local
235 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_tid_change_state()
238 ath_txq_skb_done(sc, txq, skb); in ath_tx_tid_change_state()
248 struct ath_txq *txq = tid->txq; in ath_tx_flush_tid() local
264 ath_txq_skb_done(sc, txq, skb); in ath_tx_flush_tid()
275 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tx_flush_tid()
279 ath_txq_unlock(sc, txq); in ath_tx_flush_tid()
281 ath_txq_lock(sc, txq); in ath_tx_flush_tid()
322 static void ath_tid_drain(struct ath_softc *sc, struct ath_txq *txq, in ath_tid_drain() argument
340 ath_tx_complete(sc, skb, ATH_TX_ERROR, txq, NULL); in ath_tid_drain()
345 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tid_drain()
349 static void ath_tx_set_retry(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_set_retry() argument
357 TX_STAT_INC(txq->axq_qnum, a_retries); in ath_tx_set_retry()
446 static void ath_tx_complete_aggr(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_complete_aggr() argument
489 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, ts, 0); in ath_tx_complete_aggr()
562 ath_tx_set_retry(sc, txq, bf->bf_mpdu, in ath_tx_complete_aggr()
598 ath_tx_complete_buf(sc, bf, txq, &bf_head, sta, ts, in ath_tx_complete_aggr()
618 ath_tx_complete_buf(sc, bf, txq, in ath_tx_complete_aggr()
646 ath_tx_queue_tid(sc, txq, tid); in ath_tx_complete_aggr()
659 ath_txq_unlock(sc, txq); in ath_tx_complete_aggr()
661 ath_txq_lock(sc, txq); in ath_tx_complete_aggr()
674 static void ath_tx_process_buffer(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_process_buffer() argument
687 txq->axq_tx_inprogress = false; in ath_tx_process_buffer()
689 txq->axq_depth--; in ath_tx_process_buffer()
691 txq->axq_ampdu_depth--; in ath_tx_process_buffer()
713 ath_tx_complete_buf(sc, bf, txq, bf_head, sta, ts, txok); in ath_tx_process_buffer()
715 ath_tx_complete_aggr(sc, txq, bf, bf_head, sta, tid, ts, txok); in ath_tx_process_buffer()
718 ath_txq_schedule(sc, txq); in ath_tx_process_buffer()
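
ath_tx_process_buffer() is the common completion funnel for both the legacy and EDMA tasklets (called at lines 1848, 2724 and 2822). The listed lines show its shape: credit the depth counters, route the buffer to the plain or aggregate completion path, then kick the scheduler so software queues can refill the hardware. A sketch under the assumption that sta/tid were already derived from the frame header and that the surrounding guard conditions are elided:

    txq->axq_tx_inprogress = false;           /* line 687 */

    txq->axq_depth--;                         /* line 689 */
    if (bf_is_ampdu_not_probing(bf))
        txq->axq_ampdu_depth--;               /* line 691 */

    if (!bf_isampdu(bf))
        ath_tx_complete_buf(sc, bf, txq, bf_head, sta, ts, txok);
    else
        ath_tx_complete_aggr(sc, txq, bf, bf_head, sta, tid, ts, txok);

    ath_txq_schedule(sc, txq);                /* line 718 */
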
751 int q = tid->txq->mac80211_qnum; in ath_lookup_rate()
885 ath_tx_get_tid_subframe(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_get_tid_subframe() argument
906 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_get_tid_subframe()
912 ath_txq_skb_done(sc, txq, skb); in ath_tx_get_tid_subframe()
951 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tx_get_tid_subframe()
962 ath_tx_form_aggr(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_form_aggr() argument
1023 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_aggr()
1037 TX_STAT_INC(txq->axq_qnum, a_aggr); in ath_tx_form_aggr()
1325 struct ath_txq *txq, int len) in ath_tx_fill_desc() argument
1336 info.qcu = txq->axq_qnum; in ath_tx_fill_desc()
1356 txq == sc->tx.uapsdq) in ath_tx_fill_desc()
1417 ath_tx_form_burst(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_form_burst() argument
1439 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_burst()
1451 static bool ath_tx_sched_aggr(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_sched_aggr() argument
1466 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_sched_aggr()
1472 if ((aggr && txq->axq_ampdu_depth >= ATH_AGGR_MIN_QDEPTH) || in ath_tx_sched_aggr()
1473 (!aggr && txq->axq_depth >= ATH_NON_AGGR_MIN_QDEPTH)) { in ath_tx_sched_aggr()
1480 last = ath_tx_form_aggr(sc, txq, tid, &bf_q, bf, in ath_tx_sched_aggr()
1483 ath_tx_form_burst(sc, txq, tid, &bf_q, bf, tid_q); in ath_tx_sched_aggr()
1493 ath_tx_fill_desc(sc, bf, txq, aggr_len); in ath_tx_sched_aggr()
1494 ath_tx_txqaddbuf(sc, txq, &bf_q, false); in ath_tx_sched_aggr()
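
The test at lines 1472-1473 is the back-pressure point between software and hardware queues: aggregate formation stops once the hardware queue already holds ATH_AGGR_MIN_QDEPTH A-MPDUs (or ATH_NON_AGGR_MIN_QDEPTH frames when not aggregating), keeping latency low and rate-control feedback fresh. A paraphrase; the handling of the stop out-parameter is an assumption inferred from the function's bool return and the &stop caller at line 1972:

    if ((aggr && txq->axq_ampdu_depth >= ATH_AGGR_MIN_QDEPTH) ||
        (!aggr && txq->axq_depth >= ATH_NON_AGGR_MIN_QDEPTH)) {
        *stop = true;    /* tell ath_txq_schedule() to back off */
        return false;    /* nothing was queued to hardware */
    }
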
1503 struct ath_txq *txq; in ath_tx_aggr_start() local
1511 txq = txtid->txq; in ath_tx_aggr_start()
1513 ath_txq_lock(sc, txq); in ath_tx_aggr_start()
1536 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_start()
1546 struct ath_txq *txq = txtid->txq; in ath_tx_aggr_stop() local
1550 ath_txq_lock(sc, txq); in ath_tx_aggr_stop()
1554 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_stop()
1562 struct ath_txq *txq; in ath_tx_aggr_sleep() local
1571 txq = tid->txq; in ath_tx_aggr_sleep()
1573 ath_txq_lock(sc, txq); in ath_tx_aggr_sleep()
1576 ath_txq_unlock(sc, txq); in ath_tx_aggr_sleep()
1584 ath_txq_unlock(sc, txq); in ath_tx_aggr_sleep()
1594 struct ath_txq *txq; in ath_tx_aggr_wakeup() local
1602 txq = tid->txq; in ath_tx_aggr_wakeup()
1604 ath_txq_lock(sc, txq); in ath_tx_aggr_wakeup()
1608 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_wakeup()
1609 ath_txq_schedule(sc, txq); in ath_tx_aggr_wakeup()
1612 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_wakeup()
1622 struct ath_txq *txq; in ath_tx_aggr_resume() local
1628 txq = tid->txq; in ath_tx_aggr_resume()
1630 ath_txq_lock(sc, txq); in ath_tx_aggr_resume()
1635 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_resume()
1636 ath_txq_schedule(sc, txq); in ath_tx_aggr_resume()
1639 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_resume()
1650 struct ath_txq *txq = sc->tx.uapsdq; in ath9k_release_buffered_frames() local
1667 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1686 TX_STAT_INC(txq->axq_qnum, a_queued_hw); in ath9k_release_buffered_frames()
1691 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1701 ath_txq_lock(sc, txq); in ath9k_release_buffered_frames()
1702 ath_tx_fill_desc(sc, bf, txq, 0); in ath9k_release_buffered_frames()
1703 ath_tx_txqaddbuf(sc, txq, &bf_q, false); in ath9k_release_buffered_frames()
1704 ath_txq_unlock(sc, txq); in ath9k_release_buffered_frames()
1763 struct ath_txq *txq = &sc->tx.txq[axq_qnum]; in ath_txq_setup() local
1765 txq->axq_qnum = axq_qnum; in ath_txq_setup()
1766 txq->mac80211_qnum = -1; in ath_txq_setup()
1767 txq->axq_link = NULL; in ath_txq_setup()
1768 __skb_queue_head_init(&txq->complete_q); in ath_txq_setup()
1769 INIT_LIST_HEAD(&txq->axq_q); in ath_txq_setup()
1770 spin_lock_init(&txq->axq_lock); in ath_txq_setup()
1771 txq->axq_depth = 0; in ath_txq_setup()
1772 txq->axq_ampdu_depth = 0; in ath_txq_setup()
1773 txq->axq_tx_inprogress = false; in ath_txq_setup()
1776 txq->txq_headidx = txq->txq_tailidx = 0; in ath_txq_setup()
1778 INIT_LIST_HEAD(&txq->txq_fifo[i]); in ath_txq_setup()
1780 return &sc->tx.txq[axq_qnum]; in ath_txq_setup()
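
Lines 1763-1780 show nearly the whole per-queue initialisation in ath_txq_setup(); consolidated below for readability. The for-loop header feeding line 1778 and the comment on the FIFO fields being EDMA-specific are assumptions filled in from the INCR()/ATH_TXFIFO_DEPTH usage elsewhere in this listing:

    struct ath_txq *txq = &sc->tx.txq[axq_qnum];
    int i;

    txq->axq_qnum = axq_qnum;
    txq->mac80211_qnum = -1;    /* not mapped to a mac80211 AC yet */
    txq->axq_link = NULL;
    __skb_queue_head_init(&txq->complete_q);
    INIT_LIST_HEAD(&txq->axq_q);
    spin_lock_init(&txq->axq_lock);
    txq->axq_depth = 0;
    txq->axq_ampdu_depth = 0;
    txq->axq_tx_inprogress = false;

    /* EDMA chips drive the queue through a small descriptor FIFO */
    txq->txq_headidx = txq->txq_tailidx = 0;
    for (i = 0; i < ATH_TXFIFO_DEPTH; i++)
        INIT_LIST_HEAD(&txq->txq_fifo[i]);

    return &sc->tx.txq[axq_qnum];
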
1790 BUG_ON(sc->tx.txq[qnum].axq_qnum != qnum); in ath_txq_update()
1825 static void ath_drain_txq_list(struct ath_softc *sc, struct ath_txq *txq, in ath_drain_txq_list() argument
1848 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_drain_txq_list()
1858 void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq) in ath_draintxq() argument
1861 ath_txq_lock(sc, txq); in ath_draintxq()
1864 int idx = txq->txq_tailidx; in ath_draintxq()
1866 while (!list_empty(&txq->txq_fifo[idx])) { in ath_draintxq()
1867 ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]); in ath_draintxq()
1871 txq->txq_tailidx = idx; in ath_draintxq()
1874 txq->axq_link = NULL; in ath_draintxq()
1875 txq->axq_tx_inprogress = false; in ath_draintxq()
1876 ath_drain_txq_list(sc, txq, &txq->axq_q); in ath_draintxq()
1878 ath_txq_unlock_complete(sc, txq); in ath_draintxq()
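
ath_draintxq() shows the two hardware generations side by side: EDMA parts drain each txq_fifo slot from the tail index forward, and both paths then fall through to draining the legacy axq_q list. Sketch below; the ATH9K_HW_CAP_EDMA guard and the INCR() wrap-around (modulo ATH_TXFIFO_DEPTH) are assumptions consistent with the rest of the listing:

    void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq)
    {
        ath_txq_lock(sc, txq);

        if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
            int idx = txq->txq_tailidx;          /* line 1864 */

            while (!list_empty(&txq->txq_fifo[idx])) {
                ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]);
                INCR(idx, ATH_TXFIFO_DEPTH);     /* wrap modulo depth */
            }
            txq->txq_tailidx = idx;              /* line 1871 */
        }

        txq->axq_link = NULL;
        txq->axq_tx_inprogress = false;
        ath_drain_txq_list(sc, txq, &txq->axq_q);

        ath_txq_unlock_complete(sc, txq);
    }
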
1886 struct ath_txq *txq; in ath_drain_all_txq() local
1900 if (!sc->tx.txq[i].axq_depth) in ath_drain_all_txq()
1903 if (ath9k_hw_numtxpending(ah, sc->tx.txq[i].axq_qnum)) in ath_drain_all_txq()
1922 txq = &sc->tx.txq[i]; in ath_drain_all_txq()
1923 txq->stopped = false; in ath_drain_all_txq()
1924 ath_draintxq(sc, txq); in ath_drain_all_txq()
1930 void ath_tx_cleanupq(struct ath_softc *sc, struct ath_txq *txq) in ath_tx_cleanupq() argument
1932 ath9k_hw_releasetxqueue(sc->sc_ah, txq->axq_qnum); in ath_tx_cleanupq()
1933 sc->tx.txqsetup &= ~(1<<txq->axq_qnum); in ath_tx_cleanupq()
1939 void ath_txq_schedule(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_schedule() argument
1946 if (txq->mac80211_qnum < 0) in ath_txq_schedule()
1953 tid_list = &sc->cur_chan->acq[txq->mac80211_qnum]; in ath_txq_schedule()
1972 if (ath_tx_sched_aggr(sc, txq, tid, &stop)) in ath_txq_schedule()
1980 ath_tx_queue_tid(sc, txq, tid); in ath_txq_schedule()
2001 struct ath_txq *txq; in ath_txq_schedule_all() local
2005 txq = sc->tx.txq_map[i]; in ath_txq_schedule_all()
2007 spin_lock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2008 ath_txq_schedule(sc, txq); in ath_txq_schedule_all()
2009 spin_unlock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2021 static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_txqaddbuf() argument
2043 txq->axq_qnum, txq->axq_depth); in ath_tx_txqaddbuf()
2045 if (edma && list_empty(&txq->txq_fifo[txq->txq_headidx])) { in ath_tx_txqaddbuf()
2046 list_splice_tail_init(head, &txq->txq_fifo[txq->txq_headidx]); in ath_tx_txqaddbuf()
2047 INCR(txq->txq_headidx, ATH_TXFIFO_DEPTH); in ath_tx_txqaddbuf()
2050 list_splice_tail_init(head, &txq->axq_q); in ath_tx_txqaddbuf()
2052 if (txq->axq_link) { in ath_tx_txqaddbuf()
2053 ath9k_hw_set_desc_link(ah, txq->axq_link, bf->bf_daddr); in ath_tx_txqaddbuf()
2055 txq->axq_qnum, txq->axq_link, in ath_tx_txqaddbuf()
2060 txq->axq_link = bf_last->bf_desc; in ath_tx_txqaddbuf()
2064 TX_STAT_INC(txq->axq_qnum, puttxbuf); in ath_tx_txqaddbuf()
2065 ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr); in ath_tx_txqaddbuf()
2067 txq->axq_qnum, ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2071 TX_STAT_INC(txq->axq_qnum, txstart); in ath_tx_txqaddbuf()
2072 ath9k_hw_txstart(ah, txq->axq_qnum); in ath_tx_txqaddbuf()
2077 txq->axq_depth++; in ath_tx_txqaddbuf()
2079 txq->axq_ampdu_depth++; in ath_tx_txqaddbuf()
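
ath_tx_txqaddbuf() (lines 2021-2079) is the single point where buffer lists reach the hardware and where axq_depth/axq_ampdu_depth are charged. On EDMA hardware a free FIFO slot takes the whole list; otherwise the list is spliced onto axq_q and chained to the previous tail descriptor through axq_link, with puttxbuf/txstart only when the queue was idle. A sketch; the edma/puttxbuf plumbing between the listed lines is reconstructed and should be read as an assumption:

    bool puttxbuf = false;

    if (edma && list_empty(&txq->txq_fifo[txq->txq_headidx])) {
        /* free FIFO slot: hand the whole burst to the hardware */
        list_splice_tail_init(head, &txq->txq_fifo[txq->txq_headidx]);
        INCR(txq->txq_headidx, ATH_TXFIFO_DEPTH);
    } else {
        list_splice_tail_init(head, &txq->axq_q);

        if (txq->axq_link)
            /* append: link the old tail descriptor to this burst */
            ath9k_hw_set_desc_link(ah, txq->axq_link, bf->bf_daddr);
        else if (!edma)
            puttxbuf = true;    /* idle queue: must set TXDP */

        txq->axq_link = bf_last->bf_desc;         /* line 2060 */
    }

    if (puttxbuf) {
        TX_STAT_INC(txq->axq_qnum, puttxbuf);
        ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr);
    }

    if (!edma) {
        TX_STAT_INC(txq->axq_qnum, txstart);
        ath9k_hw_txstart(ah, txq->axq_qnum);
    }

    txq->axq_depth++;                             /* line 2077 */
    if (bf_is_ampdu_not_probing(bf))
        txq->axq_ampdu_depth++;                   /* line 2079 */
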
2088 static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_send_normal() argument
2106 ath_tx_fill_desc(sc, bf, txq, fi->framelen); in ath_tx_send_normal()
2107 ath_tx_txqaddbuf(sc, txq, &bf_head, false); in ath_tx_send_normal()
2108 TX_STAT_INC(txq->axq_qnum, queued); in ath_tx_send_normal()
2152 fi->txq = -1; in setup_frame_info()
2190 struct ath_txq *txq, in ath_tx_setup_buffer() argument
2320 struct ath_txq *txq = txctl->txq; in ath_tx_start() local
2346 ath_txq_lock(sc, txq); in ath_tx_start()
2347 if (txq == sc->tx.txq_map[q]) { in ath_tx_start()
2348 fi->txq = q; in ath_tx_start()
2349 if (++txq->pending_frames > sc->tx.txq_max_pending[q] && in ath_tx_start()
2350 !txq->stopped) { in ath_tx_start()
2355 txq->stopped = true; in ath_tx_start()
2383 ath_txq_unlock(sc, txq); in ath_tx_start()
2384 txq = sc->tx.uapsdq; in ath_tx_start()
2385 ath_txq_lock(sc, txq); in ath_tx_start()
2387 WARN_ON(tid->txq != txctl->txq); in ath_tx_start()
2396 TX_STAT_INC(txq->axq_qnum, a_queued_sw); in ath_tx_start()
2399 ath_tx_queue_tid(sc, txq, tid); in ath_tx_start()
2401 ath_txq_schedule(sc, txq); in ath_tx_start()
2405 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_start()
2407 ath_txq_skb_done(sc, txq, skb); in ath_tx_start()
2421 ath_tx_send_normal(sc, txq, tid, skb); in ath_tx_start()
2424 ath_txq_unlock(sc, txq); in ath_tx_start()
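
Lines 2346-2355 are the submit half of the flow control that ath_txq_skb_done() (lines 164-184) unwinds: the frame is charged against its mac80211 queue, and the queue is stopped once pending_frames overruns txq_max_pending. The ieee80211_stop_queue() call is an assumption mirroring the wake side:

    ath_txq_lock(sc, txq);
    if (txq == sc->tx.txq_map[q]) {
        fi->txq = q;    /* credit this queue again on completion */
        if (++txq->pending_frames > sc->tx.txq_max_pending[q] &&
            !txq->stopped) {
            ieee80211_stop_queue(sc->hw, q);
            txq->stopped = true;                  /* line 2355 */
        }
    }
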
2434 .txq = sc->beacon.cabq in ath_tx_cabq()
2454 bf = ath_tx_setup_buffer(sc, txctl.txq, NULL, skb); in ath_tx_cabq()
2490 ath_txq_lock(sc, txctl.txq); in ath_tx_cabq()
2491 ath_tx_fill_desc(sc, bf, txctl.txq, 0); in ath_tx_cabq()
2492 ath_tx_txqaddbuf(sc, txctl.txq, &bf_q, false); in ath_tx_cabq()
2493 TX_STAT_INC(txctl.txq->axq_qnum, queued); in ath_tx_cabq()
2494 ath_txq_unlock(sc, txctl.txq); in ath_tx_cabq()
2502 int tx_flags, struct ath_txq *txq, in ath_tx_complete() argument
2537 if ((sc->ps_flags & PS_WAIT_FOR_TX_ACK) && !txq->axq_depth) { in ath_tx_complete()
2548 ath_txq_skb_done(sc, txq, skb); in ath_tx_complete()
2550 __skb_queue_tail(&txq->complete_q, skb); in ath_tx_complete()
2554 struct ath_txq *txq, struct list_head *bf_q, in ath_tx_complete_buf() argument
2582 ath_debug_stat_tx(sc, bf, ts, txq, tx_flags); in ath_tx_complete_buf()
2583 ath_tx_complete(sc, skb, tx_flags, txq, sta); in ath_tx_complete_buf()
2654 static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq) in ath_tx_processq() argument
2665 txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum), in ath_tx_processq()
2666 txq->axq_link); in ath_tx_processq()
2668 ath_txq_lock(sc, txq); in ath_tx_processq()
2673 if (list_empty(&txq->axq_q)) { in ath_tx_processq()
2674 txq->axq_link = NULL; in ath_tx_processq()
2675 ath_txq_schedule(sc, txq); in ath_tx_processq()
2678 bf = list_first_entry(&txq->axq_q, struct ath_buf, list); in ath_tx_processq()
2691 if (list_is_last(&bf_held->list, &txq->axq_q)) in ath_tx_processq()
2706 TX_STAT_INC(txq->axq_qnum, txprocdesc); in ath_tx_processq()
2717 &txq->axq_q, lastbf->list.prev); in ath_tx_processq()
2724 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_processq()
2726 ath_txq_unlock_complete(sc, txq); in ath_tx_processq()
2738 ath_tx_processq(sc, &sc->tx.txq[i]); in ath_tx_tasklet()
2748 struct ath_txq *txq; in ath_tx_edma_tasklet() local
2781 txq = &sc->tx.txq[ts.qid]; in ath_tx_edma_tasklet()
2783 ath_txq_lock(sc, txq); in ath_tx_edma_tasklet()
2785 TX_STAT_INC(txq->axq_qnum, txprocdesc); in ath_tx_edma_tasklet()
2787 fifo_list = &txq->txq_fifo[txq->txq_tailidx]; in ath_tx_edma_tasklet()
2789 ath_txq_unlock(sc, txq); in ath_tx_edma_tasklet()
2805 INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH); in ath_tx_edma_tasklet()
2807 if (!list_empty(&txq->axq_q)) { in ath_tx_edma_tasklet()
2811 txq->axq_link = NULL; in ath_tx_edma_tasklet()
2812 list_splice_tail_init(&txq->axq_q, &bf_q); in ath_tx_edma_tasklet()
2813 ath_tx_txqaddbuf(sc, txq, &bf_q, true); in ath_tx_edma_tasklet()
2822 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_edma_tasklet()
2823 ath_txq_unlock_complete(sc, txq); in ath_tx_edma_tasklet()
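
The EDMA completion tasklet closes the loop opened in ath_tx_txqaddbuf(): when a status descriptor retires the last buffer of a FIFO slot, the tail index advances, and anything parked on axq_q while all ATH_TXFIFO_DEPTH slots were busy is re-submitted. Sketch of lines 2805-2813; the internal=true flag name follows the false literals at lines 1494/2107 and is an assumption here, its purpose being to keep the re-queued buffers from being charged to axq_depth a second time:

    if (list_empty(fifo_list)) {
        /* this FIFO slot is fully retired */
        INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH);

        if (!list_empty(&txq->axq_q)) {
            struct list_head bf_q;

            INIT_LIST_HEAD(&bf_q);
            txq->axq_link = NULL;                 /* line 2811 */
            list_splice_tail_init(&txq->axq_q, &bf_q);
            /* re-submit overflow frames; internal=true so the
             * depth counters are not incremented again */
            ath_tx_txqaddbuf(sc, txq, &bf_q, true);
        }
    }
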
2909 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2916 struct ath_txq *txq; in ath_tx_node_cleanup() local
2922 txq = tid->txq; in ath_tx_node_cleanup()
2924 ath_txq_lock(sc, txq); in ath_tx_node_cleanup()
2929 ath_tid_drain(sc, txq, tid); in ath_tx_node_cleanup()
2932 ath_txq_unlock(sc, txq); in ath_tx_node_cleanup()
2965 bf = ath_tx_setup_buffer(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()
2974 ath9k_hw_tx99_start(sc->sc_ah, txctl->txq->axq_qnum); in ath9k_tx99_send()
2976 ath_tx_send_normal(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()