Lines Matching refs:tqueue

640 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in dma_free_tx_skbufs() local
641 tx_free_ring_skbufs(tqueue); in dma_free_tx_skbufs()
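
Lines 640-641 are the per-queue teardown in dma_free_tx_skbufs(): look up each
TX queue and hand its ring to tx_free_ring_skbufs(). A minimal sketch of that
loop, assuming the SXGBE_TX_QUEUES count from sxgbe_common.h (the driver may
iterate via a helper macro instead):

    static void dma_free_tx_skbufs(struct sxgbe_priv_data *priv)
    {
            int queue_num;

            /* free any skbs still parked in each TX descriptor ring */
            for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
                    struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

                    tx_free_ring_skbufs(tqueue);
            }
    }
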
735 static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue) in sxgbe_tx_queue_clean() argument
737 struct sxgbe_priv_data *priv = tqueue->priv_ptr; in sxgbe_tx_queue_clean()
740 u8 queue_no = tqueue->queue_no; in sxgbe_tx_queue_clean()
747 while (tqueue->dirty_tx != tqueue->cur_tx) { in sxgbe_tx_queue_clean()
748 unsigned int entry = tqueue->dirty_tx % tx_rsize; in sxgbe_tx_queue_clean()
749 struct sk_buff *skb = tqueue->tx_skbuff[entry]; in sxgbe_tx_queue_clean()
752 p = tqueue->dma_tx + entry; in sxgbe_tx_queue_clean()
760 __func__, tqueue->cur_tx, tqueue->dirty_tx); in sxgbe_tx_queue_clean()
762 if (likely(tqueue->tx_skbuff_dma[entry])) { in sxgbe_tx_queue_clean()
764 tqueue->tx_skbuff_dma[entry], in sxgbe_tx_queue_clean()
767 tqueue->tx_skbuff_dma[entry] = 0; in sxgbe_tx_queue_clean()
772 tqueue->tx_skbuff[entry] = NULL; in sxgbe_tx_queue_clean()
777 tqueue->dirty_tx++; in sxgbe_tx_queue_clean()
782 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv))) { in sxgbe_tx_queue_clean()
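
Lines 735-782 are the completion path, sxgbe_tx_queue_clean(): walk the ring
from dirty_tx up to cur_tx, unmap the DMA buffer recorded for each finished
descriptor, free the skb, and wake the software queue once more than
SXGBE_TX_THRESH(priv) descriptors are free again. A hedged sketch of that
dirty/cur walk; the descriptor type, the get_tx_owner()/get_tx_len() op names,
and the priv->device/priv->dev fields are assumptions, and TSO and error
handling are elided:

    while (tqueue->dirty_tx != tqueue->cur_tx) {
            unsigned int entry = tqueue->dirty_tx % tx_rsize;
            struct sk_buff *skb = tqueue->tx_skbuff[entry];
            struct sxgbe_tx_norm_desc *p = tqueue->dma_tx + entry; /* assumed type */

            /* assumed op: stop at the first descriptor the DMA engine still owns */
            if (priv->hw->desc->get_tx_owner(p))
                    break;

            if (likely(tqueue->tx_skbuff_dma[entry])) {
                    dma_unmap_single(priv->device,
                                     tqueue->tx_skbuff_dma[entry],
                                     priv->hw->desc->get_tx_len(p), /* assumed op */
                                     DMA_TO_DEVICE);
                    tqueue->tx_skbuff_dma[entry] = 0;
            }

            if (likely(skb)) {
                    dev_kfree_skb(skb);
                    tqueue->tx_skbuff[entry] = NULL;
            }

            tqueue->dirty_tx++;
    }

    /* restart the subqueue once enough descriptors have drained (line 782) */
    dev_txq = netdev_get_tx_queue(priv->dev, tqueue->queue_no);
    if (unlikely(netif_tx_queue_stopped(dev_txq) &&
                 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv)))
            netif_tx_wake_queue(dev_txq);
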
801 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in sxgbe_tx_all_clean() local
803 sxgbe_tx_queue_clean(tqueue); in sxgbe_tx_all_clean()
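
Lines 801-803 show the wrapper, sxgbe_tx_all_clean(): the same per-queue
lookup as in dma_free_tx_skbufs(), but invoking sxgbe_tx_queue_clean() on
each ring. Sketch under the same SXGBE_TX_QUEUES assumption:

    static void sxgbe_tx_all_clean(struct sxgbe_priv_data *priv)
    {
            u8 queue_num;

            for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
                    struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

                    sxgbe_tx_queue_clean(tqueue);
            }
    }
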
1280 struct sxgbe_tx_queue *tqueue = priv->txq[txq_index]; in sxgbe_xmit() local
1292 if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss)) in sxgbe_xmit()
1297 tqueue->hwts_tx_en))) in sxgbe_xmit()
1303 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) { in sxgbe_xmit()
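
Lines 1280-1303 open sxgbe_xmit(): pick the queue by txq_index, note whether a
GSO skb carries a different MSS than the one last programmed (prev_mss, line
1292), and refuse the skb when fewer than nr_frags + 1 descriptors are free.
A sketch of that backpressure guard, assuming the standard ndo_start_xmit
parameters (skb, dev); the helpers are regular netdev API and the log message
is illustrative:

    if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) {
            struct netdev_queue *dev_txq = netdev_get_tx_queue(dev, txq_index);

            if (!netif_tx_queue_stopped(dev_txq)) {
                    netif_tx_stop_queue(dev_txq);
                    netdev_err(dev, "%s: Tx ring full when queue awake\n",
                               __func__);
            }
            return NETDEV_TX_BUSY; /* stack will requeue the skb */
    }
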
1312 entry = tqueue->cur_tx % tx_rsize; in sxgbe_xmit()
1313 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1320 tqueue->tx_skbuff[entry] = skb; in sxgbe_xmit()
1325 if (unlikely(tqueue->prev_mss != cur_mss)) { in sxgbe_xmit()
1337 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1338 first_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1340 tqueue->prev_mss = cur_mss; in sxgbe_xmit()
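
Lines 1312-1340 reserve the first slot (entry = cur_tx % tx_rsize), record the
skb, and insert a TSO context descriptor whenever the MSS changed: the context
descriptor takes one ring slot of its own, so cur_tx is bumped before the data
descriptor is picked, and the new MSS is cached in prev_mss. Sketch; the
context-descriptor type and the set-MSS op name are illustrative, not the
driver's:

    if (unlikely(tqueue->prev_mss != cur_mss)) {
            /* assumed names: program the new MSS into a context descriptor */
            priv->hw->desc->tx_ctxt_desc_set_mss(
                            (struct sxgbe_tx_ctxt_desc *)first_desc, cur_mss);

            /* the context descriptor consumed this slot; move to the next */
            entry = (++tqueue->cur_tx) % tx_rsize;
            first_desc = tqueue->dma_tx + entry;

            tqueue->prev_mss = cur_mss; /* skip the ctxt desc next time */
    }
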
1359 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1360 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1364 tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01; in sxgbe_xmit()
1365 tqueue->tx_skbuff[entry] = NULL; in sxgbe_xmit()
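
Lines 1359-1365 are the per-fragment loop: every fragment gets its own
descriptor, its DMA address lands both in the descriptor (tdes01, line 1364)
and in tx_skbuff_dma[] for later unmapping, while tx_skbuff[] stays NULL so
only one ring entry owns the skb at free time. Sketch built on the standard
skb_frag_dma_map() helper; the driver fills tdes01 through its descriptor
ops, the direct store below is only for clarity, and error handling is
elided:

    for (frag_num = 0; frag_num < nr_frags; frag_num++) {
            const skb_frag_t *frag = &skb_shinfo(skb)->frags[frag_num];
            int len = skb_frag_size(frag);

            entry = (++tqueue->cur_tx) % tx_rsize;
            tx_desc = tqueue->dma_tx + entry;
            tx_desc->tdes01 = skb_frag_dma_map(priv->device, frag, 0, len,
                                               DMA_TO_DEVICE);

            tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01;
            tqueue->tx_skbuff[entry] = NULL; /* skb owned by last entry */
    }
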
1383 tqueue->tx_count_frames += nr_frags + 1; in sxgbe_xmit()
1384 if (tqueue->tx_count_frames > tqueue->tx_coal_frames) { in sxgbe_xmit()
1387 mod_timer(&tqueue->txtimer, in sxgbe_xmit()
1388 SXGBE_COAL_TIMER(tqueue->tx_coal_timer)); in sxgbe_xmit()
1390 tqueue->tx_count_frames = 0; in sxgbe_xmit()
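
Lines 1383-1390 are the TX interrupt-coalescing bookkeeping: tx_count_frames
accumulates frames (plus their fragments) queued since the last interrupt;
past the tx_coal_frames budget the cleanup timer is re-armed, otherwise the
counter is reset. Only the tqueue-touching lines are visible in this listing,
so the branch placement and the IC-bit handling between them are assumptions
in this sketch:

    tqueue->tx_count_frames += nr_frags + 1;
    if (tqueue->tx_count_frames > tqueue->tx_coal_frames) {
            /* over budget: defer cleanup to the coalescing timer */
            mod_timer(&tqueue->txtimer,
                      SXGBE_COAL_TIMER(tqueue->tx_coal_timer));
    } else {
            tqueue->tx_count_frames = 0;
    }
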
1399 tqueue->cur_tx++; in sxgbe_xmit()
1403 __func__, tqueue->cur_tx % tx_rsize, in sxgbe_xmit()
1404 tqueue->dirty_tx % tx_rsize, entry, in sxgbe_xmit()
1407 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) { in sxgbe_xmit()
1416 tqueue->hwts_tx_en)) { in sxgbe_xmit()
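
Lines 1399-1416 finish the hot path: cur_tx moves past the last used slot, the
ring indices are logged, the subqueue is stopped while fewer than
MAX_SKB_FRAGS + 1 descriptors remain (so the next worst-case skb always
fits), and hardware timestamping is flagged when hwts_tx_en is set. Sketch of
those two guards, using the standard skbuff timestamp flags:

    if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1)))
            netif_tx_stop_queue(netdev_get_tx_queue(dev, txq_index));

    if (unlikely((skb_shinfo(skb)->tx_flags & SKBTX_HW_TSTAMP) &&
                 tqueue->hwts_tx_en)) {
            /* tell the stack a hardware TX timestamp will follow */
            skb_shinfo(skb)->tx_flags |= SKBTX_IN_PROGRESS;
    }
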