Searched refs: NUM_TX_DESC (results 1 – 8 of 8), sorted by relevance
322   #define NUM_TX_DESC 16 /* Number of Tx descriptor registers. */   (macro)
324   #define TX_TOTAL_SIZE NUM_TX_DESC*sizeof(BufferDesc)

50    #define NUM_TX_DESC 64 /* [8..1024] */   (macro)
52    #define TX_RING_BYTES (NUM_TX_DESC * sizeof(struct TxDesc))
280   struct sk_buff *Tx_skbuff[NUM_TX_DESC];
696   queue_stopped = (pending == NUM_TX_DESC);   in sis190_tx_interrupt()
699   unsigned int entry = dirty_tx % NUM_TX_DESC;   in sis190_tx_interrupt()
817   memset(tp->Tx_skbuff, 0x0, NUM_TX_DESC * sizeof(struct sk_buff *));   in sis190_init_ring()
1110  for (i = 0; i < NUM_TX_DESC; i++) {   in sis190_tx_clear()
1192  entry = tp->cur_tx % NUM_TX_DESC;   in sis190_start_xmit()
1215  if (entry == (NUM_TX_DESC - 1))   in sis190_start_xmit()
1235  if ((tp->cur_tx - NUM_TX_DESC) == dirty_tx) {   in sis190_start_xmit()

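The sis190 hits above (and the 8139too, r8169 and sc92031 hits below) share one ring-index convention: cur_tx and dirty_tx are free-running counters, the descriptor slot is the counter taken modulo NUM_TX_DESC, and the ring is full when the two counters differ by exactly NUM_TX_DESC. A minimal standalone sketch of that arithmetic follows; the struct name, the helper functions and the 16-entry size are invented for illustration, not taken from any of these drivers.

    #include <assert.h>
    #include <stdio.h>

    #define NUM_TX_DESC 16                  /* illustrative ring size */

    /* Free-running producer/consumer counters, as in the hits above. */
    struct tx_ring {
        unsigned int cur_tx;                /* next slot to fill (producer)    */
        unsigned int dirty_tx;              /* next slot to reclaim (consumer) */
    };

    /* Number of descriptors queued but not yet reclaimed. */
    static unsigned int tx_pending(const struct tx_ring *r)
    {
        return r->cur_tx - r->dirty_tx;     /* wrap-safe for unsigned counters */
    }

    /* Queue one frame; returns the slot used, or -1 if the ring is full. */
    static int tx_queue(struct tx_ring *r)
    {
        if (tx_pending(r) == NUM_TX_DESC)   /* same test as (cur_tx - NUM_TX_DESC) == dirty_tx */
            return -1;
        return (int)(r->cur_tx++ % NUM_TX_DESC);
    }

    /* Reclaim one completed frame; returns the slot that was freed. */
    static int tx_reclaim(struct tx_ring *r)
    {
        assert(tx_pending(r) > 0);
        return (int)(r->dirty_tx++ % NUM_TX_DESC);
    }

    int main(void)
    {
        struct tx_ring r = { 0, 0 };

        /* Fill the ring completely, then show that the next attempt fails. */
        for (int i = 0; i < NUM_TX_DESC; i++)
            printf("queued in slot %d\n", tx_queue(&r));
        printf("ring full? %s\n", tx_queue(&r) < 0 ? "yes" : "no");

        /* Reclaim two and queue again: slots wrap modulo NUM_TX_DESC. */
        tx_reclaim(&r);
        tx_reclaim(&r);
        printf("next slot after reclaim: %d\n", tx_queue(&r));
        return 0;
    }

Because only the difference of the two unsigned counters is ever used, they may wrap past UINT_MAX without any special handling, which is why the drivers never reset them.
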
183   struct sk_buff *tx_skbuff[NUM_TX_DESC];
1133  for (i = 0; i < NUM_TX_DESC; i++) {   in sis900_init_tx_ring()
1137  ((i+1)%NUM_TX_DESC)*sizeof(BufferDesc);   in sis900_init_tx_ring()
1559  for (i = 0; i < NUM_TX_DESC; i++) {   in sis900_tx_timeout()
1610  entry = sis_priv->cur_tx % NUM_TX_DESC;   in sis900_start_xmit()
1638  } else if (count_dirty_tx < NUM_TX_DESC) {   in sis900_start_xmit()
1897  entry = sis_priv->dirty_tx % NUM_TX_DESC;   in sis900_finish_xmit()
1940  sis_priv->cur_tx - sis_priv->dirty_tx < NUM_TX_DESC - 4) {   in sis900_finish_xmit()
1987  for (i = 0; i < NUM_TX_DESC; i++) {   in sis900_close()

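The sis900 hit at line 1137 shows the ring being closed into a circle: each descriptor's link field is set to the offset of descriptor (i+1) % NUM_TX_DESC, so the last entry points back at the first. A rough standalone sketch of that initialisation; the BufferDesc-like layout and the fake DMA base address are assumptions, only the link expression mirrors the hit above.

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_TX_DESC 16                  /* illustrative; matches the define at line 322 */

    /* Made-up descriptor layout; only the 'link' field matters here. */
    struct buffer_desc {
        uint32_t link;                      /* bus address of the next descriptor */
        uint32_t cmdsts;                    /* command/status word (unused here)  */
        uint32_t bufptr;                    /* bus address of the data buffer     */
    };

    int main(void)
    {
        struct buffer_desc ring[NUM_TX_DESC] = { 0 };
        uint32_t ring_dma = 0x10000000;     /* pretend DMA base address */

        /*
         * Same idea as the hit at line 1137: descriptor i links to
         * descriptor (i + 1) % NUM_TX_DESC, so the last entry wraps to
         * index 0 and the hardware sees an endless circular ring.
         */
        for (int i = 0; i < NUM_TX_DESC; i++)
            ring[i].link = ring_dma + ((i + 1) % NUM_TX_DESC) * sizeof(struct buffer_desc);

        printf("desc %d links to offset %u (wraps to 0)\n",
               NUM_TX_DESC - 1, (unsigned)(ring[NUM_TX_DESC - 1].link - ring_dma));
        return 0;
    }
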
183   #define NUM_TX_DESC 4   (macro)
190   #define TX_BUF_TOT_LEN (TX_BUF_SIZE * NUM_TX_DESC)
591   unsigned char *tx_buf[NUM_TX_DESC]; /* Tx bounce buffers */
1419  for (i = 0; i < NUM_TX_DESC; i++)   in rtl8139_hw_start()
1449  for (i = 0; i < NUM_TX_DESC; i++)   in rtl8139_init_ring()
1662  for (i = 0; i < NUM_TX_DESC; i++)   in rtl8139_tx_timeout_task()
1665  i == tp->dirty_tx % NUM_TX_DESC ?   in rtl8139_tx_timeout_task()
1713  entry = tp->cur_tx % NUM_TX_DESC;   in rtl8139_start_xmit()
1739  if ((tp->cur_tx - NUM_TX_DESC) == tp->dirty_tx)   in rtl8139_start_xmit()
1762  int entry = dirty_tx % NUM_TX_DESC;   in rtl8139_tx_interrupt()
[all …]

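The 8139too hits show a different buffering scheme: with only NUM_TX_DESC = 4 transmit slots, each frame is copied into a bounce buffer (per the comment at line 591), and a single TX_BUF_TOT_LEN allocation is carved into NUM_TX_DESC fixed-size slices. A small standalone sketch of that carving; the TX_BUF_SIZE value and the malloc stand-in for the driver's DMA-coherent allocation are illustrative.

    #include <stdio.h>
    #include <stdlib.h>

    #define NUM_TX_DESC     4                        /* as in the define at line 183 */
    #define TX_BUF_SIZE     1536                     /* illustrative per-frame size  */
    #define TX_BUF_TOT_LEN  (TX_BUF_SIZE * NUM_TX_DESC)

    int main(void)
    {
        /* One contiguous allocation covering all TX bounce buffers. */
        unsigned char *tx_bufs = malloc(TX_BUF_TOT_LEN);
        unsigned char *tx_buf[NUM_TX_DESC];

        if (!tx_bufs)
            return 1;

        /* Each slot gets a fixed TX_BUF_SIZE slice of the shared allocation. */
        for (int i = 0; i < NUM_TX_DESC; i++)
            tx_buf[i] = tx_bufs + i * TX_BUF_SIZE;

        for (int i = 0; i < NUM_TX_DESC; i++)
            printf("tx_buf[%d] at offset %ld\n", i, (long)(tx_buf[i] - tx_bufs));

        free(tx_bufs);
        return 0;
    }
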
77    (tp->dirty_tx + NUM_TX_DESC - tp->cur_tx)
93    #define NUM_TX_DESC 64 /* Number of Tx descriptor registers */   (macro)
95    #define R8169_TX_RING_BYTES (NUM_TX_DESC * sizeof(struct TxDesc))
783   struct ring_info tx_skb[NUM_TX_DESC]; /* Tx data buffers */
6704  memset(tp->tx_skb, 0x0, NUM_TX_DESC * sizeof(struct ring_info));   in rtl8169_init_ring()
6729  unsigned int entry = (start + i) % NUM_TX_DESC;   in rtl8169_tx_clear_range()
6749  rtl8169_tx_clear_range(tp, tp->dirty_tx, NUM_TX_DESC);   in rtl8169_tx_clear()
6798  entry = (entry + 1) % NUM_TX_DESC;   in rtl8169_xmit_frags()
6813  (RingEnd * !((entry + 1) % NUM_TX_DESC));   in rtl8169_xmit_frags()
7027  unsigned int entry = tp->cur_tx % NUM_TX_DESC;   in rtl8169_start_xmit()
[all …]

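The r8169 hit at line 6813 wraps the ring without a link pointer: only the last descriptor carries a RingEnd flag, computed branch-free as RingEnd * !((entry + 1) % NUM_TX_DESC). A standalone sketch of just that expression; the bit position and the function name are invented, only the expression itself is taken from the hit.

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_TX_DESC 64                  /* matches the define at line 93 */
    #define RING_END    (1u << 30)          /* illustrative end-of-ring bit  */

    /* Return the per-descriptor flags for a given slot. */
    static uint32_t desc_flags(unsigned int entry)
    {
        /*
         * !((entry + 1) % NUM_TX_DESC) is 1 only when entry is the last
         * slot (NUM_TX_DESC - 1), so multiplying by RING_END sets the
         * end-of-ring bit on exactly that descriptor and leaves all
         * other descriptors at 0.
         */
        return RING_END * !((entry + 1) % NUM_TX_DESC);
    }

    int main(void)
    {
        printf("slot 0:  flags 0x%08x\n", (unsigned)desc_flags(0));
        printf("slot %d: flags 0x%08x\n", NUM_TX_DESC - 1,
               (unsigned)desc_flags(NUM_TX_DESC - 1));
        return 0;
    }
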
61    #define NUM_TX_DESC 4   (macro)
68    #define TX_BUF_TOT_LEN (TX_BUF_SIZE * NUM_TX_DESC)
659   entry = priv->tx_tail % NUM_TX_DESC;   in _sc92031_tx_tasklet()
956   BUG_ON(priv->tx_head - priv->tx_tail >= NUM_TX_DESC);   in sc92031_start_xmit()
958   entry = priv->tx_head++ % NUM_TX_DESC;   in sc92031_start_xmit()
983   if (priv->tx_head - priv->tx_tail >= NUM_TX_DESC)   in sc92031_start_xmit()

483   #define NUM_TX_DESC 1536   (macro)

754   #define NUM_TX_DESC 32 /* Number of TX descriptors */   (macro)
792   struct sk_buff *tx_skb[NUM_TX_DESC]; /* TX skb for freeing when sent */
1172  lp->dma_size = (NUM_RX_DESC + NUM_TX_DESC) * sizeof(struct de4x5_desc);   in de4x5_hw_init()
1201  dma_rx_bufs = lp->dma_rings + (NUM_RX_DESC + NUM_TX_DESC)   in de4x5_hw_init()
1205  + NUM_TX_DESC) + DE4X5_ALIGN) & ~DE4X5_ALIGN);   in de4x5_hw_init()
1221  lp->txRingSize = NUM_TX_DESC;   in de4x5_hw_init()
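
The de4x5 hits size one DMA region for both rings at once: (NUM_RX_DESC + NUM_TX_DESC) descriptors at line 1172, with buffer space placed after the descriptor area and rounded up to an alignment boundary in the split expressions at lines 1201 and 1205. A rough standalone sketch of that layout arithmetic; NUM_RX_DESC, the alignment mask and the descriptor layout are assumptions, not copied from the driver.

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_RX_DESC  32                 /* assumed; only NUM_TX_DESC appears in the hits */
    #define NUM_TX_DESC  32
    #define ALIGN_MASK   0x1f               /* illustrative stand-in for DE4X5_ALIGN */

    /* Made-up 16-byte descriptor, standing in for struct de4x5_desc. */
    struct desc {
        uint32_t status, des1, buf, next;
    };

    int main(void)
    {
        /* Size of the shared descriptor area for both rings, as at line 1172. */
        size_t dma_size = (NUM_RX_DESC + NUM_TX_DESC) * sizeof(struct desc);

        /*
         * The data buffers start after the descriptors, rounded up to the
         * next alignment boundary, roughly mirroring the masked expression
         * at line 1205.
         */
        size_t bufs_off = (dma_size + ALIGN_MASK) & ~(size_t)ALIGN_MASK;

        printf("descriptor area: %zu bytes\n", dma_size);
        printf("buffers start at offset %zu\n", bufs_off);
        return 0;
    }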