
Searched refs:RX_RING_SIZE (Results 1 – 25 of 51) sorted by relevance


/drivers/net/ethernet/sun/
sungem.h
883 #define RX_RING_SIZE 128 macro
907 #if RX_RING_SIZE == 32
909 #elif RX_RING_SIZE == 64
911 #elif RX_RING_SIZE == 128
913 #elif RX_RING_SIZE == 256
915 #elif RX_RING_SIZE == 512
917 #elif RX_RING_SIZE == 1024
919 #elif RX_RING_SIZE == 2048
921 #elif RX_RING_SIZE == 4096
923 #elif RX_RING_SIZE == 8192
[all …]
sunhme.h
331 #define RX_RING_SIZE 32 /* see ERX_CFG_SIZE* for possible values */ macro
341 #if (RX_RING_SIZE == 32)
344 #if (RX_RING_SIZE == 64)
347 #if (RX_RING_SIZE == 128)
350 #if (RX_RING_SIZE == 256)
353 #error RX_RING_SIZE holds illegal value
359 #define NEXT_RX(num) (((num) + 1) & (RX_RING_SIZE - 1))
361 #define PREV_RX(num) (((num) - 1) & (RX_RING_SIZE - 1))
416 struct sk_buff *rx_skbs[RX_RING_SIZE];
sunbmac.h
251 #define RX_RING_SIZE 256 macro
253 #define NEXT_RX(num) (((num) + 1) & (RX_RING_SIZE - 1))
255 #define PREV_RX(num) (((num) - 1) & (RX_RING_SIZE - 1))
298 struct sk_buff *rx_skbs[RX_RING_SIZE];
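The three sun/ entries above all pin RX_RING_SIZE to a power of two: sungem.h maps each legal size to a hardware encoding through its #elif chain, sunhme.h rejects anything else with #error, and sunhme.h/sunbmac.h then step through the ring with a bit mask rather than a modulo. A minimal standalone sketch of that idiom follows; the size value and compile-time check are illustrative, not the driver code itself.

/* Mask-based ring-index wrap, valid only for power-of-two ring sizes.
 * The compile-time check stands in for the #if/#error chains in
 * sunhme.h and sungem.h; everything here is illustrative. */
#include <stdio.h>

#define RX_RING_SIZE 32

#if (RX_RING_SIZE & (RX_RING_SIZE - 1)) != 0
#error RX_RING_SIZE must be a power of two for the mask trick below
#endif

#define NEXT_RX(num) (((num) + 1) & (RX_RING_SIZE - 1))  /* wrap forward  */
#define PREV_RX(num) (((num) - 1) & (RX_RING_SIZE - 1))  /* wrap backward */

int main(void)
{
	printf("NEXT_RX(31) = %d\n", NEXT_RX(31)); /* prints 0  */
	printf("PREV_RX(0)  = %d\n", PREV_RX(0));  /* prints 31 */
	return 0;
}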
/drivers/net/ethernet/dec/tulip/
interrupt.c
67 entry = tp->dirty_rx % RX_RING_SIZE; in tulip_refill_rx()
116 int entry = tp->cur_rx % RX_RING_SIZE; in tulip_poll()
127 if (budget >=RX_RING_SIZE) budget--; in tulip_poll()
148 if (tp->dirty_rx + RX_RING_SIZE == tp->cur_rx) in tulip_poll()
260 entry = (++tp->cur_rx) % RX_RING_SIZE; in tulip_poll()
261 if (tp->cur_rx - tp->dirty_rx > RX_RING_SIZE/4) in tulip_poll()
317 if (tp->rx_buffers[tp->dirty_rx % RX_RING_SIZE].skb == NULL) in tulip_poll()
339 if (tp->cur_rx - tp->dirty_rx > RX_RING_SIZE/2 || in tulip_poll()
340 tp->rx_buffers[tp->dirty_rx % RX_RING_SIZE].skb == NULL) in tulip_poll()
343 if (tp->rx_buffers[tp->dirty_rx % RX_RING_SIZE].skb == NULL) in tulip_poll()
[all …]
winbond-840.c
296 dma_addr_t rx_addr[RX_RING_SIZE];
301 struct sk_buff* rx_skbuff[RX_RING_SIZE];
801 np->tx_ring = (struct w840_tx_desc*)&np->rx_ring[RX_RING_SIZE]; in init_rxtx_rings()
804 for (i = 0; i < RX_RING_SIZE; i++) { in init_rxtx_rings()
813 for (i = 0; i < RX_RING_SIZE; i++) { in init_rxtx_rings()
826 np->dirty_rx = (unsigned int)(i - RX_RING_SIZE); in init_rxtx_rings()
837 iowrite32(np->ring_dma_addr+sizeof(struct w840_rx_desc)*RX_RING_SIZE, in init_rxtx_rings()
846 for (i = 0; i < RX_RING_SIZE; i++) { in free_rxtx_rings()
939 for (i = 0; i < RX_RING_SIZE; i++) in tx_timeout()
981 sizeof(struct w840_rx_desc)*RX_RING_SIZE + in alloc_ringdesc()
[all …]
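The tulip and winbond-840 hits above share one bookkeeping scheme: cur_rx counts descriptors the driver has consumed and dirty_rx counts descriptors it has re-armed with a fresh buffer, both free-running and reduced mod RX_RING_SIZE only when indexing, so cur_rx - dirty_rx is the number of slots still missing a buffer and dirty_rx + RX_RING_SIZE == cur_rx means every slot is empty. A hedged userspace sketch of the refill side, with malloc standing in for skb allocation and illustrative field names:

#include <stdlib.h>

#define RX_RING_SIZE 128
#define PKT_BUF_SZ   1536

struct rx_slot {
	void *buf;			/* stands in for the skb/DMA buffer */
};

struct ring_state {
	struct rx_slot slots[RX_RING_SIZE];
	unsigned int cur_rx;		/* bumped for every consumed packet */
	unsigned int dirty_rx;		/* bumped for every re-armed slot   */
};

/* Modeled loosely on tulip_refill_rx(): walk from dirty_rx toward cur_rx
 * and give every consumed slot a new buffer. */
static void refill_rx(struct ring_state *rs)
{
	for (; rs->cur_rx - rs->dirty_rx > 0; rs->dirty_rx++) {
		unsigned int entry = rs->dirty_rx % RX_RING_SIZE;

		if (rs->slots[entry].buf == NULL) {
			rs->slots[entry].buf = malloc(PKT_BUF_SZ);
			if (rs->slots[entry].buf == NULL)
				break;		/* retry on the next poll */
		}
	}
}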
/drivers/net/ethernet/amd/
7990.h
39 #define RX_RING_SIZE (1 << LANCE_LOG_RX_BUFFERS) macro
41 #define RX_RING_MOD_MASK (RX_RING_SIZE - 1)
87 volatile struct lance_rx_desc brx_ring[RX_RING_SIZE];
90 volatile char rx_buf[RX_RING_SIZE][RX_BUFF_SIZE];
ariadne.c
81 #define RX_RING_SIZE 16 macro
89 volatile struct RDRE *rx_ring[RX_RING_SIZE];
91 volatile u_short *rx_buff[RX_RING_SIZE];
101 struct RDRE rx_ring[RX_RING_SIZE];
103 u_short rx_buff[RX_RING_SIZE][PKT_BUF_SIZE / sizeof(u_short)];
145 for (i = 0; i < RX_RING_SIZE; i++) { in ariadne_init_ring()
164 int entry = priv->cur_rx % RX_RING_SIZE; in ariadne_rx()
197 for (i = 0; i < RX_RING_SIZE; i++) in ariadne_rx()
198 if (lowb(priv->rx_ring[(entry + i) % RX_RING_SIZE]->RMD1) & RF_OWN) in ariadne_rx()
201 if (i > RX_RING_SIZE - 2) { in ariadne_rx()
[all …]
declance.c
158 #define RX_RING_SIZE (1 << (LANCE_LOG_RX_BUFFERS)) macro
159 #define RX_RING_MOD_MASK (RX_RING_SIZE - 1)
221 struct lance_rx_desc brx_ring[RX_RING_SIZE];
265 char *rx_buf_ptr_cpu[RX_RING_SIZE];
269 uint rx_buf_ptr_lnc[RX_RING_SIZE];
509 for (i = 0; i < RX_RING_SIZE; i++) { in lance_init_ring()
568 for (i = 0; i < RX_RING_SIZE; i++) { in lance_rx()
1084 for (i = 0; i < RX_RING_SIZE; i++) { in dec_lance_probe()
1094 2 * RX_RING_SIZE * RX_BUFF_SIZE + in dec_lance_probe()
1098 RX_RING_SIZE * RX_BUFF_SIZE + in dec_lance_probe()
[all …]
a2065.c
73 #define RX_RING_SIZE (1 << LANCE_LOG_RX_BUFFERS) macro
76 #define RX_RING_MOD_MASK (RX_RING_SIZE - 1)
96 struct lance_rx_desc brx_ring[RX_RING_SIZE];
99 char rx_buf[RX_RING_SIZE][RX_BUFF_SIZE];
254 char buf[RX_RING_SIZE + 1]; in lance_rx()
256 for (i = 0; i < RX_RING_SIZE; i++) { in lance_rx()
263 buf[RX_RING_SIZE] = 0; in lance_rx()
lance.c
197 #define RX_RING_SIZE (1 << (LANCE_LOG_RX_BUFFERS)) macro
198 #define RX_RING_MOD_MASK (RX_RING_SIZE - 1)
237 struct lance_rx_head rx_ring[RX_RING_SIZE];
244 struct sk_buff* rx_skbuff[RX_RING_SIZE];
555 lp->rx_buffs = (unsigned long)kmalloc(PKT_BUF_SZ*RX_RING_SIZE, in lance_probe1()
844 for (i = 0; i < RX_RING_SIZE; i++) { in lance_purge_ring()
870 for (i = 0; i < RX_RING_SIZE; i++) { in lance_init_ring()
933 for (i = 0; i < RX_RING_SIZE; i++) in lance_tx_timeout()
1189 for (i=0; i < RX_RING_SIZE; i++) in lance_rx()
1193 if (i > RX_RING_SIZE -2) in lance_rx()
atarilance.c
115 #define RX_RING_SIZE (1 << RX_LOG_RING_SIZE) macro
117 #define RX_RING_MOD_MASK (RX_RING_SIZE - 1)
158 struct lance_rx_head rx_head[RX_RING_SIZE];
715 for( i = 0; i < RX_RING_SIZE; i++ ) { in lance_init_ring()
751 for( i = 0 ; i < RX_RING_SIZE; i++ ) in lance_tx_timeout()
999 for( i = 0; i < RX_RING_SIZE; i++ ) in lance_rx()
1004 if (i > RX_RING_SIZE - 2) { in lance_rx()
sun3lance.c
97 #define RX_RING_SIZE (1 << RX_LOG_RING_SIZE) macro
99 #define RX_RING_MOD_MASK (RX_RING_SIZE - 1)
142 struct lance_rx_head rx_head[RX_RING_SIZE];
143 char rx_data[RX_RING_SIZE][PKT_BUF_SZ];
473 for( i = 0; i < RX_RING_SIZE; i++ ) { in lance_init_ring()
546 for( i = 0 ; i < RX_RING_SIZE; i++ ) in lance_start_xmit()
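Across the amd/ LANCE-family drivers above (7990, declance, a2065, lance, atarilance, sun3lance) the ring size is derived from a log2 constant, the wrap mask follows from it, and the descriptor ring sits next to the per-slot packet buffers in one block the chip can address. A compilable sketch of that layout, with an illustrative log value and placeholder descriptor fields:

#include <stdint.h>

#define LANCE_LOG_RX_BUFFERS	4			/* illustrative value */
#define RX_RING_SIZE		(1 << LANCE_LOG_RX_BUFFERS)
#define RX_RING_MOD_MASK	(RX_RING_SIZE - 1)
#define RX_BUFF_SIZE		1544

struct lance_rx_desc {					/* placeholder layout */
	uint16_t rmd0;
	uint8_t  rmd1_bits;
	uint8_t  rmd1_hadr;
	int16_t  length;
	uint16_t mblength;
};

/* Descriptors and packet buffers live in one block, so a masked ring
 * index selects both the descriptor and its matching buffer. */
struct lance_init_block {
	volatile struct lance_rx_desc brx_ring[RX_RING_SIZE];
	volatile char rx_buf[RX_RING_SIZE][RX_BUFF_SIZE];
};

static volatile struct lance_rx_desc *
rx_desc_for(struct lance_init_block *ib, unsigned int ring_counter)
{
	return &ib->brx_ring[ring_counter & RX_RING_MOD_MASK];
}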
/drivers/net/ethernet/packetengines/
hamachi.c
120 #define RX_RING_SIZE 512 macro
122 #define RX_TOTAL_SIZE RX_RING_SIZE*sizeof(struct hamachi_desc)
486 struct sk_buff* rx_skbuff[RX_RING_SIZE];
1059 for (i = 0; i < RX_RING_SIZE; i++) in hamachi_tx_timeout()
1081 for (i = 0; i < RX_RING_SIZE; i++) in hamachi_tx_timeout()
1117 for (i = 0; i < RX_RING_SIZE; i++){ in hamachi_tx_timeout()
1129 for (i = 0; i < RX_RING_SIZE; i++) { in hamachi_tx_timeout()
1142 hmp->dirty_rx = (unsigned int)(i - RX_RING_SIZE); in hamachi_tx_timeout()
1144 hmp->rx_ring[RX_RING_SIZE-1].status_n_length |= cpu_to_le32(DescEndRing); in hamachi_tx_timeout()
1178 for (i = 0; i < RX_RING_SIZE; i++) { in hamachi_init_ring()
[all …]
yellowfin.c
75 #define RX_RING_SIZE 64 macro
78 #define RX_TOTAL_SIZE RX_RING_SIZE*sizeof(struct yellowfin_desc)
311 struct sk_buff* rx_skbuff[RX_RING_SIZE];
697 for (i = 0; i < RX_RING_SIZE; i++) in yellowfin_tx_timeout()
733 for (i = 0; i < RX_RING_SIZE; i++) { in yellowfin_init_ring()
737 ((i+1)%RX_RING_SIZE)*sizeof(struct yellowfin_desc)); in yellowfin_init_ring()
740 for (i = 0; i < RX_RING_SIZE; i++) { in yellowfin_init_ring()
749 if (i != RX_RING_SIZE) { in yellowfin_init_ring()
755 yp->dirty_rx = (unsigned int)(i - RX_RING_SIZE); in yellowfin_init_ring()
1039 int entry = yp->cur_rx % RX_RING_SIZE; in yellowfin_rx()
[all …]
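The hamachi/yellowfin entries above show two related tricks from their ring-init paths: each descriptor's next pointer is the bus offset of descriptor (i+1) % RX_RING_SIZE, so the last one links back to the first (hamachi also tags it with DescEndRing), and after the buffer-allocation loop dirty_rx is set to the unsigned value i - RX_RING_SIZE, which underflows on purpose when allocation stopped early so that cur_rx - dirty_rx equals the number of still-empty slots. A sketch under those assumptions; types and fields are illustrative, not the drivers' own:

#include <stdint.h>
#include <stdlib.h>

#define RX_RING_SIZE 64
#define PKT_BUF_SZ   1536

struct rx_desc {
	uint32_t next_desc;	/* bus offset of the following descriptor */
	uint32_t status;
	void *buf;		/* stands in for the attached skb */
};

struct ring_priv {
	struct rx_desc rx_ring[RX_RING_SIZE];
	uint32_t ring_dma;	/* bus address of rx_ring[0], assumed known */
	unsigned int cur_rx, dirty_rx;
};

static void init_rx_ring(struct ring_priv *np)
{
	unsigned int i;

	/* Chain each descriptor to its successor; the modulo makes entry
	 * RX_RING_SIZE - 1 point back at entry 0, closing the ring. */
	for (i = 0; i < RX_RING_SIZE; i++)
		np->rx_ring[i].next_desc = np->ring_dma +
			((i + 1) % RX_RING_SIZE) * sizeof(struct rx_desc);

	/* Attach buffers, stopping early if allocation fails. */
	for (i = 0; i < RX_RING_SIZE; i++) {
		np->rx_ring[i].buf = malloc(PKT_BUF_SZ);
		if (np->rx_ring[i].buf == NULL)
			break;
	}

	np->cur_rx = 0;
	/* Deliberate unsigned underflow when i < RX_RING_SIZE: afterwards
	 * cur_rx - dirty_rx equals the number of slots without a buffer. */
	np->dirty_rx = (unsigned int)(i - RX_RING_SIZE);
}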
/drivers/net/ethernet/pasemi/
pasemi_mac.h
29 #define RX_RING_SIZE 2048 macro
107 #define RX_DESC(rx, num) ((rx)->chan.ring_virt[(num) & (RX_RING_SIZE-1)])
108 #define RX_DESC_INFO(rx, num) ((rx)->ring_info[(num) & (RX_RING_SIZE-1)])
109 #define RX_BUFF(rx, num) ((rx)->buffers[(num) & (RX_RING_SIZE-1)])
pasemi_mac.c
394 ring->size = RX_RING_SIZE; in pasemi_mac_setup_rx_resources()
396 RX_RING_SIZE, GFP_KERNEL); in pasemi_mac_setup_rx_resources()
402 if (pasemi_dma_alloc_ring(&ring->chan, RX_RING_SIZE)) in pasemi_mac_setup_rx_resources()
406 RX_RING_SIZE * sizeof(u64), in pasemi_mac_setup_rx_resources()
416 PAS_DMA_RXCHAN_BASEU_SIZ(RX_RING_SIZE >> 3)); in pasemi_mac_setup_rx_resources()
430 PAS_DMA_RXINT_BASEU_SIZ(RX_RING_SIZE >> 3)); in pasemi_mac_setup_rx_resources()
559 for (i = 0; i < RX_RING_SIZE; i++) { in pasemi_mac_free_rx_buffers()
572 for (i = 0; i < RX_RING_SIZE; i++) in pasemi_mac_free_rx_buffers()
580 dma_free_coherent(&mac->dma_pdev->dev, RX_RING_SIZE * sizeof(u64), in pasemi_mac_free_rx_resources()
634 (RX_RING_SIZE - 1); in pasemi_mac_replenish_rx_ring()
[all …]
/drivers/net/wan/
dscc4.c
165 #define RX_RING_SIZE 32 macro
167 #define RX_TOTAL_SIZE RX_RING_SIZE*sizeof(struct RxFD)
200 struct sk_buff *rx_skbuff[RX_RING_SIZE];
441 ((dpriv->rx_dirty - 1)%RX_RING_SIZE)*sizeof(struct RxFD); in dscc4_rx_update()
507 for (i = 0; i < RX_RING_SIZE; i++) { in dscc4_release_ring()
521 unsigned int dirty = dpriv->rx_dirty%RX_RING_SIZE; in try_get_rx_skb()
648 struct RxFD *rx_fd = dpriv->rx_fd + dpriv->rx_current%RX_RING_SIZE; in dscc4_rx_skb()
653 skb = dpriv->rx_skbuff[dpriv->rx_current++%RX_RING_SIZE]; in dscc4_rx_skb()
680 while ((dpriv->rx_dirty - dpriv->rx_current) % RX_RING_SIZE) { in dscc4_rx_skb()
1659 (dpriv->rx_current%RX_RING_SIZE)* in dscc4_tx_irq()
[all …]
/drivers/net/ethernet/smsc/
epic100.c
55 #define RX_RING_SIZE 256 macro
57 #define RX_TOTAL_SIZE RX_RING_SIZE*sizeof(struct epic_rx_desc)
258 struct sk_buff* rx_skbuff[RX_RING_SIZE];
777 epic_rx(dev, RX_RING_SIZE); in epic_pause()
813 (ep->cur_rx % RX_RING_SIZE) * sizeof(struct epic_rx_desc)); in epic_restart()
910 for (i = 0; i < RX_RING_SIZE; i++) { in epic_init_ring()
921 for (i = 0; i < RX_RING_SIZE; i++) { in epic_init_ring()
931 ep->dirty_rx = (unsigned int)(i - RX_RING_SIZE); in epic_init_ring()
1141 int entry = ep->cur_rx % RX_RING_SIZE; in epic_rx()
1142 int rx_work_limit = ep->dirty_rx + RX_RING_SIZE - ep->cur_rx; in epic_rx()
[all …]
smsc9420.c
557 for (i = 0; i < RX_RING_SIZE; i++) { in smsc9420_free_rx_ring()
837 pd->rx_ring_tail = (pd->rx_ring_tail + 1) % RX_RING_SIZE; in smsc9420_alloc_new_rx_buffers()
859 pd->rx_ring_head = (pd->rx_ring_head + 1) % RX_RING_SIZE; in smsc9420_rx_poll()
1238 pd->rx_buffers = kmalloc_array(RX_RING_SIZE, in smsc9420_alloc_rx_ring()
1245 for (i = 0; i < RX_RING_SIZE; i++) { in smsc9420_alloc_rx_ring()
1252 pd->rx_ring[RX_RING_SIZE - 1].length = (PKT_BUF_SZ | RDES1_RER_); in smsc9420_alloc_rx_ring()
1255 for (i = 0; i < RX_RING_SIZE; i++) { in smsc9420_alloc_rx_ring()
1590 sizeof(struct smsc9420_dma_desc) * RX_RING_SIZE + in smsc9420_probe()
1598 pd->tx_ring = (pd->rx_ring + RX_RING_SIZE); in smsc9420_probe()
1600 sizeof(struct smsc9420_dma_desc) * RX_RING_SIZE; in smsc9420_probe()
[all …]
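smsc9420 above keeps two plain indices instead of free-running counters: rx_ring_head advances in the poll routine as frames are processed and rx_ring_tail advances as buffers are re-armed, each wrapping with % RX_RING_SIZE, while the hardware learns where the ring ends from the RDES1_RER_ flag on the last descriptor rather than from chained next pointers. A small sketch of that head/tail bookkeeping; the names and the occupancy helper are illustrative:

#define RX_RING_SIZE 32

struct rx_ring_state {
	unsigned int head;	/* next slot the poll loop will look at   */
	unsigned int tail;	/* next slot to be re-armed with a buffer */
};

/* Advance once per processed frame (cf. smsc9420_rx_poll). */
static inline void rx_advance_head(struct rx_ring_state *r)
{
	r->head = (r->head + 1) % RX_RING_SIZE;
}

/* Advance once per re-armed buffer (cf. smsc9420_alloc_new_rx_buffers). */
static inline void rx_advance_tail(struct rx_ring_state *r)
{
	r->tail = (r->tail + 1) % RX_RING_SIZE;
}

/* Illustrative helper: slots already processed but not yet re-armed. */
static inline unsigned int rx_unfilled(const struct rx_ring_state *r)
{
	return (r->head + RX_RING_SIZE - r->tail) % RX_RING_SIZE;
}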
/drivers/net/ethernet/dlink/
dl2k.h
40 #define RX_RING_SIZE 256 macro
42 #define RX_TOTAL_SIZE RX_RING_SIZE*sizeof(struct netdev_desc)
371 struct sk_buff *rx_skbuff[RX_RING_SIZE];
sundance.c
70 #define RX_RING_SIZE 64 macro
73 #define RX_TOTAL_SIZE RX_RING_SIZE*sizeof(struct netdev_desc)
373 struct sk_buff* rx_skbuff[RX_RING_SIZE];
1037 for (i = 0; i < RX_RING_SIZE; i++) { in init_ring()
1039 ((i+1)%RX_RING_SIZE)*sizeof(*np->rx_ring)); in init_ring()
1046 for (i = 0; i < RX_RING_SIZE; i++) { in init_ring()
1064 np->dirty_rx = (unsigned int)(i - RX_RING_SIZE); in init_ring()
1335 int entry = np->cur_rx % RX_RING_SIZE; in rx_poll()
1406 entry = (entry + 1) % RX_RING_SIZE; in rx_poll()
1433 for (;(np->cur_rx - np->dirty_rx + RX_RING_SIZE) % RX_RING_SIZE > 0; in refill_rx()
[all …]
/drivers/net/ethernet/natsemi/
natsemi.c
104 #define RX_RING_SIZE 32 macro
544 struct sk_buff *rx_skbuff[RX_RING_SIZE];
545 dma_addr_t rx_dma[RX_RING_SIZE];
1718 writel(np->ring_dma + RX_RING_SIZE * sizeof(struct netdev_desc), in init_registers()
1871 for (i = 0; i < RX_RING_SIZE; i++) { in dump_ring()
1916 sizeof(struct netdev_desc) * (RX_RING_SIZE+TX_RING_SIZE), in alloc_ring()
1920 np->tx_ring = &np->rx_ring[RX_RING_SIZE]; in alloc_ring()
1931 int entry = np->dirty_rx % RX_RING_SIZE; in refill_rx()
1950 if (np->cur_rx - np->dirty_rx == RX_RING_SIZE) { in refill_rx()
1978 *((i+1)%TX_RING_SIZE+RX_RING_SIZE)); in init_ring()
[all …]
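natsemi.c above carves both descriptor rings out of one coherent DMA allocation: the block is sized for RX_RING_SIZE + TX_RING_SIZE descriptors, tx_ring simply points at &rx_ring[RX_RING_SIZE], and init_registers() hands the device ring_dma + RX_RING_SIZE * sizeof(desc) as the TX base. A sketch of that allocation under those assumptions; dma_alloc_coherent() is the real kernel API, while the surrounding struct, helper name, and ring sizes are illustrative:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/types.h>

#define RX_RING_SIZE 32		/* illustrative sizes */
#define TX_RING_SIZE 16

struct netdev_desc {
	u32 next_desc;
	u32 cmd_status;
	u32 addr;
	u32 software_use;
};

struct ns_private {
	struct netdev_desc *rx_ring;	/* first RX_RING_SIZE descriptors */
	struct netdev_desc *tx_ring;	/* the TX_RING_SIZE that follow   */
	dma_addr_t ring_dma;		/* bus address of rx_ring[0]      */
};

static int ns_alloc_ring(struct device *dev, struct ns_private *np)
{
	np->rx_ring = dma_alloc_coherent(dev,
			sizeof(struct netdev_desc) * (RX_RING_SIZE + TX_RING_SIZE),
			&np->ring_dma, GFP_KERNEL);
	if (!np->rx_ring)
		return -ENOMEM;

	/* TX descriptors start right after the RX ones, both in the CPU
	 * mapping and on the bus at ring_dma + RX_RING_SIZE * sizeof(desc),
	 * which is the address the TX ring register is programmed with. */
	np->tx_ring = &np->rx_ring[RX_RING_SIZE];
	return 0;
}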
/drivers/net/ethernet/3com/
3c59x.c
39 #define RX_RING_SIZE 32 macro
602 struct sk_buff* rx_skbuff[RX_RING_SIZE];
1219 vp->rx_ring = pci_alloc_consistent(pdev, sizeof(struct boom_rx_desc) * RX_RING_SIZE in vortex_probe1()
1226 vp->tx_ring = (struct boom_tx_desc *)(vp->rx_ring + RX_RING_SIZE); in vortex_probe1()
1227 vp->tx_ring_dma = vp->rx_ring_dma + sizeof(struct boom_rx_desc) * RX_RING_SIZE; in vortex_probe1()
1484 sizeof(struct boom_rx_desc) * RX_RING_SIZE in vortex_probe1()
1692 for (i = 0; i < RX_RING_SIZE; i++) /* AKPM: this is done in vortex_open, too */ in vortex_up()
1745 for (i = 0; i < RX_RING_SIZE; i++) { in vortex_open()
1760 if (i != RX_RING_SIZE) { in vortex_open()
1774 for (i = 0; i < RX_RING_SIZE; i++) { in vortex_open()
[all …]
3c515.c
57 #define RX_RING_SIZE 16 macro
305 struct boom_rx_desc rx_ring[RX_RING_SIZE];
308 struct sk_buff *rx_skbuff[RX_RING_SIZE];
820 for (i = 0; i < RX_RING_SIZE; i++) { in corkscrew_open()
822 if (i < (RX_RING_SIZE - 1)) in corkscrew_open()
1333 int entry = vp->cur_rx % RX_RING_SIZE; in boomerang_rx()
1395 entry = (++vp->cur_rx) % RX_RING_SIZE; in boomerang_rx()
1400 entry = vp->dirty_rx % RX_RING_SIZE; in boomerang_rx()
1450 for (i = 0; i < RX_RING_SIZE; i++) in corkscrew_close()
/drivers/net/ethernet/via/
via-rhine.c
81 #define RX_RING_SIZE 64 macro
448 struct sk_buff *rx_skbuff[RX_RING_SIZE];
449 dma_addr_t rx_skbuff_dma[RX_RING_SIZE];
1165 RX_RING_SIZE * sizeof(struct rx_desc) + in alloc_ring()
1180 RX_RING_SIZE * sizeof(struct rx_desc) + in alloc_ring()
1188 rp->tx_ring = ring + RX_RING_SIZE * sizeof(struct rx_desc); in alloc_ring()
1190 rp->tx_ring_dma = ring_dma + RX_RING_SIZE * sizeof(struct rx_desc); in alloc_ring()
1201 RX_RING_SIZE * sizeof(struct rx_desc) + in free_ring()
1246 for (i = 0; i < RX_RING_SIZE; i++) in rhine_reset_rbufs()
1272 for (i = 0; i < RX_RING_SIZE; i++) { in alloc_rbufs()
[all …]
