/drivers/net/ethernet/sis/ |
D | sis190.c |
    270   u32 rx_buf_sz;   member
    461   static inline void sis190_give_to_asic(struct RxDesc *desc, u32 rx_buf_sz)   in sis190_give_to_asic() argument
    466   desc->size = cpu_to_le32((rx_buf_sz & RX_BUF_MASK) | eor);   in sis190_give_to_asic()
    472   u32 rx_buf_sz)   in sis190_map_to_asic() argument
    475   sis190_give_to_asic(desc, rx_buf_sz);   in sis190_map_to_asic()
    490   u32 rx_buf_sz = tp->rx_buf_sz;   in sis190_alloc_rx_skb() local
    494   skb = netdev_alloc_skb(tp->dev, rx_buf_sz);   in sis190_alloc_rx_skb()
    497   mapping = pci_map_single(tp->pci_dev, skb->data, tp->rx_buf_sz,   in sis190_alloc_rx_skb()
    501   sis190_map_to_asic(desc, mapping, rx_buf_sz);   in sis190_alloc_rx_skb()
    545   pci_dma_sync_single_for_cpu(tp->pci_dev, addr, tp->rx_buf_sz,   in sis190_try_rx_copy()
    [all …]
|
/drivers/net/ethernet/packetengines/ |
D | hamachi.c |
    496    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    1120   hmp->rx_buf_sz, PCI_DMA_FROMDEVICE);   in hamachi_tx_timeout()
    1129   skb = netdev_alloc_skb_ip_align(dev, hmp->rx_buf_sz);   in hamachi_tx_timeout()
    1135   skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE));   in hamachi_tx_timeout()
    1137   DescEndPacket | DescIntr | (hmp->rx_buf_sz - 2));   in hamachi_tx_timeout()
    1171   hmp->rx_buf_sz = (dev->mtu <= 1492 ? PKT_BUF_SZ :   in hamachi_init_ring()
    1181   struct sk_buff *skb = netdev_alloc_skb(dev, hmp->rx_buf_sz + 2);   in hamachi_init_ring()
    1187   skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE));   in hamachi_init_ring()
    1190   DescEndPacket | DescIntr | (hmp->rx_buf_sz -2));   in hamachi_init_ring()
    1418   hmp->rx_buf_sz,   in hamachi_rx()
    [all …]
|
D | yellowfin.c |
    324    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    728    yp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in yellowfin_init_ring()
    732    cpu_to_le32(CMD_RX_BUF | INTR_ALWAYS | yp->rx_buf_sz);   in yellowfin_init_ring()
    738    struct sk_buff *skb = netdev_alloc_skb(dev, yp->rx_buf_sz + 2);   in yellowfin_init_ring()
    744    skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE));   in yellowfin_init_ring()
    1059   yp->rx_buf_sz, PCI_DMA_FROMDEVICE);   in yellowfin_rx()
    1126   yp->rx_buf_sz,   in yellowfin_rx()
    1138   yp->rx_buf_sz,   in yellowfin_rx()
    1153   struct sk_buff *skb = netdev_alloc_skb(dev, yp->rx_buf_sz + 2);   in yellowfin_rx()
    1159   skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE));   in yellowfin_rx()
    [all …]
|
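A pattern worth noting across these Ethernet drivers (hamachi and yellowfin above, and winbond-840, epic100, fealnx, starfire, via-rhine below): rx_buf_sz is derived from the MTU plus a little slack, and the skb is usually allocated 2 bytes larger so the IP header lands on a 4-byte boundary behind the 14-byte Ethernet header. A minimal, self-contained sketch of that sizing rule follows; the PKT_BUF_SZ value and the compute_rx_buf_sz() helper are illustrative assumptions, not taken from any one driver.

#include <stdio.h>

#define PKT_BUF_SZ	1536	/* assumed default buffer for a standard 1500-byte MTU */
#define NET_IP_ALIGN	2	/* pad so the IP header is 4-byte aligned */

/* Hypothetical helper mirroring the "mtu <= 1500 ? PKT_BUF_SZ : mtu + 32"
 * expression that appears in the hits above. */
static unsigned int compute_rx_buf_sz(unsigned int mtu)
{
	return (mtu <= 1500) ? PKT_BUF_SZ : mtu + 32;
}

int main(void)
{
	unsigned int mtu[] = { 1500, 4000 };

	for (unsigned int i = 0; i < 2; i++) {
		unsigned int sz = compute_rx_buf_sz(mtu[i]);

		/* drivers typically allocate sz + NET_IP_ALIGN bytes per skb */
		printf("mtu %u -> rx_buf_sz %u, skb alloc %u\n",
		       mtu[i], sz, sz + NET_IP_ALIGN);
	}
	return 0;
}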
/drivers/net/ethernet/dlink/ |
D | dl2k.c |
    489   np->rx_buf_sz = (dev->mtu <= 1500 ? PACKET_SIZE : dev->mtu + 32);   in alloc_list()
    504   skb = netdev_alloc_skb_ip_align(dev, np->rx_buf_sz);   in alloc_list()
    517   np->pdev, skb->data, np->rx_buf_sz,   in alloc_list()
    519   np->rx_ring[i].fraginfo |= cpu_to_le64((u64)np->rx_buf_sz << 48);   in alloc_list()
    674   np->rx_buf_sz);   in rio_timer()
    685   (np->pdev, skb->data, np->rx_buf_sz,   in rio_timer()
    689   cpu_to_le64((u64)np->rx_buf_sz << 48);   in rio_timer()
    963   np->rx_buf_sz,   in receive_packet()
    970   np->rx_buf_sz,   in receive_packet()
    978   np->rx_buf_sz,   in receive_packet()
    [all …]
|
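The dl2k hits at lines 519 and 689 show the driver packing the buffer length into the top 16 bits of the 64-bit fraginfo descriptor field, leaving the low 48 bits for the DMA address. A small sketch of that bit layout, with a hypothetical helper name and without the endianness conversion:

#include <stdint.h>
#include <stdio.h>

/* Illustrative only: low 48 bits carry the bus address, bits 63:48 the
 * buffer length, as in "fraginfo |= cpu_to_le64((u64)np->rx_buf_sz << 48)". */
static uint64_t pack_fraginfo(uint64_t dma_addr, uint16_t rx_buf_sz)
{
	return (dma_addr & ((1ULL << 48) - 1)) | ((uint64_t)rx_buf_sz << 48);
}

int main(void)
{
	uint64_t fraginfo = pack_fraginfo(0x0000000012345678ULL, 1536);

	printf("fraginfo = 0x%016llx, length = %llu\n",
	       (unsigned long long)fraginfo,
	       (unsigned long long)(fraginfo >> 48));
	return 0;
}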
D | sundance.c |
    396    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    1034   np->rx_buf_sz = (dev->mtu <= 1520 ? PKT_BUF_SZ : dev->mtu + 16);   in init_ring()
    1048   netdev_alloc_skb(dev, np->rx_buf_sz + 2);   in init_ring()
    1055   np->rx_buf_sz, DMA_FROM_DEVICE));   in init_ring()
    1062   np->rx_ring[i].frag[0].length = cpu_to_le32(np->rx_buf_sz | LastFrag);   in init_ring()
    1388   np->rx_buf_sz, DMA_FROM_DEVICE);   in rx_poll()
    1392   np->rx_buf_sz, DMA_FROM_DEVICE);   in rx_poll()
    1397   np->rx_buf_sz, DMA_FROM_DEVICE);   in rx_poll()
    1437   skb = netdev_alloc_skb(dev, np->rx_buf_sz + 2);   in refill_rx()
    1444   np->rx_buf_sz, DMA_FROM_DEVICE));   in refill_rx()
    [all …]
|
D | dl2k.h | 377 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
|
/drivers/net/ethernet/smsc/ |
D | epic100.c |
    271    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    900    ep->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in epic_init_ring()
    905    ep->rx_ring[i].buflength = ep->rx_buf_sz;   in epic_init_ring()
    915    struct sk_buff *skb = netdev_alloc_skb(dev, ep->rx_buf_sz + 2);   in epic_init_ring()
    921    skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE);   in epic_init_ring()
    1182   ep->rx_buf_sz,   in epic_rx()
    1188   ep->rx_buf_sz,   in epic_rx()
    1193   ep->rx_buf_sz, PCI_DMA_FROMDEVICE);   in epic_rx()
    1211   skb = ep->rx_skbuff[entry] = netdev_alloc_skb(dev, ep->rx_buf_sz + 2);   in epic_rx()
    1216   skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE);   in epic_rx()
    [all …]
|
/drivers/iio/adc/ |
D | stm32-adc.c |
    208    unsigned int rx_buf_sz;   member
    1250   unsigned int rx_buf_sz = STM32_DMA_BUFFER_SIZE;   in stm32_adc_set_watermark() local
    1259   adc->rx_buf_sz = min(rx_buf_sz, watermark * 2 * adc->num_conv);   in stm32_adc_set_watermark()
    1352   unsigned int i = adc->rx_buf_sz - state.residue;   in stm32_adc_dma_residue()
    1359   size = adc->rx_buf_sz + i - adc->bufi;   in stm32_adc_dma_residue()
    1385   adc->rx_buf_sz, adc->rx_buf_sz / 2);   in stm32_adc_dma_start()
    1390   adc->rx_buf_sz, adc->rx_buf_sz / 2,   in stm32_adc_dma_start()
    1530   if (adc->bufi >= adc->rx_buf_sz)   in stm32_adc_trigger_handler()
|
D | at91-sama5d2_adc.c |
    363    int rx_buf_sz;   member
    791    i = st->dma_st.rx_buf_sz - state.residue;   in at91_adc_dma_size_done()
    797    size = st->dma_st.rx_buf_sz + i - st->dma_st.buf_idx;   in at91_adc_dma_size_done()
    826    st->dma_st.rx_buf_sz = 0;   in at91_adc_dma_start()
    836    st->dma_st.rx_buf_sz += chan->scan_type.storagebits / 8;   in at91_adc_dma_start()
    838    st->dma_st.rx_buf_sz *= st->dma_st.watermark;   in at91_adc_dma_start()
    843    st->dma_st.rx_buf_sz,   in at91_adc_dma_start()
    844    st->dma_st.rx_buf_sz / 2,   in at91_adc_dma_start()
    1049   sample_size = div_s64(st->dma_st.rx_buf_sz, st->dma_st.watermark);   in at91_adc_trigger_handler_dma()
    1076   if (st->dma_st.buf_idx >= st->dma_st.rx_buf_sz)   in at91_adc_trigger_handler_dma()
|
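Both ADC drivers above use the same cyclic-DMA bookkeeping: the current write position is rx_buf_sz minus the residue reported by the DMA engine, and the amount of fresh data is its distance from the last-consumed index, corrected when the position has wrapped around the circular buffer (hits 1352/1359 in stm32-adc.c, 791/797 in at91-sama5d2_adc.c). A standalone sketch of that arithmetic, with hypothetical names:

#include <stdio.h>

static unsigned int dma_new_bytes(unsigned int rx_buf_sz,
				  unsigned int residue,
				  unsigned int bufi)
{
	unsigned int pos = rx_buf_sz - residue;   /* current DMA write index */

	/* If the write index wrapped past the end of the circular buffer,
	 * add one full buffer length before subtracting the read index. */
	if (pos < bufi)
		return rx_buf_sz + pos - bufi;
	return pos - bufi;
}

int main(void)
{
	/* 4 KiB buffer: a non-wrapped case, then a wrapped one */
	printf("%u\n", dma_new_bytes(4096, 1024, 2048));  /* 1024 new bytes */
	printf("%u\n", dma_new_bytes(4096, 3584, 3072));  /* 1536 new bytes */
	return 0;
}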
/drivers/net/ethernet/ |
D | fealnx.c |
    407    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    1065   skb = netdev_alloc_skb(dev, np->rx_buf_sz);   in allocate_rx_buffers()
    1074   np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in allocate_rx_buffers()
    1238   np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in init_ring()
    1246   np->rx_ring[i].control = np->rx_buf_sz << RBSShift;   in init_ring()
    1259   struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz);   in init_ring()
    1269   np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in init_ring()
    1701   np->rx_buf_sz,   in netdev_rx()
    1715   np->rx_buf_sz,   in netdev_rx()
    1720   np->rx_buf_sz,   in netdev_rx()
    [all …]
|
/drivers/staging/vt6655/ |
D | card.c |
    560   pDesc->rd0.res_count = cpu_to_le16(priv->rx_buf_sz);   in CARDvSafeResetRx()
    562   pDesc->rd1.req_count = cpu_to_le16(priv->rx_buf_sz);   in CARDvSafeResetRx()
    568   pDesc->rd0.res_count = cpu_to_le16(priv->rx_buf_sz);   in CARDvSafeResetRx()
    570   pDesc->rd1.req_count = cpu_to_le16(priv->rx_buf_sz);   in CARDvSafeResetRx()
|
D | device_main.c |
    783    rd_info->skb = dev_alloc_skb((int)priv->rx_buf_sz);   in device_alloc_rx_buf()
    790    priv->rx_buf_sz, DMA_FROM_DEVICE);   in device_alloc_rx_buf()
    799    rd->rd0.res_count = cpu_to_le16(priv->rx_buf_sz);   in device_alloc_rx_buf()
    801    rd->rd1.req_count = cpu_to_le16(priv->rx_buf_sz);   in device_alloc_rx_buf()
    813    priv->rx_buf_sz, DMA_FROM_DEVICE);   in device_free_rx_buf()
    1224   priv->rx_buf_sz = PKT_BUF_SZ;   in vnt_start()
|
D | device.h | 153 u32 rx_buf_sz; member
|
D | dpc.c | 129 priv->rx_buf_sz, DMA_FROM_DEVICE); in vnt_receive_frame()
|
/drivers/net/ethernet/realtek/ |
D | 8139cp.c |
    347    unsigned rx_buf_sz;   member
    413    cp->rx_buf_sz = mtu + ETH_HLEN + 8;   in cp_set_rxbufsize()
    415    cp->rx_buf_sz = PKT_BUF_SZ;   in cp_set_rxbufsize()
    477    const unsigned buflen = cp->rx_buf_sz;   in cp_rx_poll()
    546    cp->rx_buf_sz);   in cp_rx_poll()
    548    desc->opts1 = cpu_to_le32(DescOwn | cp->rx_buf_sz);   in cp_rx_poll()
    1067   skb = netdev_alloc_skb_ip_align(dev, cp->rx_buf_sz);   in cp_refill_rx()
    1072   cp->rx_buf_sz, PCI_DMA_FROMDEVICE);   in cp_refill_rx()
    1083   cpu_to_le32(DescOwn | RingEnd | cp->rx_buf_sz);   in cp_refill_rx()
    1086   cpu_to_le32(DescOwn | cp->rx_buf_sz);   in cp_refill_rx()
    [all …]
|
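In cp_rx_poll() and cp_refill_rx() above, rx_buf_sz shares the descriptor's first option word with the ownership and end-of-ring flags. The sketch below restates that packing; the DescOwn/RingEnd bit positions (31 and 30) are assumptions for illustration rather than values quoted from the driver header.

#include <stdint.h>
#include <stdio.h>

#define DescOwn	(1u << 31)	/* assumed: descriptor owned by the NIC */
#define RingEnd	(1u << 30)	/* assumed: wrap marker on the last entry */

/* Hypothetical helper: build the opts1 word for RX descriptor 'index'. */
static uint32_t rx_opts1(unsigned int index, unsigned int ring_size,
			 unsigned int rx_buf_sz)
{
	uint32_t opts1 = DescOwn | rx_buf_sz;

	if (index == ring_size - 1)
		opts1 |= RingEnd;
	return opts1;
}

int main(void)
{
	printf("entry 0:  0x%08x\n", (unsigned int)rx_opts1(0, 64, 1536));
	printf("entry 63: 0x%08x\n", (unsigned int)rx_opts1(63, 64, 1536));
	return 0;
}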
/drivers/net/ethernet/dec/tulip/ |
D | de2104x.c |
    310    unsigned rx_buf_sz;   member
    439    buflen = copying_skb ? (len + RX_OFFSET) : de->rx_buf_sz;   in de_rx()
    480    cpu_to_le32(RingEnd | de->rx_buf_sz);   in de_rx()
    482    de->rx_ring[rx_tail].opts2 = cpu_to_le32(de->rx_buf_sz);   in de_rx()
    1281   skb = netdev_alloc_skb(de->dev, de->rx_buf_sz);   in de_refill_rx()
    1286   skb->data, de->rx_buf_sz, PCI_DMA_FROMDEVICE);   in de_refill_rx()
    1292   cpu_to_le32(RingEnd | de->rx_buf_sz);   in de_refill_rx()
    1294   de->rx_ring[i].opts2 = cpu_to_le32(de->rx_buf_sz);   in de_refill_rx()
    1340   de->rx_buf_sz, PCI_DMA_FROMDEVICE);   in de_clean_rings()
    1383   de->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in de_open()
|
D | winbond-840.c |
    313    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    802    np->rx_ring[i].length = np->rx_buf_sz;   in init_rxtx_rings()
    811    struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz);   in init_rxtx_rings()
    816    np->rx_buf_sz,PCI_DMA_FROMDEVICE);   in init_rxtx_rings()
    975    np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in alloc_ringdesc()
    1267   skb = netdev_alloc_skb(dev, np->rx_buf_sz);   in netdev_rx()
    1273   np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in netdev_rx()
|
/drivers/net/ethernet/adaptec/ |
D | starfire.c |
    561    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    924    writel((np->rx_buf_sz << RxBufferLenShift) |   in netdev_open()
    1146   np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in init_ring()
    1150   struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz);   in init_ring()
    1154   …np->rx_info[i].mapping = pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in init_ring()
    1488   pci_unmap_single(np->pci_dev, np->rx_info[entry].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in __netdev_rx()
    1601   skb = netdev_alloc_skb(dev, np->rx_buf_sz);   in refill_rx_ring()
    1606   pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in refill_rx_ring()
    1982   pci_unmap_single(np->pci_dev, np->rx_info[i].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE);   in netdev_close()
|
/drivers/net/ethernet/sun/ |
D | sungem.h |
    939   #define RX_BUF_ALLOC_SIZE(gp) ((gp)->rx_buf_sz + 28 + RX_OFFSET + 64)
    994   int rx_buf_sz;   member
|
/drivers/net/ethernet/via/ |
D | via-rhine.c |
    476    unsigned int rx_buf_sz;  /* Based on MTU+slack. */   member
    1219   const int size = rp->rx_buf_sz;   in rhine_skb_dma_init()
    1263   rp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);   in alloc_rbufs()
    1269   rp->rx_ring[i].desc_length = cpu_to_le32(rp->rx_buf_sz);   in alloc_rbufs()
    1308   rp->rx_buf_sz, DMA_FROM_DEVICE);   in free_rbufs()
    2104   rp->rx_buf_sz,   in rhine_rx()
    2113   rp->rx_buf_sz,   in rhine_rx()
    2125   rp->rx_buf_sz,   in rhine_rx()
|
/drivers/net/ethernet/natsemi/ |
D | natsemi.c |
    561    unsigned int rx_buf_sz;   member
    1748   if (np->rx_buf_sz > NATSEMI_LONGPKT)   in init_registers()
    1937   unsigned int buflen = np->rx_buf_sz+NATSEMI_PADDING;   in refill_rx()
    1952   np->rx_ring[entry].cmd_status = cpu_to_le32(np->rx_buf_sz);   in refill_rx()
    1965   np->rx_buf_sz = ETH_DATA_LEN + NATSEMI_HEADERS;   in set_bufsize()
    1967   np->rx_buf_sz = dev->mtu + NATSEMI_HEADERS;   in set_bufsize()
    2029   unsigned int buflen = np->rx_buf_sz;   in drain_rx()
    2288   unsigned int buflen = np->rx_buf_sz;   in netdev_rx()
    2348   } else if (pkt_len > np->rx_buf_sz) {   in netdev_rx()
|
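natsemi sizes the buffer from the MTU plus header room in set_bufsize() and, in netdev_rx(), drops any frame whose reported length exceeds rx_buf_sz rather than letting it spill past the mapped buffer (hit 2348). A sketch of that guard follows; the header-slack constant is a placeholder, not the driver's NATSEMI_HEADERS value.

#include <stdbool.h>
#include <stdio.h>

#define ETH_DATA_LEN	1500
#define HDR_SLACK	22	/* placeholder standing in for NATSEMI_HEADERS */

static unsigned int set_bufsize(unsigned int mtu)
{
	/* keep the standard size for normal MTUs, grow for larger ones */
	return (mtu <= ETH_DATA_LEN) ? ETH_DATA_LEN + HDR_SLACK
				     : mtu + HDR_SLACK;
}

static bool rx_frame_fits(unsigned int pkt_len, unsigned int rx_buf_sz)
{
	/* oversized frames are counted as errors and dropped */
	return pkt_len <= rx_buf_sz;
}

int main(void)
{
	unsigned int rx_buf_sz = set_bufsize(1500);

	printf("rx_buf_sz = %u\n", rx_buf_sz);
	printf("1514-byte frame fits: %d\n", rx_frame_fits(1514, rx_buf_sz));
	printf("9000-byte frame fits: %d\n", rx_frame_fits(9000, rx_buf_sz));
	return 0;
}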
/drivers/net/ethernet/huawei/hinic/ |
D | hinic_hw_dev.h | 135 u16 rx_buf_sz; member
|
/drivers/staging/vt6656/ |
D | device.h | 269 u32 rx_buf_sz; member
|
/drivers/net/ethernet/renesas/ |
D | sh_eth.c |
    1320   ALIGN(mdp->rx_buf_sz, 32),   in sh_eth_ring_free()
    1362   int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1;   in sh_eth_ring_format()
    1383   buf_len = ALIGN(mdp->rx_buf_sz, 32);   in sh_eth_ring_format()
    1442   mdp->rx_buf_sz = (ndev->mtu <= 1492 ? PKT_BUF_SZ :   in sh_eth_ring_init()
    1445   mdp->rx_buf_sz += NET_IP_ALIGN;   in sh_eth_ring_init()
    1629   int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1;   in sh_eth_rx()
    1688   ALIGN(mdp->rx_buf_sz, 32),   in sh_eth_rx()
    1709   buf_len = ALIGN(mdp->rx_buf_sz, 32);   in sh_eth_rx()
|
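sh_eth rounds the DMA length up to a 32-byte boundary for the descriptor (ALIGN(mdp->rx_buf_sz, 32)) and allocates the skb with extra room for the receive-alignment offset plus worst-case rounding slack. The sketch below redoes that arithmetic; the SH_ETH_RX_ALIGN value and the sample rx_buf_sz are assumptions for illustration only.

#include <stdio.h>

#define ALIGN_UP(x, a)	(((x) + (a) - 1) & ~((a) - 1))
#define SH_ETH_RX_ALIGN	32	/* assumed alignment requirement */

int main(void)
{
	unsigned int rx_buf_sz = 1538;	/* illustrative value only */
	unsigned int buf_len = ALIGN_UP(rx_buf_sz, 32);			/* descriptor DMA length */
	unsigned int skbuff_size = rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1;	/* skb allocation size */

	printf("rx_buf_sz %u -> DMA length %u, skb allocation %u\n",
	       rx_buf_sz, buf_len, skbuff_size);
	return 0;
}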
/drivers/atm/ |
D | iphase.c |
    768    iadev->rx_buf_sz = IA_RX_BUF_SZ;
    779    iadev->rx_buf_sz = IA_RX_BUF_SZ;
    791    iadev->rx_buf_sz = IA_RX_BUF_SZ;
    796    iadev->rx_buf_sz, iadev->rx_pkt_ram);)
    1171   if (len > iadev->rx_buf_sz) {
    1172   printk("Over %d bytes sdu received, dropped!!!\n", iadev->rx_buf_sz);
    1332   if ((length > iadev->rx_buf_sz) || (length >
    1479   writew(iadev->rx_buf_sz, iadev->reass_reg+BUF_SIZE);
    1493   rx_pkt_start += iadev->rx_buf_sz;
    3155   iadev->num_rx_desc, iadev->rx_buf_sz,
|