Searched refs:dma_tx (Results 1 – 17 of 17) sorted by relevance

/drivers/net/irda/
sa1100_ir.c 64 struct sa1100_buf dma_tx; member
230 dma_unmap_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE); in sa1100_irda_sirtxdma_irq()
231 dev_kfree_skb(si->dma_tx.skb); in sa1100_irda_sirtxdma_irq()
232 si->dma_tx.skb = NULL; in sa1100_irda_sirtxdma_irq()
235 dev->stats.tx_bytes += sg_dma_len(&si->dma_tx.sg); in sa1100_irda_sirtxdma_irq()
262 si->dma_tx.skb = skb; in sa1100_irda_sir_tx_start()
263 sg_set_buf(&si->dma_tx.sg, si->tx_buff.data, si->tx_buff.len); in sa1100_irda_sir_tx_start()
264 if (dma_map_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE) == 0) { in sa1100_irda_sir_tx_start()
265 si->dma_tx.skb = NULL; in sa1100_irda_sir_tx_start()
271 sa1100_irda_dma_start(&si->dma_tx, DMA_MEM_TO_DEV, sa1100_irda_sirtxdma_irq, dev); in sa1100_irda_sir_tx_start()
[all …]
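
The sa1100_ir.c hits show the streaming-DMA side of a TX path: the payload is wrapped in a one-entry scatterlist and mapped with dma_map_sg() before the transfer starts, then unmapped and freed in the completion interrupt. A minimal sketch of that shape, modeled loosely on struct sa1100_buf (the tx_buf/tx_map/tx_done names are illustrative, not the driver's own):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/skbuff.h>

struct tx_buf {                         /* hypothetical, modeled on struct sa1100_buf */
        struct device *dev;
        struct sk_buff *skb;
        struct scatterlist sg;
};

static int tx_map(struct tx_buf *buf, struct sk_buff *skb)
{
        buf->skb = skb;
        sg_init_one(&buf->sg, skb->data, skb->len);
        if (dma_map_sg(buf->dev, &buf->sg, 1, DMA_TO_DEVICE) == 0) {
                buf->skb = NULL;        /* mapping failed: caller falls back */
                return -ENOMEM;
        }
        return 0;                       /* hardware may now read the buffer */
}

static void tx_done(struct tx_buf *buf) /* completion-IRQ side */
{
        dma_unmap_sg(buf->dev, &buf->sg, 1, DMA_TO_DEVICE);
        dev_kfree_skb(buf->skb);        /* safe to free only after unmap */
        buf->skb = NULL;
}
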
/drivers/spi/
spi-omap2-mcspi.c 97 struct dma_chan *dma_tx; member
335 if (mcspi_dma->dma_tx) { in omap2_mcspi_tx_dma()
339 dmaengine_slave_config(mcspi_dma->dma_tx, &cfg); in omap2_mcspi_tx_dma()
345 tx = dmaengine_prep_slave_sg(mcspi_dma->dma_tx, &sg, 1, in omap2_mcspi_tx_dma()
355 dma_async_issue_pending(mcspi_dma->dma_tx); in omap2_mcspi_tx_dma()
838 mcspi_dma->dma_tx = dma_request_channel(mask, omap_dma_filter_fn, &sig); in omap2_mcspi_request_dma()
839 if (!mcspi_dma->dma_tx) { in omap2_mcspi_request_dma()
880 if (!mcspi_dma->dma_rx || !mcspi_dma->dma_tx) { in omap2_mcspi_setup()
920 if (mcspi_dma->dma_tx) { in omap2_mcspi_cleanup()
921 dma_release_channel(mcspi_dma->dma_tx); in omap2_mcspi_cleanup()
[all …]
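
Taken together, the spi-omap2-mcspi.c matches trace the canonical dmaengine slave-TX lifecycle: request a DMA_SLAVE channel through a filter at probe time, configure the slave side, prepare a scatterlist descriptor, issue it, and release the channel on cleanup. A hedged sketch of that sequence (the FIFO address, bus width, and burst values are placeholders, not McSPI's real parameters):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static struct dma_chan *tx_chan_get(dma_filter_fn filter, void *param)
{
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);
        return dma_request_channel(mask, filter, param);  /* NULL on failure */
}

static int tx_submit(struct dma_chan *chan, struct scatterlist *sg,
                     dma_addr_t fifo, dma_async_tx_callback done, void *arg)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = fifo,         /* device TX register */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
                .dst_maxburst   = 1,
        };
        struct dma_async_tx_descriptor *tx;

        dmaengine_slave_config(chan, &cfg);
        tx = dmaengine_prep_slave_sg(chan, sg, 1, DMA_MEM_TO_DEV,
                                     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx)
                return -EIO;
        tx->callback = done;            /* runs when the transfer completes */
        tx->callback_param = arg;
        dmaengine_submit(tx);
        dma_async_issue_pending(chan);  /* actually start the queued work */
        return 0;
}
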
spi-ep93xx.c 128 struct dma_chan *dma_tx; member
575 chan = espi->dma_tx; in ep93xx_spi_dma_prepare()
657 chan = espi->dma_tx; in ep93xx_spi_dma_finish()
698 dma_async_issue_pending(espi->dma_tx); in ep93xx_spi_dma_transfer()
993 espi->dma_tx = dma_request_channel(mask, ep93xx_spi_dma_filter, in ep93xx_spi_setup_dma()
995 if (!espi->dma_tx) { in ep93xx_spi_setup_dma()
1017 if (espi->dma_tx) { in ep93xx_spi_release_dma()
1018 dma_release_channel(espi->dma_tx); in ep93xx_spi_release_dma()
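
Both spi-ep93xx.c and the McSPI driver treat DMA as strictly optional: when dma_request_channel() returns NULL the pointer stays NULL, transfers fall back to PIO, and teardown releases only what was actually acquired. A short sketch of that guard pattern (the my_spi struct and helpers are illustrative):

#include <linux/dmaengine.h>

struct my_spi {
        struct dma_chan *dma_tx;        /* NULL when DMA is unavailable */
};

static void my_spi_setup_dma(struct my_spi *espi, dma_filter_fn filter,
                             void *param)
{
        dma_cap_mask_t mask;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);
        espi->dma_tx = dma_request_channel(mask, filter, param);
        /* a NULL result is not fatal: the PIO path still works */
}

static void my_spi_release_dma(struct my_spi *espi)
{
        if (espi->dma_tx) {             /* mirrors the check at line 1017 */
                dma_release_channel(espi->dma_tx);
                espi->dma_tx = NULL;
        }
}
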
spi-davinci.c 136 struct dma_chan *dma_tx; member
591 dmaengine_slave_config(dspi->dma_tx, &dma_tx_conf); in davinci_spi_bufs()
627 txdesc = dmaengine_prep_slave_sg(dspi->dma_tx, in davinci_spi_bufs()
645 dma_async_issue_pending(dspi->dma_tx); in davinci_spi_bufs()
760 dspi->dma_tx = dma_request_channel(mask, edma_filter_fn, in davinci_spi_request_dma()
762 if (!dspi->dma_tx) { in davinci_spi_request_dma()
1014 dma_release_channel(dspi->dma_tx); in davinci_spi_probe()
/drivers/net/ethernet/micrel/
ks8842.c 159 #define KS8842_USE_DMA(adapter) (((adapter)->dma_tx.channel != -1) && \
171 struct ks8842_tx_dma_ctl dma_tx; member
434 struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx; in ks8842_tx_frame_dma()
857 struct ks8842_tx_dma_ctl *ctl = &adapter->dma_tx; in ks8842_dma_tx_cb()
873 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_stop_dma()
897 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_dealloc_dma_bufs()
929 struct ks8842_tx_dma_ctl *tx_ctl = &adapter->dma_tx; in ks8842_alloc_dma_bufs()
1004 adapter->dma_tx.channel = -1; in ks8842_open()
1058 if (adapter->dma_tx.adesc) in ks8842_xmit_frame()
1185 adapter->dma_tx.channel = pdata->tx_dma_channel; in ks8842_probe()
[all …]
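
ks8842.c gates its DMA paths on a sentinel value instead of a pointer: platform data supplies a channel number, -1 means "no DMA", and the KS8842_USE_DMA() macro at line 159 checks both directions before any DMA code runs. A sketch of that convention (struct and function names are simplified from the driver's ks8842_tx_dma_ctl):

struct tx_dma_ctl {
        int channel;                    /* -1 == DMA disabled */
};

struct adapter {
        struct tx_dma_ctl dma_tx;
        struct tx_dma_ctl dma_rx;
};

#define USE_DMA(a) \
        ((a)->dma_tx.channel != -1 && (a)->dma_rx.channel != -1)

/* open() defaults to PIO (as at line 1004); probe() may overwrite the
 * channel from platform data (line 1185), enabling the DMA xmit path. */
static int use_dma_for_frame(struct adapter *ad)
{
        return USE_DMA(ad);
}
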
/drivers/net/ethernet/calxeda/
xgmac.c 374 struct xgmac_dma_desc *dma_tx; member
737 priv->dma_tx = dma_alloc_coherent(priv->device, in xgmac_dma_desc_rings_init()
742 if (!priv->dma_tx) in xgmac_dma_desc_rings_init()
747 priv->dma_rx, priv->dma_tx, in xgmac_dma_desc_rings_init()
758 desc_init_tx_desc(priv->dma_tx, DMA_TX_RING_SZ); in xgmac_dma_desc_rings_init()
808 p = priv->dma_tx + i; in xgmac_free_tx_skbufs()
813 p = priv->dma_tx + i++; in xgmac_free_tx_skbufs()
830 if (priv->dma_tx) { in xgmac_free_dma_desc_rings()
833 priv->dma_tx, priv->dma_tx_phy); in xgmac_free_dma_desc_rings()
834 priv->dma_tx = NULL; in xgmac_free_dma_desc_rings()
[all …]
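
In xgmac.c the dma_tx field is not a channel at all but the CPU pointer to a coherent ring of hardware TX descriptors: allocated once with dma_alloc_coherent(), walked by pointer arithmetic (priv->dma_tx + i), and returned with dma_free_coherent(). A hedged sketch of that ring lifecycle (TX_RING_SZ and the descriptor layout are placeholders for the driver's DMA_TX_RING_SZ and struct xgmac_dma_desc):

#include <linux/dma-mapping.h>

#define TX_RING_SZ      64              /* placeholder ring depth */

struct dma_desc {                       /* placeholder descriptor layout */
        u32 flags;
        u32 buf_size;
        u32 buf1_addr;
        u32 next_addr;
};

struct priv {
        struct device *device;
        struct dma_desc *dma_tx;        /* CPU view of the ring */
        dma_addr_t dma_tx_phy;          /* bus address programmed into the MAC */
};

static int tx_ring_init(struct priv *priv)
{
        priv->dma_tx = dma_alloc_coherent(priv->device,
                        TX_RING_SZ * sizeof(struct dma_desc),
                        &priv->dma_tx_phy, GFP_KERNEL);
        return priv->dma_tx ? 0 : -ENOMEM;
}

static void tx_ring_free(struct priv *priv)
{
        if (priv->dma_tx) {
                dma_free_coherent(priv->device,
                                TX_RING_SZ * sizeof(struct dma_desc),
                                priv->dma_tx, priv->dma_tx_phy);
                priv->dma_tx = NULL;    /* as xgmac does at line 834 */
        }
}
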
/drivers/net/ethernet/stmicro/stmmac/
dwmac100_dma.c 36 int burst_len, u32 dma_tx, u32 dma_rx, int atds) in dwmac100_dma_init() argument
63 writel(dma_tx, ioaddr + DMA_TX_BASE_ADDR); in dwmac100_dma_init()
ring_mode.c 36 struct dma_desc *desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
57 desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
dwmac1000_dma.c 34 int burst_len, u32 dma_tx, u32 dma_rx, int atds) in dwmac1000_dma_init() argument
107 writel(dma_tx, ioaddr + DMA_TX_BASE_ADDR); in dwmac1000_dma_init()
chain_mode.c 36 struct dma_desc *desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
55 desc = priv->dma_tx + entry; in stmmac_jumbo_frm()
stmmac_main.c 369 desc = (priv->dma_tx + entry); in stmmac_get_tx_hwtstamp()
915 stmmac_display_ring((void *)priv->dma_tx, txsize, 0); in stmmac_display_rings()
963 priv->hw->desc->init_tx_desc(&priv->dma_tx[i], in stmmac_clear_descriptors()
1039 priv->dma_tx = dma_alloc_coherent(priv->device, txsize * in init_dma_desc_rings()
1043 if ((!priv->dma_rx) || (!priv->dma_tx)) in init_dma_desc_rings()
1089 priv->hw->chain->init(priv->dma_tx, priv->dma_tx_phy, in init_dma_desc_rings()
1100 p = priv->dma_tx + i; in init_dma_desc_rings()
1139 p = priv->dma_tx + i; in dma_free_tx_skbufs()
1163 priv->dma_tx, priv->dma_tx_phy); in free_dma_desc_resources()
1226 p = priv->dma_tx + entry; in stmmac_tx_clean()
[all …]
stmmac.h 39 struct dma_desc *dma_tx; member
common.h 356 int burst_len, u32 dma_tx, u32 dma_rx, int atds);
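
The stmmac hits show the same descriptor ring from two angles: in stmmac.h and stmmac_main.c, dma_tx is the coherent ring itself, indexed as priv->dma_tx + entry, while in the dwmac100/dwmac1000 dma_init() hooks the u32 dma_tx argument is the ring's bus address, written into the controller's TX base register. A sketch of the register-programming side (the register offset here is a placeholder; only the writel() call mirrors the drivers):

#include <linux/io.h>

#define DMA_TX_BASE_ADDR        0x10    /* placeholder register offset */

static void my_dma_init(void __iomem *ioaddr, u32 dma_tx)
{
        /* hand the TX descriptor ring's bus address to the DMA engine */
        writel(dma_tx, ioaddr + DMA_TX_BASE_ADDR);
}
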
/drivers/mmc/host/
omap.c 142 struct dma_chan *dma_tx; member
423 c = host->dma_tx; in mmc_omap_release_dma()
1015 c = host->dma_tx; in mmc_omap_prepare_data()
1089 host->dma_tx : host->dma_rx; in mmc_omap_start_request()
1415 host->dma_tx = dma_request_channel(mask, omap_dma_filter_fn, &sig); in mmc_omap_probe()
1417 if (!host->dma_tx) { in mmc_omap_probe()
1423 if (!host->dma_tx) in mmc_omap_probe()
1481 if (host->dma_tx) in mmc_omap_probe()
1482 dma_release_channel(host->dma_tx); in mmc_omap_probe()
1519 if (host->dma_tx) in mmc_omap_remove()
[all …]
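
The OMAP MMC host keeps one dmaengine channel per direction and picks between them per request, as at line 1089. A small sketch of that selection (my_host is illustrative; the flag test follows the MMC core's struct mmc_data):

#include <linux/dmaengine.h>
#include <linux/mmc/core.h>

struct my_host {
        struct dma_chan *dma_tx;
        struct dma_chan *dma_rx;
};

/* writes stream memory to the card, so they ride the TX channel */
static struct dma_chan *pick_chan(struct my_host *host, struct mmc_data *data)
{
        return (data->flags & MMC_DATA_WRITE) ? host->dma_tx : host->dma_rx;
}
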
davinci_mmc.c 206 struct dma_chan *dma_tx; member
413 sync_dev = host->dma_tx; in davinci_abort_dma()
433 chan = host->dma_tx; in mmc_davinci_send_dma_request()
434 dmaengine_slave_config(host->dma_tx, &dma_tx_conf); in mmc_davinci_send_dma_request()
436 desc = dmaengine_prep_slave_sg(host->dma_tx, in mmc_davinci_send_dma_request()
514 dma_release_channel(host->dma_tx); in davinci_release_dma_channels()
526 host->dma_tx = in davinci_acquire_dma_channels()
529 if (!host->dma_tx) { in davinci_acquire_dma_channels()
546 dma_release_channel(host->dma_tx); in davinci_acquire_dma_channels()
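
davinci_mmc.c also has an abort path (davinci_abort_dma, hit at line 413), whose body the excerpt does not show; with the dmaengine API the usual way to cancel an in-flight transfer is dmaengine_terminate_all() on the affected channel, as in this hedged sketch:

#include <linux/dmaengine.h>

static void my_abort_dma(struct dma_chan *chan)
{
        if (chan)
                dmaengine_terminate_all(chan);  /* drop queued descriptors */
}
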
/drivers/net/wan/
z85230.c 541 if(!chan->dma_tx) in z8530_dma_tx()
572 if(chan->dma_tx) in z8530_dma_status()
807 c->dma_tx = 0; in z8530_sync_open()
897 c->dma_tx = 1; in z8530_sync_dma_open()
1098 c->dma_tx = 1; in z8530_sync_txdma_open()
1444 if(c->dma_tx) in z8530_tx_begin()
1465 if(c->dma_tx) in z8530_tx_begin()
1747 …if(c->dma_tx && ((unsigned long)(virt_to_bus(skb->data+skb->len))>=16*1024*1024 || spans_boundary(… in z8530_queue_xmit()
z85230.h 302 u8 dma_tx; /* TX is to use DMA */ member
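
z85230.c predates the dmaengine framework entirely: its dma_tx is a plain u8 flag ("TX is to use DMA"), and the check at line 1747 enforces the classic ISA constraints by hand. The buffer must sit below 16 MB of bus address space and must not span a 64 KB DMA boundary, otherwise the frame is bounced through a safe buffer. A sketch of that reachability test (spans_boundary() here is a plausible reconstruction from its call site, not the driver's exact code; virt_to_bus() is the legacy API the driver really uses):

#include <linux/io.h>
#include <linux/skbuff.h>

/* an ISA DMA transfer must not cross a 64 KB physical boundary */
static int spans_boundary(struct sk_buff *skb)
{
        unsigned long a = (unsigned long)virt_to_bus(skb->data);

        /* any differing bit above bit 15 means the ends straddle 64 KB */
        return ((a ^ (a + skb->len - 1)) & ~0xffffUL) != 0;
}

static int needs_bounce(struct sk_buff *skb)
{
        return (unsigned long)virt_to_bus(skb->data + skb->len) >=
                        16 * 1024 * 1024 || spans_boundary(skb);
}
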