Home
last modified time | relevance | path

Searched refs:tx_dma (Results 1 – 18 of 18) sorted by relevance

/drivers/spi/
Datmel_spi.c 183 dma_addr_t *tx_dma, in atmel_spi_next_xfer_data() argument
199 *tx_dma = xfer->tx_dma + xfer->len - len; in atmel_spi_next_xfer_data()
201 *tx_dma = as->buffer_dma; in atmel_spi_next_xfer_data()
223 dma_addr_t tx_dma, rx_dma; in atmel_spi_next_xfer() local
238 atmel_spi_next_xfer_data(master, xfer, &tx_dma, &rx_dma, &len); in atmel_spi_next_xfer()
242 spi_writel(as, TPR, tx_dma); in atmel_spi_next_xfer()
251 xfer, xfer->len, xfer->tx_buf, xfer->tx_dma, in atmel_spi_next_xfer()
277 atmel_spi_next_xfer_data(master, xfer, &tx_dma, &rx_dma, &len); in atmel_spi_next_xfer()
281 spi_writel(as, TNPR, tx_dma); in atmel_spi_next_xfer()
290 xfer, xfer->len, xfer->tx_buf, xfer->tx_dma, in atmel_spi_next_xfer()
[all …]
Dspi_imx.c 248 dma_addr_t tx_dma; member
475 if (drv_data->tx_dma) in map_dma_buffers()
489 drv_data->tx_dma = dma_map_single(dev, in map_dma_buffers()
493 if (dma_mapping_error(dev, drv_data->tx_dma)) in map_dma_buffers()
518 drv_data->tx_dma = dma_map_single(dev, in map_dma_buffers()
522 if (dma_mapping_error(dev, drv_data->tx_dma)) in map_dma_buffers()
535 if (drv_data->tx_dma) { in map_dma_buffers()
537 drv_data->tx_dma, in map_dma_buffers()
564 drv_data->tx_dma, in unmap_dma_buffers()
928 drv_data->tx_dma = transfer->tx_dma; in pump_transfers()
[all …]
Dpxa2xx_spi.c 147 dma_addr_t tx_dma; member
333 return drv_data->rx_dma && drv_data->tx_dma; in map_dma_buffers()
359 drv_data->tx_dma = dma_map_single(dev, drv_data->tx, in map_dma_buffers()
361 if (dma_mapping_error(dev, drv_data->tx_dma)) in map_dma_buffers()
368 dma_unmap_single(dev, drv_data->tx_dma, in map_dma_buffers()
387 dma_unmap_single(dev, drv_data->tx_dma, in unmap_dma_buffers()
900 || transfer->rx_dma || transfer->tx_dma) { in pump_transfers()
933 drv_data->tx_dma = transfer->tx_dma; in pump_transfers()
1037 DSADR(drv_data->tx_channel) = drv_data->tx_dma; in pump_transfers()
Domap2_mcspi.c 267 xfer->tx_dma, 0, 0); in omap2_mcspi_txrx_dma()
297 dma_unmap_single(NULL, xfer->tx_dma, count, DMA_TO_DEVICE); in omap2_mcspi_txrx_dma()
840 t->tx_dma = dma_map_single(&spi->dev, (void *) tx_buf, in omap2_mcspi_transfer()
842 if (dma_mapping_error(&spi->dev, t->tx_dma)) { in omap2_mcspi_transfer()
855 dma_unmap_single(NULL, t->tx_dma, in omap2_mcspi_transfer()
Dspi_bfin5xx.c 90 dma_addr_t tx_dma; member
541 if (drv_data->tx_dma != 0xFFFF) { in giveback()
672 drv_data->tx_dma = transfer->tx_dma; in pump_transfers()
769 if (drv_data->tx_dma == 0xFFFF) { in pump_transfers()
Dau1550_spi.c 368 dma_tx_addr = t->tx_dma; in au1550_spi_dma_txrxb()
380 if (t->tx_dma == 0) { /* if DMA_ADDR_INVALID, map it */ in au1550_spi_dma_txrxb()
453 if (t->tx_buf && t->tx_dma == 0 ) in au1550_spi_dma_txrxb()
Dspi_bitbang.c 344 t->rx_dma = t->tx_dma = 0; in bitbang_work()
/drivers/net/
Dznet.c 130 int rx_dma, tx_dma; member
177 if (request_dma (znet->tx_dma, "ZNet tx")) in znet_request_resources()
190 free_dma (znet->tx_dma); in znet_request_resources()
210 free_dma (znet->tx_dma); in znet_release_resources()
418 znet->tx_dma = netinfo->dma2; in znet_probe()
832 short dma_port = ((znet->tx_dma&3)<<2) + IO_DMA2_BASE; in show_dma()
837 residue = get_dma_residue(znet->tx_dma); in show_dma()
869 disable_dma(znet->tx_dma); in hardware_init()
870 clear_dma_ff(znet->tx_dma); in hardware_init()
871 set_dma_mode(znet->tx_dma, DMA_TX_MODE); in hardware_init()
[all …]
Dmace.c 44 volatile struct dbdma_regs __iomem *tx_dma; member
170 mp->tx_dma = ioremap(macio_resource_start(mdev, 1), 0x1000); in mace_probe()
171 if (mp->tx_dma == NULL) { in mace_probe()
258 iounmap(mp->tx_dma); in mace_probe()
287 iounmap(mp->tx_dma); in mace_remove()
428 volatile struct dbdma_regs __iomem *td = mp->tx_dma; in mace_open()
498 volatile struct dbdma_regs __iomem *td = mp->tx_dma; in mace_close()
529 volatile struct dbdma_regs __iomem *td = mp->tx_dma; in mace_xmit_start()
660 volatile struct dbdma_regs __iomem *td = mp->tx_dma; in mace_interrupt()
806 volatile struct dbdma_regs __iomem *td = mp->tx_dma; in mace_tx_timeout()
Dbmac.c 63 volatile struct dbdma_regs __iomem *tx_dma; member
229 volatile struct dbdma_regs __iomem *td = bp->tx_dma; in bmac_enable_and_reset_chip()
478 volatile struct dbdma_regs __iomem *td = bp->tx_dma; in bmac_suspend()
591 volatile struct dbdma_regs __iomem *td = bp->tx_dma; in bmac_init_tx_ring()
646 volatile struct dbdma_regs __iomem *td = bp->tx_dma; in bmac_transmit_packet()
779 if (cp == bus_to_virt(in_le32(&bp->tx_dma->cmdptr))) in bmac_txdma_intr()
1324 bp->tx_dma = ioremap(macio_resource_start(mdev, 1), macio_resource_len(mdev, 1)); in bmac_probe()
1325 if (!bp->tx_dma) in bmac_probe()
1384 iounmap(bp->tx_dma); in bmac_probe()
1411 volatile struct dbdma_regs __iomem *td = bp->tx_dma; in bmac_close()
[all …]
Dau1000_eth.h 57 typedef struct tx_dma { struct
Dsis190.c 286 dma_addr_t tx_dma; member
888 SIS_W32(TxDescStartAddr, tp->tx_dma); in sis190_hw_start()
1056 tp->TxDescRing = pci_alloc_consistent(pdev, TX_RING_BYTES, &tp->tx_dma); in sis190_open()
1086 tp->tx_dma); in sis190_open()
1148 pci_free_consistent(pdev, TX_RING_BYTES, tp->TxDescRing, tp->tx_dma); in sis190_close()
Dnatsemi.c 548 dma_addr_t tx_dma[TX_RING_SIZE]; member
2009 np->tx_dma[i], np->tx_skbuff[i]->len, in drain_tx()
2096 np->tx_dma[entry] = pci_map_single(np->pci_dev, in start_tx()
2099 np->tx_ring[entry].addr = cpu_to_le32(np->tx_dma[entry]); in start_tx()
2160 pci_unmap_single(np->pci_dev,np->tx_dma[entry], in netdev_tx_done()
/drivers/mmc/host/
Dmmc_spi.c 467 t->tx_dma = t->rx_dma = host->data_dma; in mmc_spi_command_send()
532 t->tx_dma = dma + offsetof(struct scratch, data_token); in mmc_spi_setup_data_message()
542 t->tx_dma = host->ones_dma; in mmc_spi_setup_data_message()
553 t->tx_dma = dma + offsetof(struct scratch, crc_val); in mmc_spi_setup_data_message()
556 t->tx_dma = host->ones_dma; in mmc_spi_setup_data_message()
584 t->tx_dma = host->ones_dma; in mmc_spi_setup_data_message()
666 t->tx_dma += t->len; in mmc_spi_writeblock()
821 t->tx_dma = dma_addr + sg->offset; in mmc_spi_data_do()
901 host->early_status.tx_dma = host->early_status.rx_dma; in mmc_spi_data_do()
1330 host->status.tx_dma = host->ones_dma; in mmc_spi_probe()
/drivers/atm/
Deni.h 79 void __iomem *tx_dma; /* TX DMA queue */ member
Deni.c 1163 writel(eni_dev->dma[i*2],eni_dev->tx_dma+dma_wr*8); in do_tx()
1164 writel(eni_dev->dma[i*2+1],eni_dev->tx_dma+dma_wr*8+4); in do_tx()
1826 eni_dev->tx_dma = eni_dev->rx_dma+NR_DMA_RX*8; in eni_start()
1827 eni_dev->service = eni_dev->tx_dma+NR_DMA_TX*8; in eni_start()
1830 eni_dev->vci,eni_dev->rx_dma,eni_dev->tx_dma, in eni_start()
/drivers/mtd/devices/
Dmtd_dataflash.c 152 struct spi_transfer x = { .tx_dma = 0, }; in dataflash_erase()
241 struct spi_transfer x[2] = { { .tx_dma = 0, }, }; in dataflash_read()
315 struct spi_transfer x[2] = { { .tx_dma = 0, }, }; in dataflash_write()
/drivers/scsi/
Dmvsas.c 657 dma_addr_t tx_dma; member
799 addr, mvi->tx_dma, w_ptr); in mvs_hba_memory_dump()
801 (u32) mvi->tx_dma + sizeof(u32) * w_ptr); in mvs_hba_memory_dump()
2237 mvi->tx, mvi->tx_dma); in mvs_free()
2441 &mvi->tx_dma, GFP_KERNEL); in mvs_alloc()
2953 mw32(TX_LO, mvi->tx_dma); in mvs_hw_init()
2954 mw32(TX_HI, (mvi->tx_dma >> 16) >> 16); in mvs_hw_init()