
Searched refs:chan_tx (Results 1 – 19 of 19) sorted by relevance

/drivers/mmc/host/
renesas_sdhi_sys_dmac.c
100 if (!host->chan_tx || !host->chan_rx) in renesas_sdhi_sys_dmac_enable_dma()
113 if (host->chan_tx) in renesas_sdhi_sys_dmac_abort_dma()
114 dmaengine_terminate_sync(host->chan_tx); in renesas_sdhi_sys_dmac_abort_dma()
141 dma_unmap_sg(host->chan_tx->device->dev, in renesas_sdhi_sys_dmac_dma_callback()
217 chan = host->chan_tx; in renesas_sdhi_sys_dmac_start_dma_rx()
219 host->chan_tx = NULL; in renesas_sdhi_sys_dmac_start_dma_rx()
232 struct dma_chan *chan = host->chan_tx; in renesas_sdhi_sys_dmac_start_dma_tx()
290 host->chan_tx = NULL; in renesas_sdhi_sys_dmac_start_dma_tx()
310 if (host->chan_tx) in renesas_sdhi_sys_dmac_start_dma()
326 chan = host->chan_tx; in renesas_sdhi_sys_dmac_issue_tasklet_fn()
[all …]
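
The renesas_sdhi_sys_dmac.c hits above unmap the scatterlist against host->chan_tx->device->dev inside the DMA callback, following the usual dmaengine rule that a buffer mapped for a channel is unmapped against that channel's own device. A minimal sketch of such a TX completion callback; foo_host and its fields are illustrative names (the real driver keeps the equivalent state in its host structure, cf. the chan_tx member in tmio_mmc.h below):

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Illustrative host context standing in for the driver's real host struct. */
struct foo_host {
        struct dma_chan *chan_tx;
        struct scatterlist *sg_ptr;
        unsigned int sg_len;
};

/* TX completion callback: the scatterlist was dma_map_sg()'d against the DMA
 * channel's device, so it is unmapped against that same device, mirroring the
 * chan_tx->device->dev usage in the hit above. */
static void foo_dma_tx_callback(void *arg)
{
        struct foo_host *host = arg;

        dma_unmap_sg(host->chan_tx->device->dev, host->sg_ptr, host->sg_len,
                     DMA_TO_DEVICE);
        /* ... signal completion of the MMC request here ... */
}
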
sh_mmcif.c
246 struct dma_chan *chan_tx; member
322 chan = host->chan_tx; in sh_mmcif_start_dma_rx()
324 host->chan_tx = NULL; in sh_mmcif_start_dma_rx()
341 struct dma_chan *chan = host->chan_tx; in sh_mmcif_start_dma_tx()
368 host->chan_tx = NULL; in sh_mmcif_start_dma_tx()
432 host->chan_tx = sh_mmcif_request_dma_pdata(host, in sh_mmcif_request_dma()
437 host->chan_tx = dma_request_chan(dev, "tx"); in sh_mmcif_request_dma()
438 if (IS_ERR(host->chan_tx)) in sh_mmcif_request_dma()
439 host->chan_tx = NULL; in sh_mmcif_request_dma()
444 dev_dbg(dev, "%s: got channel TX %p RX %p\n", __func__, host->chan_tx, in sh_mmcif_request_dma()
[all …]
usdhi6rol0.c
199 struct dma_chan *chan_tx; member
520 if (host->chan_tx) { in usdhi6_dma_release()
521 struct dma_chan *chan = host->chan_tx; in usdhi6_dma_release()
522 host->chan_tx = NULL; in usdhi6_dma_release()
546 dma_unmap_sg(host->chan_tx->device->dev, data->sg, in usdhi6_dma_stop_unmap()
617 if (!host->chan_rx || !host->chan_tx) in usdhi6_dma_start()
623 return usdhi6_dma_setup(host, host->chan_tx, DMA_MEM_TO_DEV); in usdhi6_dma_start()
636 dmaengine_terminate_sync(host->chan_tx); in usdhi6_dma_kill()
670 dma_async_issue_pending(host->chan_tx); in usdhi6_dma_kick()
681 host->chan_tx = dma_request_chan(mmc_dev(host->mmc), "tx"); in usdhi6_dma_request()
[all …]
renesas_sdhi_internal_dmac.c
301 if (!host->chan_tx || !host->chan_rx) in renesas_sdhi_internal_dmac_enable_dma()
544 host->chan_rx = host->chan_tx = (void *)0xdeadbeaf; in renesas_sdhi_internal_dmac_request_dma()
562 host->chan_rx = host->chan_tx = NULL; in renesas_sdhi_internal_dmac_release_dma()
uniphier-sd.c
200 host->chan_tx = chan; in uniphier_sd_external_dma_request()
318 host->chan_tx = (void *)0xdeadbeaf; in uniphier_sd_internal_dma_request()
327 host->chan_tx = NULL; in uniphier_sd_internal_dma_release()
tmio_mmc.h
158 struct dma_chan *chan_tx; member
tmio_mmc_core.c
78 host->chan_tx = NULL; in tmio_mmc_request_dma()
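
All of the MMC hosts above gate DMA on chan_tx being non-NULL. sh_mmcif.c and usdhi6rol0.c obtain the channel from the dmaengine framework and reset the pointer to NULL on failure so the driver silently falls back to PIO; renesas_sdhi_internal_dmac.c and uniphier-sd.c, whose DMA engine is built into the controller, appear to store only a dummy non-NULL marker (0xdeadbeaf) to satisfy the same check. A minimal sketch of the request-with-fallback pattern, with foo_host as a stand-in host structure:

#include <linux/dmaengine.h>
#include <linux/err.h>

struct foo_host {
        struct dma_chan *chan_tx;
        struct dma_chan *chan_rx;
};

/* Request the named slave channels; any failure leaves the pointer NULL so
 * every later "if (host->chan_tx)" test quietly selects PIO instead. */
static void foo_request_dma(struct foo_host *host, struct device *dev)
{
        host->chan_tx = dma_request_chan(dev, "tx");
        if (IS_ERR(host->chan_tx))
                host->chan_tx = NULL;

        host->chan_rx = dma_request_chan(dev, "rx");
        if (IS_ERR(host->chan_rx))
                host->chan_rx = NULL;
}
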
/drivers/i2c/busses/
i2c-stm32.c
26 dma->chan_tx = dma_request_chan(dev, "tx"); in stm32_i2c_dma_request()
27 if (IS_ERR(dma->chan_tx)) { in stm32_i2c_dma_request()
28 ret = PTR_ERR(dma->chan_tx); in stm32_i2c_dma_request()
40 ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig); in stm32_i2c_dma_request()
71 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx)); in stm32_i2c_dma_request()
78 dma_release_channel(dma->chan_tx); in stm32_i2c_dma_request()
90 dma_release_channel(dma->chan_tx); in stm32_i2c_dma_free()
91 dma->chan_tx = NULL; in stm32_i2c_dma_free()
113 dma->chan_using = dma->chan_tx; in stm32_i2c_prep_dma_xfer()
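
stm32_i2c_dma_request() above pairs the channel request with a dmaengine_slave_config() call before any transfer is prepared, and releases the channel again on its error path. A hedged sketch of that TX-side setup; foo_i2c_request_tx and txdr_addr (the controller's TX data register address) are illustrative names:

#include <linux/dmaengine.h>
#include <linux/err.h>

static struct dma_chan *foo_i2c_request_tx(struct device *dev,
                                           dma_addr_t txdr_addr)
{
        struct dma_slave_config cfg = {
                .dst_addr = txdr_addr,                  /* peripheral TX register */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
                .dst_maxburst = 1,
        };
        struct dma_chan *chan;
        int ret;

        chan = dma_request_chan(dev, "tx");
        if (IS_ERR(chan))
                return chan;                            /* propagate -EPROBE_DEFER etc. */

        ret = dmaengine_slave_config(chan, &cfg);
        if (ret) {
                dma_release_channel(chan);              /* mirror the error path above */
                return ERR_PTR(ret);
        }
        return chan;
}
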
i2c-at91-master.c
142 dmaengine_terminate_sync(dma->chan_tx); in at91_twi_dma_cleanup()
198 struct dma_chan *chan_tx = dma->chan_tx; in at91_twi_write_data_dma() local
250 txdesc = dmaengine_prep_slave_sg(chan_tx, dma->sg, sg_len, in at91_twi_write_data_dma()
263 dma_async_issue_pending(chan_tx); in at91_twi_write_data_dma()
780 dma->chan_tx = dma_request_chan(dev->dev, "tx"); in at91_twi_configure_dma()
781 if (IS_ERR(dma->chan_tx)) { in at91_twi_configure_dma()
782 ret = PTR_ERR(dma->chan_tx); in at91_twi_configure_dma()
783 dma->chan_tx = NULL; in at91_twi_configure_dma()
795 if (dmaengine_slave_config(dma->chan_tx, &slave_config)) { in at91_twi_configure_dma()
814 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx)); in at91_twi_configure_dma()
[all …]
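
at91_twi_write_data_dma() above shows the standard TX submit sequence: prepare a MEM_TO_DEV slave descriptor over the mapped scatterlist, attach a completion callback, submit, then kick the channel with dma_async_issue_pending(). A minimal, hedged version of that sequence; foo_submit_tx and its parameters are illustrative, and mapping the scatterlist is assumed to have been done by the caller:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

static int foo_submit_tx(struct dma_chan *chan_tx, struct scatterlist *sg,
                         unsigned int sg_len, dma_async_tx_callback done,
                         void *ctx)
{
        struct dma_async_tx_descriptor *txdesc;
        dma_cookie_t cookie;

        txdesc = dmaengine_prep_slave_sg(chan_tx, sg, sg_len, DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!txdesc)
                return -EINVAL;

        txdesc->callback = done;                /* runs when the transfer completes */
        txdesc->callback_param = ctx;

        cookie = dmaengine_submit(txdesc);
        if (dma_submit_error(cookie))
                return -EIO;

        dma_async_issue_pending(chan_tx);       /* actually start the engine */
        return 0;
}
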
i2c-imx.c
190 struct dma_chan *chan_tx; member
361 dma->chan_tx = dma_request_chan(dev, "tx"); in i2c_imx_dma_request()
362 if (IS_ERR(dma->chan_tx)) { in i2c_imx_dma_request()
363 ret = PTR_ERR(dma->chan_tx); in i2c_imx_dma_request()
374 ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig); in i2c_imx_dma_request()
402 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx)); in i2c_imx_dma_request()
409 dma_release_channel(dma->chan_tx); in i2c_imx_dma_request()
474 dma_release_channel(dma->chan_tx); in i2c_imx_dma_free()
475 dma->chan_tx = NULL; in i2c_imx_dma_free()
961 dma->chan_using = dma->chan_tx; in i2c_imx_dma_write()
i2c-stm32.h
37 struct dma_chan *chan_tx; member
i2c-at91.h
129 struct dma_chan *chan_tx; member
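
The header hits here (i2c-imx.c, i2c-stm32.h, i2c-at91.h) all declare the same shape of per-controller DMA state: one slave channel per direction plus a pointer to whichever one the current transfer selected (the dma->chan_using assignments above). A hypothetical equivalent:

#include <linux/dmaengine.h>

struct foo_i2c_dma {
        struct dma_chan *chan_tx;       /* slave channel towards the TX FIFO */
        struct dma_chan *chan_rx;       /* slave channel from the RX FIFO */
        struct dma_chan *chan_using;    /* chan_tx or chan_rx for the transfer in flight */
};
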
/drivers/net/ethernet/
lantiq_xrx200.c
81 struct xrx200_chan chan_tx; member
151 napi_enable(&priv->chan_tx.napi); in xrx200_open()
152 ltq_dma_open(&priv->chan_tx.dma); in xrx200_open()
153 ltq_dma_enable_irq(&priv->chan_tx.dma); in xrx200_open()
181 napi_disable(&priv->chan_tx.napi); in xrx200_close()
182 ltq_dma_close(&priv->chan_tx.dma); in xrx200_close()
361 struct xrx200_chan *ch = &priv->chan_tx; in xrx200_start_xmit()
487 struct xrx200_chan *ch_tx = &priv->chan_tx; in xrx200_dma_init()
520 "xrx200_net_tx", &priv->chan_tx); in xrx200_dma_init()
548 ltq_dma_free(&priv->chan_tx.dma); in xrx200_hw_cleanup()
[all …]
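
Unlike the other results, lantiq_xrx200.c's chan_tx is not a dmaengine channel at all: it is a driver-private struct xrx200_chan wrapping a Lantiq SoC DMA ring plus a NAPI context. A hedged sketch of the open/close ordering the hits show; the struct layout, the include path and the foo_* names are assumptions, and ltq_dma_* are the Lantiq platform DMA helpers the driver relies on:

#include <linux/netdevice.h>
#include <xway_dma.h>                   /* Lantiq DMA API (assumed include path) */

struct foo_chan {
        struct napi_struct napi;
        struct ltq_dma_channel dma;
};

struct foo_priv {
        struct foo_chan chan_tx;
        struct foo_chan chan_rx;
};

static void foo_open_tx(struct foo_priv *priv)
{
        napi_enable(&priv->chan_tx.napi);       /* polling first, as in xrx200_open() */
        ltq_dma_open(&priv->chan_tx.dma);       /* then the DMA ring */
        ltq_dma_enable_irq(&priv->chan_tx.dma);
}

static void foo_close_tx(struct foo_priv *priv)
{
        napi_disable(&priv->chan_tx.napi);
        ltq_dma_close(&priv->chan_tx.dma);
}
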
/drivers/spi/
spi-fsl-dspi.c
198 struct dma_chan *chan_tx; member
391 dma->tx_desc = dmaengine_prep_slave_single(dma->chan_tx, in dspi_next_xfer_dma_submit()
431 dma_async_issue_pending(dma->chan_tx); in dspi_next_xfer_dma_submit()
442 dmaengine_terminate_all(dma->chan_tx); in dspi_next_xfer_dma_submit()
451 dmaengine_terminate_all(dma->chan_tx); in dspi_next_xfer_dma_submit()
510 dma->chan_tx = dma_request_chan(dev, "tx"); in dspi_request_dma()
511 if (IS_ERR(dma->chan_tx)) { in dspi_request_dma()
512 ret = PTR_ERR(dma->chan_tx); in dspi_request_dma()
517 dma->tx_dma_buf = dma_alloc_coherent(dma->chan_tx->device->dev, in dspi_request_dma()
550 ret = dmaengine_slave_config(dma->chan_tx, &cfg); in dspi_request_dma()
[all …]
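
spi-fsl-dspi.c above transmits from a coherent buffer (allocated with dma_alloc_coherent() against chan_tx->device->dev), so a single-buffer slave descriptor is enough, and a transfer that never completes is torn down with dmaengine_terminate_all(). A hedged sketch of that submit path; the names and the timeout value are illustrative, and the completion is assumed to be (re)initialised by the caller:

#include <linux/dmaengine.h>
#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

static void foo_tx_done(void *arg)
{
        complete(arg);                          /* wake the submitter */
}

static int foo_dspi_submit_tx(struct dma_chan *chan_tx, dma_addr_t tx_dma_phys,
                              size_t len, struct completion *done)
{
        struct dma_async_tx_descriptor *desc;

        desc = dmaengine_prep_slave_single(chan_tx, tx_dma_phys, len,
                                           DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -EINVAL;

        desc->callback = foo_tx_done;
        desc->callback_param = done;
        dmaengine_submit(desc);
        dma_async_issue_pending(chan_tx);

        if (!wait_for_completion_timeout(done, msecs_to_jiffies(100))) {
                dmaengine_terminate_all(chan_tx);   /* cancel the stuck transfer */
                return -ETIMEDOUT;
        }
        return 0;
}
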
spi-topcliff-pch.c
111 struct dma_chan *chan_tx; member
853 dma->chan_tx = chan; in pch_spi_request_dma()
865 dma_release_channel(dma->chan_tx); in pch_spi_request_dma()
866 dma->chan_tx = NULL; in pch_spi_request_dma()
883 if (dma->chan_tx) { in pch_spi_release_dma()
884 dma_release_channel(dma->chan_tx); in pch_spi_release_dma()
885 dma->chan_tx = NULL; in pch_spi_release_dma()
1077 desc_tx = dmaengine_prep_slave_sg(dma->chan_tx, in pch_spi_handle_dma()
/drivers/tty/serial/
pch_uart.c
228 struct dma_chan *chan_tx; member
640 if (priv->chan_tx) { in pch_free_dma()
641 dma_release_channel(priv->chan_tx); in pch_free_dma()
642 priv->chan_tx = NULL; in pch_free_dma()
700 priv->chan_tx = chan; in pch_request_dma()
712 dma_release_channel(priv->chan_tx); in pch_request_dma()
713 priv->chan_tx = NULL; in pch_request_dma()
962 desc = dmaengine_prep_slave_sg(priv->chan_tx, in dma_handle_tx()
977 dma_async_issue_pending(priv->chan_tx); in dma_handle_tx()
sh-sci.c
134 struct dma_chan *chan_tx; member
580 if (s->chan_tx) in sci_start_tx()
588 if (s->chan_tx && !uart_circ_empty(&s->port.state->xmit) && in sci_start_tx()
599 if (!s->chan_tx || s->cfg->regtype == SCIx_RZ_SCIFA_REGTYPE || in sci_start_tx()
631 if (to_sci_port(port)->chan_tx && in sci_stop_tx()
633 dmaengine_terminate_async(to_sci_port(port)->chan_tx); in sci_stop_tx()
1376 s->chan_tx_saved = s->chan_tx = NULL; in sci_dma_tx_release()
1431 struct dma_chan *chan = s->chan_tx; in sci_dma_tx_work_fn()
1486 s->chan_tx = NULL; in sci_dma_tx_work_fn()
1635 s->chan_tx_saved = s->chan_tx = chan; in sci_request_dma()
[all …]
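
sh-sci.c above cancels the TX channel with dmaengine_terminate_async() from sci_stop_tx(), which runs under the uart port lock, while the MMC and I2C hits use dmaengine_terminate_sync() on their teardown paths. A short, hedged illustration of why both exist; the foo_* names are stand-ins:

#include <linux/dmaengine.h>

/* Safe in atomic context (e.g. under the port spinlock): requests
 * cancellation but does not wait for in-flight descriptor callbacks. */
static void foo_stop_tx_dma(struct dma_chan *chan_tx)
{
        dmaengine_terminate_async(chan_tx);
}

/* Process context only: waits until all callbacks have finished before the
 * channel is handed back, as on the release/teardown paths above. */
static void foo_release_tx_dma(struct dma_chan *chan_tx)
{
        dmaengine_terminate_sync(chan_tx);
        dma_release_channel(chan_tx);
}
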
atmel_serial.c
129 struct dma_chan *chan_tx; member
861 struct dma_chan *chan = atmel_port->chan_tx; in atmel_complete_tx_dma()
902 struct dma_chan *chan = atmel_port->chan_tx; in atmel_release_tx_dma()
912 atmel_port->chan_tx = NULL; in atmel_release_tx_dma()
923 struct dma_chan *chan = atmel_port->chan_tx; in atmel_tx_dma()
1021 atmel_port->chan_tx = dma_request_slave_channel(mfd_dev, "tx"); in atmel_prepare_tx_dma()
1022 if (atmel_port->chan_tx == NULL) in atmel_prepare_tx_dma()
1025 dma_chan_name(atmel_port->chan_tx)); in atmel_prepare_tx_dma()
1059 ret = dmaengine_slave_config(atmel_port->chan_tx, in atmel_prepare_tx_dma()
1071 if (atmel_port->chan_tx) in atmel_prepare_tx_dma()
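
atmel_prepare_tx_dma() above uses dma_request_slave_channel(), which signals failure by returning NULL rather than an ERR_PTR(), hence the "== NULL" test at line 1022; most of the other drivers in these results use dma_request_chan() and IS_ERR(). A minimal illustration of the two conventions, with foo_* as stand-in names:

#include <linux/dmaengine.h>
#include <linux/err.h>

/* Convention used by atmel_serial.c: NULL simply means "no DMA, use PIO". */
static struct dma_chan *foo_get_tx_legacy(struct device *dev)
{
        return dma_request_slave_channel(dev, "tx");    /* NULL on failure */
}

/* Convention used by most other drivers above: normalise the ERR_PTR(). */
static struct dma_chan *foo_get_tx(struct device *dev)
{
        struct dma_chan *chan = dma_request_chan(dev, "tx");

        return IS_ERR(chan) ? NULL : chan;
}
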
/drivers/firmware/
ti_sci.c
111 struct mbox_chan *chan_tx; member
400 ret = mbox_send_message(info->chan_tx, &xfer->tx_message); in ti_sci_do_xfer()
432 mbox_client_txdone(info->chan_tx, ret); in ti_sci_do_xfer()
3396 info->chan_tx = mbox_request_channel_byname(cl, "tx"); in ti_sci_probe()
3397 if (IS_ERR(info->chan_tx)) { in ti_sci_probe()
3398 ret = PTR_ERR(info->chan_tx); in ti_sci_probe()
3431 if (!IS_ERR(info->chan_tx)) in ti_sci_probe()
3432 mbox_free_channel(info->chan_tx); in ti_sci_probe()
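
ti_sci.c is the one result where chan_tx is a mailbox channel rather than a DMA channel: it is requested by name in probe, used with mbox_send_message() and mbox_client_txdone() for each transfer, and freed on the error/remove path. A hedged sketch of that shape; foo_info and the surrounding details are illustrative:

#include <linux/mailbox_client.h>
#include <linux/err.h>

struct foo_info {
        struct mbox_client cl;          /* cl.dev, cl.rx_callback, cl.knows_txdone
                                         * are assumed to be set before probe uses it */
        struct mbox_chan *chan_tx;
};

static int foo_request_tx_mbox(struct foo_info *info)
{
        info->chan_tx = mbox_request_channel_byname(&info->cl, "tx");
        if (IS_ERR(info->chan_tx))
                return PTR_ERR(info->chan_tx);
        return 0;
}

static int foo_do_xfer(struct foo_info *info, void *msg)
{
        int ret;

        ret = mbox_send_message(info->chan_tx, msg);
        if (ret < 0)
                return ret;

        /* ... wait for the firmware's response here ... */

        /* Client-managed TX-done reporting, as in ti_sci_do_xfer() above. */
        mbox_client_txdone(info->chan_tx, 0);
        return 0;
}

static void foo_free_tx_mbox(struct foo_info *info)
{
        if (!IS_ERR_OR_NULL(info->chan_tx))
                mbox_free_channel(info->chan_tx);
}
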