Searched refs:dma_rx (Results 1 – 25 of 46) sorted by relevance

/kernel/linux/linux-5.10/drivers/spi/
spi-pxa2xx-dma.c
109 chan = drv_data->controller->dma_rx; in pxa2xx_spi_dma_prepare_one()
130 dmaengine_terminate_async(drv_data->controller->dma_rx); in pxa2xx_spi_dma_transfer()
178 dma_async_issue_pending(drv_data->controller->dma_rx); in pxa2xx_spi_dma_start()
187 dmaengine_terminate_sync(drv_data->controller->dma_rx); in pxa2xx_spi_dma_stop()
206 controller->dma_rx = dma_request_slave_channel_compat(mask, in pxa2xx_spi_dma_setup()
208 if (!controller->dma_rx) { in pxa2xx_spi_dma_setup()
221 if (controller->dma_rx) { in pxa2xx_spi_dma_release()
222 dmaengine_terminate_sync(controller->dma_rx); in pxa2xx_spi_dma_release()
223 dma_release_channel(controller->dma_rx); in pxa2xx_spi_dma_release()
224 controller->dma_rx = NULL; in pxa2xx_spi_dma_release()
spi-bcm2835.c
517 dma_sync_single_for_device(ctlr->dma_rx->device->dev, in bcm2835_spi_transfer_prologue()
641 dmaengine_terminate_async(ctlr->dma_rx); in bcm2835_spi_dma_tx_done()
682 chan = ctlr->dma_rx; in bcm2835_spi_prepare_sg()
817 dma_async_issue_pending(ctlr->dma_rx); in bcm2835_spi_transfer_one_dma()
827 dmaengine_terminate_async(ctlr->dma_rx); in bcm2835_spi_transfer_one_dma()
873 if (ctlr->dma_rx) { in bcm2835_dma_release()
874 dmaengine_terminate_sync(ctlr->dma_rx); in bcm2835_dma_release()
881 dma_unmap_single(ctlr->dma_rx->device->dev, in bcm2835_dma_release()
886 dma_release_channel(ctlr->dma_rx); in bcm2835_dma_release()
887 ctlr->dma_rx = NULL; in bcm2835_dma_release()
[all …]
spi-at91-usart.c
148 ctlr->dma_rx = dma_request_chan(dev, "rx"); in at91_usart_spi_configure_dma()
149 if (IS_ERR_OR_NULL(ctlr->dma_rx)) { in at91_usart_spi_configure_dma()
150 if (IS_ERR(ctlr->dma_rx)) { in at91_usart_spi_configure_dma()
151 err = PTR_ERR(ctlr->dma_rx); in at91_usart_spi_configure_dma()
170 if (dmaengine_slave_config(ctlr->dma_rx, &slave_config)) { in at91_usart_spi_configure_dma()
191 if (!IS_ERR_OR_NULL(ctlr->dma_rx)) in at91_usart_spi_configure_dma()
192 dma_release_channel(ctlr->dma_rx); in at91_usart_spi_configure_dma()
194 ctlr->dma_rx = NULL; in at91_usart_spi_configure_dma()
202 if (ctlr->dma_rx) in at91_usart_spi_release_dma()
203 dma_release_channel(ctlr->dma_rx); in at91_usart_spi_release_dma()
[all …]
spi-pic32.c
309 if (!master->dma_rx || !master->dma_tx) in pic32_spi_dma_transfer()
312 desc_rx = dmaengine_prep_slave_sg(master->dma_rx, in pic32_spi_dma_transfer()
346 dma_async_issue_pending(master->dma_rx); in pic32_spi_dma_transfer()
352 dmaengine_terminate_all(master->dma_rx); in pic32_spi_dma_transfer()
383 ret = dmaengine_slave_config(master->dma_rx, &cfg); in pic32_spi_dma_config()
554 dmaengine_terminate_all(master->dma_rx); in pic32_spi_one_transfer()
615 master->dma_rx = dma_request_chan(dev, "spi-rx"); in pic32_spi_dma_prep()
616 if (IS_ERR(master->dma_rx)) { in pic32_spi_dma_prep()
617 if (PTR_ERR(master->dma_rx) == -EPROBE_DEFER) in pic32_spi_dma_prep()
622 master->dma_rx = NULL; in pic32_spi_dma_prep()
[all …]
spi-rockchip.c
269 dmaengine_terminate_async(ctlr->dma_rx); in rockchip_spi_handle_err()
420 dmaengine_slave_config(ctlr->dma_rx, &rxconf); in rockchip_spi_prepare_dma()
423 ctlr->dma_rx, in rockchip_spi_prepare_dma()
450 dmaengine_terminate_sync(ctlr->dma_rx); in rockchip_spi_prepare_dma()
462 dma_async_issue_pending(ctlr->dma_rx); in rockchip_spi_prepare_dma()
772 ctlr->dma_rx = dma_request_chan(rs->dev, "rx"); in rockchip_spi_probe()
773 if (IS_ERR(ctlr->dma_rx)) { in rockchip_spi_probe()
774 if (PTR_ERR(ctlr->dma_rx) == -EPROBE_DEFER) { in rockchip_spi_probe()
779 ctlr->dma_rx = NULL; in rockchip_spi_probe()
782 if (ctlr->dma_tx && ctlr->dma_rx) { in rockchip_spi_probe()
[all …]
spi-uniphier.c
356 if ((!master->dma_tx && !master->dma_rx) in uniphier_spi_can_dma()
358 || (!master->dma_rx && t->rx_buf)) in uniphier_spi_can_dma()
415 dmaengine_slave_config(master->dma_rx, &rxconf); in uniphier_spi_transfer_one_dma()
418 master->dma_rx, in uniphier_spi_transfer_one_dma()
431 dma_async_issue_pending(master->dma_rx); in uniphier_spi_transfer_one_dma()
466 dmaengine_terminate_sync(master->dma_rx); in uniphier_spi_transfer_one_dma()
595 dmaengine_terminate_async(master->dma_rx); in uniphier_spi_handle_err()
734 master->dma_rx = dma_request_chan(&pdev->dev, "rx"); in uniphier_spi_probe()
735 if (IS_ERR_OR_NULL(master->dma_rx)) { in uniphier_spi_probe()
736 if (PTR_ERR(master->dma_rx) == -EPROBE_DEFER) { in uniphier_spi_probe()
[all …]
spi-fsl-lpspi.c
180 if (!controller->dma_rx) in fsl_lpspi_can_dma()
368 ret = dmaengine_slave_config(controller->dma_rx, &rx); in fsl_lpspi_dma_configure()
559 desc_rx = dmaengine_prep_slave_sg(controller->dma_rx, in fsl_lpspi_dma_transfer()
569 dma_async_issue_pending(controller->dma_rx); in fsl_lpspi_dma_transfer()
597 dmaengine_terminate_all(controller->dma_rx); in fsl_lpspi_dma_transfer()
607 dmaengine_terminate_all(controller->dma_rx); in fsl_lpspi_dma_transfer()
617 dmaengine_terminate_all(controller->dma_rx); in fsl_lpspi_dma_transfer()
627 dmaengine_terminate_all(controller->dma_rx); in fsl_lpspi_dma_transfer()
640 if (controller->dma_rx) { in fsl_lpspi_dma_exit()
641 dma_release_channel(controller->dma_rx); in fsl_lpspi_dma_exit()
[all …]
spi-ep93xx.c
93 struct dma_chan *dma_rx; member
296 chan = espi->dma_rx; in ep93xx_spi_dma_prepare()
384 chan = espi->dma_rx; in ep93xx_spi_dma_finish()
430 dma_async_issue_pending(espi->dma_rx); in ep93xx_spi_dma_transfer()
505 if (espi->dma_rx && xfer->len > SPI_FIFO_SIZE) in ep93xx_spi_transfer_one()
603 espi->dma_rx = dma_request_channel(mask, ep93xx_spi_dma_filter, in ep93xx_spi_setup_dma()
605 if (!espi->dma_rx) { in ep93xx_spi_setup_dma()
624 dma_release_channel(espi->dma_rx); in ep93xx_spi_setup_dma()
625 espi->dma_rx = NULL; in ep93xx_spi_setup_dma()
634 if (espi->dma_rx) { in ep93xx_spi_release_dma()
[all …]
spi-davinci.c
120 struct dma_chan *dma_rx; member
396 if (dspi->dma_rx && dspi->dma_tx) in davinci_spi_of_setup()
621 dmaengine_slave_config(dspi->dma_rx, &dma_rx_conf); in davinci_spi_bufs()
624 rxdesc = dmaengine_prep_slave_sg(dspi->dma_rx, in davinci_spi_bufs()
657 dma_async_issue_pending(dspi->dma_rx); in davinci_spi_bufs()
748 dspi->dma_rx = dma_request_chan(sdev, "rx"); in davinci_spi_request_dma()
749 if (IS_ERR(dspi->dma_rx)) in davinci_spi_request_dma()
750 return PTR_ERR(dspi->dma_rx); in davinci_spi_request_dma()
754 dma_release_channel(dspi->dma_rx); in davinci_spi_request_dma()
968 dspi->dma_rx = NULL; in davinci_spi_probe()
[all …]
spi-omap2-mcspi.c
93 struct dma_chan *dma_rx; member
472 dmaengine_slave_config(mcspi_dma->dma_rx, &cfg); in omap2_mcspi_rx_dma()
503 tx = dmaengine_prep_slave_sg(mcspi_dma->dma_rx, sg_out[0], in omap2_mcspi_rx_dma()
514 dma_async_issue_pending(mcspi_dma->dma_rx); in omap2_mcspi_rx_dma()
519 dmaengine_terminate_sync(mcspi_dma->dma_rx); in omap2_mcspi_rx_dma()
991 mcspi_dma->dma_rx = dma_request_chan(mcspi->dev, in omap2_mcspi_request_dma()
993 if (IS_ERR(mcspi_dma->dma_rx)) { in omap2_mcspi_request_dma()
994 ret = PTR_ERR(mcspi_dma->dma_rx); in omap2_mcspi_request_dma()
995 mcspi_dma->dma_rx = NULL; in omap2_mcspi_request_dma()
1004 dma_release_channel(mcspi_dma->dma_rx); in omap2_mcspi_request_dma()
[all …]
spi-stm32.c
315 struct dma_chan *dma_rx; member
691 if (spi->cur_usedma && spi->dma_rx) in stm32f4_spi_disable()
692 dmaengine_terminate_all(spi->dma_rx); in stm32f4_spi_disable()
755 if (spi->cur_usedma && spi->dma_rx) in stm32h7_spi_disable()
756 dmaengine_terminate_all(spi->dma_rx); in stm32h7_spi_disable()
1293 if (spi->rx_buf && spi->dma_rx) { in stm32_spi_transfer_one_dma()
1295 dmaengine_slave_config(spi->dma_rx, &rx_dma_conf); in stm32_spi_transfer_one_dma()
1302 spi->dma_rx, xfer->rx_sg.sgl, in stm32_spi_transfer_one_dma()
1321 (spi->rx_buf && spi->dma_rx && !rx_dma_desc)) in stm32_spi_transfer_one_dma()
1336 dma_async_issue_pending(spi->dma_rx); in stm32_spi_transfer_one_dma()
[all …]
spi-imx.c
231 if (!master->dma_rx) in spi_imx_can_dma()
1190 ret = dmaengine_slave_config(master->dma_rx, &rx); in spi_imx_dma_configure()
1267 if (master->dma_rx) { in spi_imx_sdma_exit()
1268 dma_release_channel(master->dma_rx); in spi_imx_sdma_exit()
1269 master->dma_rx = NULL; in spi_imx_sdma_exit()
1299 master->dma_rx = dma_request_chan(dev, "rx"); in spi_imx_sdma_init()
1300 if (IS_ERR(master->dma_rx)) { in spi_imx_sdma_init()
1301 ret = PTR_ERR(master->dma_rx); in spi_imx_sdma_init()
1303 master->dma_rx = NULL; in spi_imx_sdma_init()
1387 desc_rx = dmaengine_prep_slave_sg(master->dma_rx, in spi_imx_dma_transfer()
[all …]
spi-atmel.c
493 if (dmaengine_slave_config(master->dma_rx, slave_config)) { in atmel_spi_dma_slave_config()
520 master->dma_rx = dma_request_chan(dev, "rx"); in atmel_spi_configure_dma()
521 if (IS_ERR(master->dma_rx)) { in atmel_spi_configure_dma()
522 err = PTR_ERR(master->dma_rx); in atmel_spi_configure_dma()
538 dma_chan_name(master->dma_rx)); in atmel_spi_configure_dma()
542 if (!IS_ERR(master->dma_rx)) in atmel_spi_configure_dma()
543 dma_release_channel(master->dma_rx); in atmel_spi_configure_dma()
547 master->dma_tx = master->dma_rx = NULL; in atmel_spi_configure_dma()
553 if (master->dma_rx) in atmel_spi_stop_dma()
554 dmaengine_terminate_all(master->dma_rx); in atmel_spi_stop_dma()
[all …]
spi-qup.c
402 chan = master->dma_rx; in spi_qup_prep_sg()
422 dmaengine_terminate_all(master->dma_rx); in spi_qup_dma_terminate()
489 dma_async_issue_pending(master->dma_rx); in spi_qup_do_dma()
897 IS_ERR_OR_NULL(master->dma_rx)) in spi_qup_can_dma()
920 if (!IS_ERR_OR_NULL(master->dma_rx)) in spi_qup_release_dma()
921 dma_release_channel(master->dma_rx); in spi_qup_release_dma()
935 master->dma_rx = dma_request_chan(dev, "rx"); in spi_qup_init_dma()
936 if (IS_ERR(master->dma_rx)) in spi_qup_init_dma()
937 return PTR_ERR(master->dma_rx); in spi_qup_init_dma()
956 ret = dmaengine_slave_config(master->dma_rx, rx_conf); in spi_qup_init_dma()
[all …]
spi-sh-msiof.c
751 desc_rx = dmaengine_prep_slave_single(p->ctlr->dma_rx, in sh_msiof_dma_once()
800 dma_async_issue_pending(p->ctlr->dma_rx); in sh_msiof_dma_once()
842 dma_sync_single_for_cpu(p->ctlr->dma_rx->device->dev, in sh_msiof_dma_once()
855 dmaengine_terminate_all(p->ctlr->dma_rx); in sh_msiof_dma_once()
1196 ctlr->dma_rx = sh_msiof_request_dma_chan(dev, DMA_DEV_TO_MEM, in sh_msiof_request_dma()
1198 if (!ctlr->dma_rx) in sh_msiof_request_dma()
1215 rx_dev = ctlr->dma_rx->device->dev; in sh_msiof_request_dma()
1231 dma_release_channel(ctlr->dma_rx); in sh_msiof_request_dma()
1245 dma_unmap_single(ctlr->dma_rx->device->dev, p->rx_dma_addr, PAGE_SIZE, in sh_msiof_release_dma()
1251 dma_release_channel(ctlr->dma_rx); in sh_msiof_release_dma()
spi-dw-dma.c
94 struct dw_dma_slave dma_rx = { .src_id = 0 }, *rx = &dma_rx; in dw_spi_dma_init_mfld() local
121 dws->master->dma_rx = dws->rxchan; in dw_spi_dma_init_mfld()
152 dws->master->dma_rx = dws->rxchan; in dw_spi_dma_init_generic()
spi-rspi.c
549 desc_rx = dmaengine_prep_slave_sg(rspi->ctlr->dma_rx, rx->sgl, in rspi_dma_transfer()
607 dma_async_issue_pending(rspi->ctlr->dma_rx); in rspi_dma_transfer()
623 dmaengine_terminate_all(rspi->ctlr->dma_rx); in rspi_dma_transfer()
637 dmaengine_terminate_all(rspi->ctlr->dma_rx); in rspi_dma_transfer()
1152 ctlr->dma_rx = rspi_request_dma_chan(dev, DMA_DEV_TO_MEM, dma_rx_id, in rspi_request_dma()
1154 if (!ctlr->dma_rx) { in rspi_request_dma()
1169 if (ctlr->dma_rx) in rspi_release_dma()
1170 dma_release_channel(ctlr->dma_rx); in rspi_release_dma()
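The SPI hits above all follow the same dmaengine slave-RX lifecycle: request the "rx" channel, describe the peripheral FIFO with dmaengine_slave_config(), prepare a DMA_DEV_TO_MEM descriptor over the transfer's scatterlist, issue it, and terminate/release on error or teardown. Below is a minimal sketch of that flow, assuming a struct spi_controller with a dma_rx channel as in the drivers above; my_setup_rx_dma(), my_start_rx_dma(), my_teardown_rx_dma() and MY_RX_FIFO_REG are hypothetical names, not taken from any of these drivers.

#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/spi/spi.h>

#define MY_RX_FIFO_REG	0x20	/* hypothetical RX FIFO register offset */

static int my_setup_rx_dma(struct spi_controller *ctlr, struct device *dev,
			   phys_addr_t phys_base)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_DEV_TO_MEM,
		.src_addr	= phys_base + MY_RX_FIFO_REG,
		.src_addr_width	= DMA_SLAVE_BUSWIDTH_1_BYTE,
		.src_maxburst	= 1,
	};

	/* Request the "rx" slave channel bound to this device in DT/ACPI. */
	ctlr->dma_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(ctlr->dma_rx)) {
		int ret = PTR_ERR(ctlr->dma_rx);

		ctlr->dma_rx = NULL;	/* most drivers then fall back to PIO */
		return ret;
	}

	/* Tell the DMA engine where the peripheral RX FIFO lives. */
	return dmaengine_slave_config(ctlr->dma_rx, &cfg);
}

static int my_start_rx_dma(struct spi_controller *ctlr, struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *desc;

	/* Build a device-to-memory descriptor over the mapped RX scatterlist. */
	desc = dmaengine_prep_slave_sg(ctlr->dma_rx, xfer->rx_sg.sgl,
				       xfer->rx_sg.nents, DMA_DEV_TO_MEM,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	/* A real driver sets desc->callback here to learn about completion. */
	if (dma_submit_error(dmaengine_submit(desc)))
		return -EIO;

	dma_async_issue_pending(ctlr->dma_rx);	/* kick the engine */
	return 0;
}

static void my_teardown_rx_dma(struct spi_controller *ctlr)
{
	/* On error or remove: stop outstanding work, then drop the channel. */
	if (ctlr->dma_rx) {
		dmaengine_terminate_sync(ctlr->dma_rx);
		dma_release_channel(ctlr->dma_rx);
		ctlr->dma_rx = NULL;
	}
}
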
/kernel/linux/linux-5.10/drivers/net/ethernet/micrel/
ks8842.c
148 ((adapter)->dma_rx.channel != -1))
160 struct ks8842_rx_dma_ctl dma_rx; member
543 struct ks8842_rx_dma_ctl *ctl = &adapter->dma_rx; in __ks8842_start_new_rx_dma()
592 struct ks8842_adapter *adapter = from_tasklet(adapter, t, dma_rx.tasklet); in ks8842_rx_frame_dma_tasklet()
594 struct ks8842_rx_dma_ctl *ctl = &adapter->dma_rx; in ks8842_rx_frame_dma_tasklet()
837 if (adapter->dma_rx.adesc) in ks8842_dma_rx_cb()
838 tasklet_schedule(&adapter->dma_rx.tasklet); in ks8842_dma_rx_cb()
862 struct ks8842_rx_dma_ctl *rx_ctl = &adapter->dma_rx; in ks8842_stop_dma()
884 struct ks8842_rx_dma_ctl *rx_ctl = &adapter->dma_rx; in ks8842_dealloc_dma_bufs()
916 struct ks8842_rx_dma_ctl *rx_ctl = &adapter->dma_rx; in ks8842_alloc_dma_bufs()
[all …]
/kernel/linux/linux-5.10/drivers/i2c/busses/
i2c-sh_mobile.c
136 struct dma_chan *dma_rx; member
448 ? pd->dma_rx : pd->dma_tx; in sh_mobile_i2c_dma_unmap()
461 dmaengine_terminate_all(pd->dma_rx); in sh_mobile_i2c_cleanup_dma()
519 struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx; in sh_mobile_i2c_xfer_dma()
526 chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM, in sh_mobile_i2c_xfer_dma()
833 if (!IS_ERR(pd->dma_rx)) { in sh_mobile_i2c_release_dma()
834 dma_release_channel(pd->dma_rx); in sh_mobile_i2c_release_dma()
835 pd->dma_rx = ERR_PTR(-EPROBE_DEFER); in sh_mobile_i2c_release_dma()
919 pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER); in sh_mobile_i2c_probe()
i2c-rcar.c
139 struct dma_chan *dma_rx; member
370 ? priv->dma_rx : priv->dma_tx; in rcar_i2c_dma_unmap()
391 dmaengine_terminate_all(priv->dma_rx); in rcar_i2c_cleanup_dma()
413 struct dma_chan *chan = read ? priv->dma_rx : priv->dma_tx; in rcar_i2c_dma()
759 chan = read ? priv->dma_rx : priv->dma_tx; in rcar_i2c_request_dma()
767 priv->dma_rx = chan; in rcar_i2c_request_dma()
779 if (!IS_ERR(priv->dma_rx)) { in rcar_i2c_release_dma()
780 dma_release_channel(priv->dma_rx); in rcar_i2c_release_dma()
781 priv->dma_rx = ERR_PTR(-EPROBE_DEFER); in rcar_i2c_release_dma()
996 priv->dma_rx = priv->dma_tx = ERR_PTR(-EPROBE_DEFER); in rcar_i2c_probe()
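The two I2C drivers above use a different idiom for dma_rx: the pointer starts life as ERR_PTR(-EPROBE_DEFER) instead of NULL, the channel is requested lazily on the first DMA-sized transfer, and release resets the sentinel. A minimal sketch of that pattern follows; struct my_i2c_dev, my_i2c_probe_init(), my_i2c_dma_rx_chan() and my_i2c_release_dma() are illustrative names, not the drivers' own.

#include <linux/dmaengine.h>
#include <linux/err.h>

struct my_i2c_dev {
	struct device *dev;
	struct dma_chan *dma_rx;	/* ERR_PTR until successfully requested */
};

static void my_i2c_probe_init(struct my_i2c_dev *pd)
{
	pd->dma_rx = ERR_PTR(-EPROBE_DEFER);	/* "not requested yet" sentinel */
}

static struct dma_chan *my_i2c_dma_rx_chan(struct my_i2c_dev *pd)
{
	/* Request the channel on first use; cache the result, good or bad. */
	if (PTR_ERR(pd->dma_rx) == -EPROBE_DEFER)
		pd->dma_rx = dma_request_chan(pd->dev, "rx");
	return pd->dma_rx;	/* callers test with IS_ERR() before using it */
}

static void my_i2c_release_dma(struct my_i2c_dev *pd)
{
	if (!IS_ERR(pd->dma_rx)) {
		dma_release_channel(pd->dma_rx);
		pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
	}
}
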
/kernel/linux/linux-5.10/drivers/net/ethernet/samsung/sxgbe/
sxgbe_dma.c
45 dma_addr_t dma_rx, int t_rsize, int r_rsize) in sxgbe_dma_channel_init() argument
71 writel(upper_32_bits(dma_rx), in sxgbe_dma_channel_init()
73 writel(lower_32_bits(dma_rx), in sxgbe_dma_channel_init()
84 dma_addr = dma_rx + ((r_rsize - 1) * SXGBE_DESC_SIZE_BYTES); in sxgbe_dma_channel_init()
sxgbe_dma.h
24 int pbl, dma_addr_t dma_tx, dma_addr_t dma_rx,
/kernel/linux/linux-5.10/drivers/mmc/host/
omap.c
127 struct dma_chan *dma_rx; member
414 c = host->dma_rx; in mmc_omap_release_dma()
1006 c = host->dma_rx; in mmc_omap_prepare_data()
1077 host->dma_tx : host->dma_rx; in mmc_omap_start_request()
1401 host->dma_rx = dma_request_chan(&pdev->dev, "rx"); in mmc_omap_probe()
1402 if (IS_ERR(host->dma_rx)) { in mmc_omap_probe()
1403 ret = PTR_ERR(host->dma_rx); in mmc_omap_probe()
1411 host->dma_rx = NULL; in mmc_omap_probe()
1456 if (host->dma_rx) in mmc_omap_probe()
1457 dma_release_channel(host->dma_rx); in mmc_omap_probe()
[all …]
davinci_mmc.c
193 struct dma_chan *dma_rx; member
393 sync_dev = host->dma_rx; in davinci_abort_dma()
437 chan = host->dma_rx; in mmc_davinci_send_dma_request()
438 dmaengine_slave_config(host->dma_rx, &dma_rx_conf); in mmc_davinci_send_dma_request()
440 desc = dmaengine_prep_slave_sg(host->dma_rx, in mmc_davinci_send_dma_request()
492 dma_release_channel(host->dma_rx); in davinci_release_dma_channels()
503 host->dma_rx = dma_request_chan(mmc_dev(host->mmc), "rx"); in davinci_acquire_dma_channels()
504 if (IS_ERR(host->dma_rx)) { in davinci_acquire_dma_channels()
507 return PTR_ERR(host->dma_rx); in davinci_acquire_dma_channels()
/kernel/linux/linux-5.10/drivers/net/ethernet/calxeda/
xgmac.c
355 struct xgmac_dma_desc *dma_rx; member
687 p = priv->dma_rx + entry; in xgmac_rx_refill()
737 priv->dma_rx = dma_alloc_coherent(priv->device, in xgmac_dma_desc_rings_init()
742 if (!priv->dma_rx) in xgmac_dma_desc_rings_init()
760 priv->dma_rx, priv->dma_tx, in xgmac_dma_desc_rings_init()
766 desc_init_rx_desc(priv->dma_rx, DMA_RX_RING_SZ, priv->dma_buf_sz); in xgmac_dma_desc_rings_init()
783 priv->dma_rx, priv->dma_rx_phy); in xgmac_dma_desc_rings_init()
802 p = priv->dma_rx + i; in xgmac_free_rx_skbufs()
849 if (priv->dma_rx) { in xgmac_free_dma_desc_rings()
852 priv->dma_rx, priv->dma_rx_phy); in xgmac_free_dma_desc_rings()
[all …]
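Not every hit names a DMA channel: in xgmac.c (and the sxgbe files), dma_rx is the virtual base of a coherent RX descriptor ring, allocated with dma_alloc_coherent() and programmed into the MAC via its bus address. A minimal sketch of that pattern, with struct my_rx_desc, MY_RX_RING_SZ and the helper names being hypothetical:

#include <linux/dma-mapping.h>
#include <linux/types.h>

#define MY_RX_RING_SZ	256	/* hypothetical ring depth */

struct my_rx_desc {
	__le32 status;
	__le32 buf_addr;
};

static int my_alloc_rx_ring(struct device *dev, struct my_rx_desc **dma_rx,
			    dma_addr_t *dma_rx_phy)
{
	/*
	 * One coherent allocation holds the whole descriptor ring; the
	 * returned dma_rx_phy is what gets written to the controller's
	 * RX ring base-address register.
	 */
	*dma_rx = dma_alloc_coherent(dev, MY_RX_RING_SZ * sizeof(**dma_rx),
				     dma_rx_phy, GFP_KERNEL);
	return *dma_rx ? 0 : -ENOMEM;
}

static void my_free_rx_ring(struct device *dev, struct my_rx_desc *dma_rx,
			    dma_addr_t dma_rx_phy)
{
	dma_free_coherent(dev, MY_RX_RING_SZ * sizeof(*dma_rx), dma_rx,
			  dma_rx_phy);
}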
