/drivers/dma/qcom/ |
D | hidma.c |
     76  static inline struct hidma_dev *to_hidma_dev(struct dma_device *dmadev)  in to_hidma_dev() argument
     78  return container_of(dmadev, struct hidma_dev, ddev);  in to_hidma_dev()
     98  static void hidma_free(struct hidma_dev *dmadev)  in hidma_free() argument
    100  INIT_LIST_HEAD(&dmadev->ddev.channels);  in hidma_free()
    171  struct hidma_dev *dmadev = to_hidma_dev(ddev);  in hidma_callback() local
    190  pm_runtime_mark_last_busy(dmadev->ddev.dev);  in hidma_callback()
    191  pm_runtime_put_autosuspend(dmadev->ddev.dev);  in hidma_callback()
    195  static int hidma_chan_init(struct hidma_dev *dmadev, u32 dma_sig)  in hidma_chan_init() argument
    200  mchan = devm_kzalloc(dmadev->ddev.dev, sizeof(*mchan), GFP_KERNEL);  in hidma_chan_init()
    204  ddev = &dmadev->ddev;  in hidma_chan_init()
    [all …]
|
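The hidma.c hits above show the standard dmaengine wrapping idiom: the driver embeds a `struct dma_device` (here the `ddev` member) inside its private `struct hidma_dev`, and `to_hidma_dev()` converts the embedded pointer the core passes around back into the wrapper with `container_of()`. A minimal sketch of that idiom, using hypothetical `foo_*` names rather than the real hidma types:

```c
#include <linux/dmaengine.h>
#include <linux/kernel.h>

/* Driver-private state with the generic dmaengine device embedded in it. */
struct foo_dev {
	void __iomem *regs;
	struct dma_device ddev;		/* embedded, not a pointer */
};

/* Recover the wrapper from the struct dma_device pointer the core hands back. */
static inline struct foo_dev *to_foo_dev(struct dma_device *dmadev)
{
	return container_of(dmadev, struct foo_dev, ddev);
}
```

The same shape recurs below as `to_sa11x0_dma()`, `to_pxad_dev()`, `to_mmp_pdma_dev()`, `to_zx_dma()` and `to_k3_dma()`.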
D | hidma_dbg.c |
     96  struct hidma_dev *dmadev = mchan->dmadev;  in hidma_chan_stats() local
     98  pm_runtime_get_sync(dmadev->ddev.dev);  in hidma_chan_stats()
    103  hidma_ll_chstats(s, mchan->dmadev->lldev, mdesc->tre_ch);  in hidma_chan_stats()
    107  hidma_ll_chstats(s, mchan->dmadev->lldev, mdesc->tre_ch);  in hidma_chan_stats()
    111  hidma_ll_chstats(s, mchan->dmadev->lldev, mdesc->tre_ch);  in hidma_chan_stats()
    113  hidma_ll_devstats(s, mchan->dmadev->lldev);  in hidma_chan_stats()
    114  pm_runtime_mark_last_busy(dmadev->ddev.dev);  in hidma_chan_stats()
    115  pm_runtime_put_autosuspend(dmadev->ddev.dev);  in hidma_chan_stats()
    126  struct hidma_dev *dmadev = s->private;  in hidma_dma_info() local
    129  seq_printf(s, "nr_descriptors=%d\n", dmadev->nr_descriptors);  in hidma_dma_info()
    [all …]
|
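hidma_dbg.c brackets its debugfs statistics dumps with runtime PM: `pm_runtime_get_sync()` powers the device up before any register is read, and `pm_runtime_mark_last_busy()` plus `pm_runtime_put_autosuspend()` drop the reference while keeping the autosuspend timer accurate. A hedged sketch of that bracket; `dump_hw_stats()` stands in for the real `hidma_ll_chstats()`/`hidma_ll_devstats()` calls:

```c
#include <linux/device.h>
#include <linux/pm_runtime.h>

/* Stand-in for the driver's real register-reading stats helpers. */
static void dump_hw_stats(struct device *dev)
{
	dev_info(dev, "would read per-channel hardware counters here\n");
}

static void chan_stats_show(struct device *dev)
{
	/* Resume the device (or bump its usage count if already active). */
	pm_runtime_get_sync(dev);

	dump_hw_stats(dev);

	/* Restart the autosuspend timer, then release our reference. */
	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);
}
```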
D | hidma.h |
    106  struct hidma_dev *dmadev;  member
    169  int hidma_debug_init(struct hidma_dev *dmadev);
    170  void hidma_debug_uninit(struct hidma_dev *dmadev);
|
/drivers/dma/ |
D | dma-jz4740.c |
    149  static inline uint32_t jz4740_dma_read(struct jz4740_dma_dev *dmadev,  in jz4740_dma_read() argument
    152  return readl(dmadev->base + reg);  in jz4740_dma_read()
    155  static inline void jz4740_dma_write(struct jz4740_dma_dev *dmadev,  in jz4740_dma_write() argument
    158  writel(val, dmadev->base + reg);  in jz4740_dma_write()
    161  static inline void jz4740_dma_write_mask(struct jz4740_dma_dev *dmadev,  in jz4740_dma_write_mask() argument
    166  tmp = jz4740_dma_read(dmadev, reg);  in jz4740_dma_write_mask()
    169  jz4740_dma_write(dmadev, reg, tmp);  in jz4740_dma_write_mask()
    210  struct jz4740_dma_dev *dmadev = jz4740_dma_chan_get_dev(chan);  in jz4740_dma_slave_config() local
    260  jz4740_dma_write(dmadev, JZ_REG_DMA_CMD(chan->id), cmd);  in jz4740_dma_slave_config()
    261  jz4740_dma_write(dmadev, JZ_REG_DMA_STATUS_CTRL(chan->id), 0);  in jz4740_dma_slave_config()
    [all …]
|
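dma-jz4740.c funnels all MMIO through tiny helpers keyed off the controller's mapped register base, with `jz4740_dma_write_mask()` layering a read-modify-write on top. A sketch of the same three helpers under made-up `bar_dma_*` names (the register offsets and field layout are placeholders, not the jz4740 ones):

```c
#include <linux/io.h>
#include <linux/types.h>

struct bar_dma_dev {
	void __iomem *base;	/* ioremapped controller registers */
};

static inline u32 bar_dma_read(struct bar_dma_dev *dmadev, unsigned int reg)
{
	return readl(dmadev->base + reg);
}

static inline void bar_dma_write(struct bar_dma_dev *dmadev,
				 unsigned int reg, u32 val)
{
	writel(val, dmadev->base + reg);
}

/* Update only the bits selected by @mask, leaving the rest untouched. */
static inline void bar_dma_write_mask(struct bar_dma_dev *dmadev,
				      unsigned int reg, u32 val, u32 mask)
{
	u32 tmp = bar_dma_read(dmadev, reg);

	tmp &= ~mask;
	tmp |= val & mask;
	bar_dma_write(dmadev, reg, tmp);
}
```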
D | stm32-dma.c |
    206  static u32 stm32_dma_read(struct stm32_dma_device *dmadev, u32 reg)  in stm32_dma_read() argument
    208  return readl_relaxed(dmadev->base + reg);  in stm32_dma_read()
    211  static void stm32_dma_write(struct stm32_dma_device *dmadev, u32 reg, u32 val)  in stm32_dma_write() argument
    213  writel_relaxed(val, dmadev->base + reg);  in stm32_dma_write()
    285  struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);  in stm32_dma_irq_status() local
    297  dma_isr = stm32_dma_read(dmadev, STM32_DMA_HISR);  in stm32_dma_irq_status()
    299  dma_isr = stm32_dma_read(dmadev, STM32_DMA_LISR);  in stm32_dma_irq_status()
    308  struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);  in stm32_dma_irq_clear() local
    321  stm32_dma_write(dmadev, STM32_DMA_HIFCR, dma_ifcr);  in stm32_dma_irq_clear()
    323  stm32_dma_write(dmadev, STM32_DMA_LIFCR, dma_ifcr);  in stm32_dma_irq_clear()
    [all …]
|
D | sa11x0-dma.c |
    135  static struct sa11x0_dma_dev *to_sa11x0_dma(struct dma_device *dmadev)  in to_sa11x0_dma() argument
    137  return container_of(dmadev, struct sa11x0_dma_dev, slave);  in to_sa11x0_dma()
    826  static int sa11x0_dma_init_dmadev(struct dma_device *dmadev,  in sa11x0_dma_init_dmadev() argument
    831  INIT_LIST_HEAD(&dmadev->channels);  in sa11x0_dma_init_dmadev()
    832  dmadev->dev = dev;  in sa11x0_dma_init_dmadev()
    833  dmadev->device_free_chan_resources = sa11x0_dma_free_chan_resources;  in sa11x0_dma_init_dmadev()
    834  dmadev->device_config = sa11x0_dma_device_config;  in sa11x0_dma_init_dmadev()
    835  dmadev->device_pause = sa11x0_dma_device_pause;  in sa11x0_dma_init_dmadev()
    836  dmadev->device_resume = sa11x0_dma_device_resume;  in sa11x0_dma_init_dmadev()
    837  dmadev->device_terminate_all = sa11x0_dma_device_terminate_all;  in sa11x0_dma_init_dmadev()
    [all …]
|
D | virt-dma.c |
    135  void vchan_init(struct virt_dma_chan *vc, struct dma_device *dmadev)  in vchan_init() argument
    147  vc->chan.device = dmadev;  in vchan_init()
    148  list_add_tail(&vc->chan.device_node, &dmadev->channels);  in vchan_init()
|
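virt-dma.c is the helper layer several drivers in this listing build on: `vchan_init()` points the embedded `struct dma_chan` at the owning `struct dma_device` and links it onto `dmadev->channels`, exactly as the two hits above show. A hedged sketch of a driver registering one virtual channel this way (the `my_*` names and descriptor layout are illustrative):

```c
#include <linux/dmaengine.h>
#include <linux/kernel.h>
#include <linux/slab.h>

#include "virt-dma.h"	/* drivers/dma-local header declaring vchan_init() */

struct my_desc {
	struct virt_dma_desc vd;
	/* hardware-specific descriptor fields would follow */
};

struct my_chan {
	struct virt_dma_chan vc;	/* must be embedded for the vchan helpers */
	unsigned int id;
};

/* Called by the vchan helpers whenever one of our descriptors is released. */
static void my_desc_free(struct virt_dma_desc *vd)
{
	kfree(container_of(vd, struct my_desc, vd));
}

static int my_register_channel(struct dma_device *my_dd, unsigned int id)
{
	struct my_chan *c = kzalloc(sizeof(*c), GFP_KERNEL);

	if (!c)
		return -ENOMEM;

	c->id = id;
	c->vc.desc_free = my_desc_free;
	/* Sets c->vc.chan.device = my_dd and adds the channel to my_dd->channels. */
	vchan_init(&c->vc, my_dd);
	return 0;
}
```

s3c24xx-dma.c and amba-pl08x.c below do essentially this in a loop over their physical/virtual channel counts.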
D | mv_xor_v2.c |
    169  struct dma_device dmadev;  member
    318  dev_dbg(xor_dev->dmadev.dev,  in mv_xor_v2_tx_submit()
    393  dev_dbg(xor_dev->dmadev.dev,  in mv_xor_v2_prep_dma_memcpy()
    449  dev_dbg(xor_dev->dmadev.dev,  in mv_xor_v2_prep_dma_xor()
    568  dev_dbg(xor_dev->dmadev.dev, "%s %d\n", __func__, __LINE__);  in mv_xor_v2_tasklet()
    838  dma_dev = &xor_dev->dmadev;  in mv_xor_v2_probe()
    893  dma_async_device_unregister(&xor_dev->dmadev);  in mv_xor_v2_remove()
|
D | s3c24xx-dma.c |
    1085  struct dma_device *dmadev, unsigned int channels, bool slave)  in s3c24xx_dma_init_virtual_channels() argument
    1090  INIT_LIST_HEAD(&dmadev->channels);  in s3c24xx_dma_init_virtual_channels()
    1098  chan = devm_kzalloc(dmadev->dev, sizeof(*chan), GFP_KERNEL);  in s3c24xx_dma_init_virtual_channels()
    1116  dev_dbg(dmadev->dev,  in s3c24xx_dma_init_virtual_channels()
    1121  vchan_init(&chan->vc, dmadev);  in s3c24xx_dma_init_virtual_channels()
    1123  dev_info(dmadev->dev, "initialized %d virtual %s channels\n",  in s3c24xx_dma_init_virtual_channels()
    1128  static void s3c24xx_dma_free_virtual_channels(struct dma_device *dmadev)  in s3c24xx_dma_free_virtual_channels() argument
    1134  next, &dmadev->channels, vc.chan.device_node) {  in s3c24xx_dma_free_virtual_channels()
|
D | altera-msgdma.c |
    183  struct dma_device dmadev;  member
    846  dma_dev = &mdev->dmadev;  in msgdma_probe()
    913  dma_async_device_unregister(&mdev->dmadev);  in msgdma_remove()
|
D | virt-dma.h | 46 void vchan_init(struct virt_dma_chan *vc, struct dma_device *dmadev);
|
D | mv_xor.c |
      54  ((chan)->dmadev.dev)
    1020  struct device *dev = mv_chan->dmadev.dev;  in mv_xor_channel_remove()
    1022  dma_async_device_unregister(&mv_chan->dmadev);  in mv_xor_channel_remove()
    1031  list_for_each_entry_safe(chan, _chan, &mv_chan->dmadev.channels,  in mv_xor_channel_remove()
    1061  dma_dev = &mv_chan->dmadev;  in mv_xor_channel_add()
|
D | mv_xor.h | 126 struct dma_device dmadev; member
|
D | pxa_dma.c |
     144  #define to_pxad_dev(dmadev) \  argument
     145  container_of(dmadev, struct pxad_device, slave)
    1267  static void pxad_free_channels(struct dma_device *dmadev)  in pxad_free_channels() argument
    1271  list_for_each_entry_safe(c, cn, &dmadev->channels,  in pxad_free_channels()
|
D | amba-pl08x.c |
    2365  struct dma_device *dmadev, unsigned int channels, bool slave)  in pl08x_dma_init_virtual_channels() argument
    2370  INIT_LIST_HEAD(&dmadev->channels);  in pl08x_dma_init_virtual_channels()
    2415  vchan_init(&chan->vc, dmadev);  in pl08x_dma_init_virtual_channels()
    2422  static void pl08x_free_virtual_channels(struct dma_device *dmadev)  in pl08x_free_virtual_channels() argument
    2428  next, &dmadev->channels, vc.chan.device_node) {  in pl08x_free_virtual_channels()
|
D | mmp_pdma.c |
    140  #define to_mmp_pdma_dev(dmadev) \  argument
    141  container_of(dmadev, struct mmp_pdma_device, device)
|
D | zx_dma.c | 134 #define to_zx_dma(dmadev) container_of(dmadev, struct zx_dma_dev, slave) argument
|
D | k3dma.c | 119 #define to_k3_dma(dmadev) container_of(dmadev, struct k3_dma_dev, slave) argument
|
D | fsl-edma.c |
     998  static void fsl_edma_cleanup_vchan(struct dma_device *dmadev)  in fsl_edma_cleanup_vchan() argument
    1003  &dmadev->channels, vchan.chan.device_node) {  in fsl_edma_cleanup_vchan()
|
D | edma.c |
    2419  static void edma_cleanupp_vchan(struct dma_device *dmadev)  in edma_cleanupp_vchan() argument
    2424  &dmadev->channels, vchan.chan.device_node) {  in edma_cleanupp_vchan()
|
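fsl-edma.c and edma.c tear their channels down the same way: walk `dmadev->channels` with `list_for_each_entry_safe()` (safe because each iteration unlinks the entry it stands on) and kill the per-channel vchan tasklet before the `dma_device` goes away. A sketch of that teardown for a hypothetical driver whose channel embeds a `struct virt_dma_chan` named `vc`:

```c
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/list.h>

#include "virt-dma.h"	/* struct virt_dma_chan, local to drivers/dma */

struct my_chan {
	struct virt_dma_chan vc;
};

static void my_cleanup_vchans(struct dma_device *dmadev)
{
	struct my_chan *chan, *next;

	/* _safe variant: the loop body removes the entry it is standing on. */
	list_for_each_entry_safe(chan, next, &dmadev->channels,
				 vc.chan.device_node) {
		list_del(&chan->vc.chan.device_node);
		tasklet_kill(&chan->vc.task);
	}
}
```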
/drivers/net/ethernet/amazon/ena/ |
D | ena_com.c |
    158  aenq->entries = dma_zalloc_coherent(dev->dmadev, size, &aenq->dma_addr,  in ena_com_admin_init_aenq()
    348  dev_node = dev_to_node(ena_dev->dmadev);  in ena_com_init_io_sq()
    349  set_dev_node(ena_dev->dmadev, ctx->numa_node);  in ena_com_init_io_sq()
    351  dma_zalloc_coherent(ena_dev->dmadev, size,  in ena_com_init_io_sq()
    354  set_dev_node(ena_dev->dmadev, dev_node);  in ena_com_init_io_sq()
    357  dma_zalloc_coherent(ena_dev->dmadev, size,  in ena_com_init_io_sq()
    362  dev_node = dev_to_node(ena_dev->dmadev);  in ena_com_init_io_sq()
    363  set_dev_node(ena_dev->dmadev, ctx->numa_node);  in ena_com_init_io_sq()
    365  devm_kzalloc(ena_dev->dmadev, size, GFP_KERNEL);  in ena_com_init_io_sq()
    366  set_dev_node(ena_dev->dmadev, dev_node);  in ena_com_init_io_sq()
    [all …]
|
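ena_com.c temporarily rebinds the device's NUMA node around its coherent allocations so that queue memory lands on the node that will service the queue, then restores the original node. A sketch of that save/override/restore pattern; `dma_zalloc_coherent()` matches the kernel generation this listing comes from (on v5.0+ it would be plain `dma_alloc_coherent()`, which now zeroes the buffer), and the helper name is made up:

```c
#include <linux/device.h>
#include <linux/dma-mapping.h>

static void *alloc_ring_on_node(struct device *dmadev, size_t size,
				dma_addr_t *dma_addr, int numa_node)
{
	int orig_node = dev_to_node(dmadev);	/* remember the device's node */
	void *vaddr;

	set_dev_node(dmadev, numa_node);	/* steer the allocation */
	vaddr = dma_zalloc_coherent(dmadev, size, dma_addr, GFP_KERNEL);
	set_dev_node(dmadev, orig_node);	/* put the device back */

	return vaddr;
}
```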
D | ena_com.h | 320 void *dmadev; member
|
/drivers/net/wireless/broadcom/brcm80211/brcmsmac/ |
D | dma.c |
     213  struct device *dmadev;  member
     451  return dma_alloc_coherent(di->dmadev, size, pap, GFP_ATOMIC);  in dma_alloc_consistent()
     486  dma_free_coherent(di->dmadev, size, va, *descpa);  in dma_ringalloc()
     590  di->dmadev = core->dma_dev;  in dma_attach()
     755  dma_free_coherent(di->dmadev, di->txdalloc,  in dma_detach()
     759  dma_free_coherent(di->dmadev, di->rxdalloc,  in dma_detach()
     897  dma_unmap_single(di->dmadev, pa, di->rxbufsize, DMA_FROM_DEVICE);  in dma64_getnextrxp()
    1080  pa = dma_map_single(di->dmadev, p->data, di->rxbufsize,  in dma_rxfill()
    1082  if (dma_mapping_error(di->dmadev, pa)) {  in dma_rxfill()
    1291  pa = dma_map_single(di->dmadev, data, len, DMA_TO_DEVICE);  in dma_txenq()
    [all …]
|
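The brcmsmac descriptor-ring code uses the streaming DMA API against its stored `di->dmadev`: receive buffers are mapped with `dma_map_single()` just before being handed to hardware, the mapping is validated with `dma_mapping_error()`, and unmapped again in `dma64_getnextrxp()` once the hardware is done. A condensed sketch of the rx-fill side, with buffer management reduced to a bare kmalloc for illustration (the function name is hypothetical):

```c
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int fill_one_rx_buffer(struct device *dmadev, size_t bufsize,
			      dma_addr_t *pa_out, void **buf_out)
{
	void *buf = kmalloc(bufsize, GFP_ATOMIC);
	dma_addr_t pa;

	if (!buf)
		return -ENOMEM;

	/* Hand the buffer to the device; the CPU must not touch it until unmap. */
	pa = dma_map_single(dmadev, buf, bufsize, DMA_FROM_DEVICE);
	if (dma_mapping_error(dmadev, pa)) {
		kfree(buf);
		return -EIO;
	}

	*pa_out = pa;
	*buf_out = buf;
	/* On completion: dma_unmap_single(dmadev, pa, bufsize, DMA_FROM_DEVICE); */
	return 0;
}
```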
/drivers/media/platform/omap/ |
D | omap_vout_vrfb.c |
    239  struct dma_device *dmadev = chan->device;  in omap_vout_prepare_vrfb() local
    274  tx = dmadev->device_prep_interleaved_dma(chan, xt, flags);  in omap_vout_prepare_vrfb()
|
/drivers/media/platform/ |
D | m2m-deinterlace.c |
    230  struct dma_device *dmadev = chan->device;  in deinterlace_issue_dma() local
    349  tx = dmadev->device_prep_interleaved_dma(chan, ctx->xt, flags);  in deinterlace_issue_dma()
|
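Both media drivers above are dmaengine clients rather than providers: they reach through `chan->device` and invoke the channel's `device_prep_interleaved_dma()` hook directly, which is what the `dmaengine_prep_interleaved_dma()` wrapper does for you. A hedged sketch of filling a one-chunk `struct dma_interleaved_template` for a strided 2D copy and submitting it; the geometry parameters are made up, and the caller is assumed to own `xt` for the lifetime of the transfer:

```c
#include <linux/dmaengine.h>
#include <linux/errno.h>

/*
 * @xt must have been allocated with room for one struct data_chunk, e.g.
 * kzalloc(sizeof(*xt) + sizeof(struct data_chunk), GFP_KERNEL), and remains
 * owned by the caller while the transfer is in flight.
 */
static int submit_interleaved_copy(struct dma_chan *chan,
				   struct dma_interleaved_template *xt,
				   dma_addr_t src, dma_addr_t dst,
				   size_t line_len, size_t stride, size_t numf)
{
	struct dma_async_tx_descriptor *tx;
	unsigned long flags = DMA_CTRL_ACK | DMA_PREP_INTERRUPT;

	xt->src_start = src;
	xt->dst_start = dst;
	xt->dir = DMA_MEM_TO_MEM;
	xt->src_inc = true;
	xt->dst_inc = true;
	xt->src_sgl = true;		/* honour the inter-line gap on the source */
	xt->dst_sgl = false;		/* destination lines are packed */
	xt->numf = numf;		/* number of lines (frames) */
	xt->frame_size = 1;		/* one chunk per line */
	xt->sgl[0].size = line_len;
	xt->sgl[0].icg = stride - line_len;

	tx = dmaengine_prep_interleaved_dma(chan, xt, flags);
	if (!tx)
		return -EINVAL;

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;
}
```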