/drivers/dma/

dma-axi-dmac.c
    120  struct dma_device dma_dev; member
    129  dma_dev); in chan_to_axi_dmac()
    575  struct dma_device *dma_dev; in axi_dmac_probe() local
    618  dma_dev = &dmac->dma_dev; in axi_dmac_probe()
    619  dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in axi_dmac_probe()
    620  dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask); in axi_dmac_probe()
    621  dma_dev->device_free_chan_resources = axi_dmac_free_chan_resources; in axi_dmac_probe()
    622  dma_dev->device_tx_status = dma_cookie_status; in axi_dmac_probe()
    623  dma_dev->device_issue_pending = axi_dmac_issue_pending; in axi_dmac_probe()
    624  dma_dev->device_prep_slave_sg = axi_dmac_prep_slave_sg; in axi_dmac_probe()
    [all …]

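Nearly every /drivers/dma/ hit in this section is the same provider-registration idiom: a driver-private structure embeds struct dma_device, and probe() fills in capabilities and callbacks before handing the controller to the dmaengine core. Below is a minimal sketch of that idiom; the foo_* names are hypothetical placeholders, not any of the drivers listed here, and the platform_driver boilerplate and prep/terminate callbacks are omitted.

#include <linux/dmaengine.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

#include "dmaengine.h"		/* drivers/dma/ private helpers, e.g. dma_cookie_status() */

struct foo_dmac {
	void __iomem *regs;
	struct dma_device dma_dev;	/* embedded, as in the entries above */
};

/* trivial stand-ins for the per-driver callbacks */
static int foo_alloc_chan_resources(struct dma_chan *chan)
{
	return 0;			/* a real driver allocates descriptors here */
}

static void foo_free_chan_resources(struct dma_chan *chan)
{
}

static void foo_issue_pending(struct dma_chan *chan)
{
	/* a real driver starts the hardware on its pending descriptor list */
}

static int foo_dmac_probe(struct platform_device *pdev)
{
	struct foo_dmac *fdc;
	struct dma_device *dma_dev;

	fdc = devm_kzalloc(&pdev->dev, sizeof(*fdc), GFP_KERNEL);
	if (!fdc)
		return -ENOMEM;

	dma_dev = &fdc->dma_dev;
	dma_dev->dev = &pdev->dev;
	INIT_LIST_HEAD(&dma_dev->channels);

	dma_cap_zero(dma_dev->cap_mask);
	dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);
	dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask);

	dma_dev->device_alloc_chan_resources = foo_alloc_chan_resources;
	dma_dev->device_free_chan_resources = foo_free_chan_resources;
	dma_dev->device_tx_status = dma_cookie_status;
	dma_dev->device_issue_pending = foo_issue_pending;

	/* channels are added to dma_dev->channels before this call */
	return dma_async_device_register(dma_dev);
}

The individual probe() listings above show the full callback set each controller actually wires up.
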
tegra210-adma.c
    141  struct dma_device dma_dev; member
    625  chan = dma_get_any_slave_channel(&tdma->dma_dev); in tegra_dma_of_xlate()
    713  INIT_LIST_HEAD(&tdma->dma_dev.channels); in tegra_adma_probe()
    725  vchan_init(&tdc->vc, &tdma->dma_dev); in tegra_adma_probe()
    730  dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask); in tegra_adma_probe()
    731  dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask); in tegra_adma_probe()
    732  dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask); in tegra_adma_probe()
    734  tdma->dma_dev.dev = &pdev->dev; in tegra_adma_probe()
    735  tdma->dma_dev.device_alloc_chan_resources = in tegra_adma_probe()
    737  tdma->dma_dev.device_free_chan_resources = in tegra_adma_probe()
    [all …]

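tegra210-adma.c (like nbpfaxi.c and tegra20-apb-dma.c below) exposes the controller to device tree consumers through an of_dma xlate callback that hands out any free channel via dma_get_any_slave_channel(). A hedged sketch of that hookup, reusing the hypothetical foo_dmac type from the previous sketch:

#include <linux/dmaengine.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>

static struct dma_chan *foo_dma_of_xlate(struct of_phandle_args *dma_spec,
					 struct of_dma *ofdma)
{
	struct foo_dmac *fdc = ofdma->of_dma_data;

	/* a real xlate also decodes dma_spec->args[] (request line, flags, ...) */
	return dma_get_any_slave_channel(&fdc->dma_dev);
}

static int foo_register_of_dma(struct platform_device *pdev,
			       struct foo_dmac *fdc)
{
	return of_dma_controller_register(pdev->dev.of_node,
					  foo_dma_of_xlate, fdc);
}

Setting DMA_PRIVATE, as the probe() lines above do, keeps these channels out of the generic allocator so only the xlate path hands them out.
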
nbpfaxi.c
    226  struct dma_device dma_dev; member
    320  dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_read()
    329  dev_dbg(nbpf->dma_dev.dev, "%s(0x%p + 0x%x) = 0x%x\n", in nbpf_write()
    395  dev_dbg(chan->nbpf->dma_dev.dev, "%s(): next 0x%x, cur 0x%x\n", __func__, in nbpf_start()
    827  dev_dbg(chan->nbpf->dma_dev.dev, "%s(): force-free desc %p cookie %d\n", in nbpf_chan_idle()
    1083  dchan = dma_get_any_slave_channel(&nbpf->dma_dev); in nbpf_of_xlate()
    1214  dev_warn(nbpf->dma_dev.dev, "DMA error IRQ %u\n", irq); in nbpf_err_irq()
    1232  struct dma_device *dma_dev = &nbpf->dma_dev; in nbpf_chan_probe() local
    1240  chan->dma_chan.device = dma_dev; in nbpf_chan_probe()
    1244  dev_dbg(dma_dev->dev, "%s(): channel %d: -> %p\n", __func__, n, chan->base); in nbpf_chan_probe()
    [all …]

img-mdc-dma.c
    136  struct dma_device dma_dev; member
    185  return mdma->dma_dev.dev; in mdma2dev()
    807  list_for_each_entry(chan, &mdma->dma_dev.channels, device_node) { in mdc_of_xlate()
    893  dma_cap_zero(mdma->dma_dev.cap_mask); in mdc_dma_probe()
    894  dma_cap_set(DMA_SLAVE, mdma->dma_dev.cap_mask); in mdc_dma_probe()
    895  dma_cap_set(DMA_PRIVATE, mdma->dma_dev.cap_mask); in mdc_dma_probe()
    896  dma_cap_set(DMA_CYCLIC, mdma->dma_dev.cap_mask); in mdc_dma_probe()
    897  dma_cap_set(DMA_MEMCPY, mdma->dma_dev.cap_mask); in mdc_dma_probe()
    927  mdma->dma_dev.dev = &pdev->dev; in mdc_dma_probe()
    928  mdma->dma_dev.device_prep_slave_sg = mdc_prep_slave_sg; in mdc_dma_probe()
    [all …]

fsl_raid.c
    630  struct dma_device *dma_dev; in fsl_re_chan_probe() local
    638  dma_dev = &re_priv->dma_dev; in fsl_re_chan_probe()
    684  chan->chan.device = dma_dev; in fsl_re_chan_probe()
    754  struct dma_device *dma_dev; in fsl_re_probe() local
    783  dma_dev = &re_priv->dma_dev; in fsl_re_probe()
    784  dma_dev->dev = dev; in fsl_re_probe()
    785  INIT_LIST_HEAD(&dma_dev->channels); in fsl_re_probe()
    788  dma_dev->device_alloc_chan_resources = fsl_re_alloc_chan_resources; in fsl_re_probe()
    789  dma_dev->device_tx_status = fsl_re_tx_status; in fsl_re_probe()
    790  dma_dev->device_issue_pending = fsl_re_issue_pending; in fsl_re_probe()
    [all …]

iop-adma.c
    1282  struct dma_device *dma_dev; in iop_adma_probe() local
    1296  dma_dev = &adev->common; in iop_adma_probe()
    1318  dma_dev->cap_mask = plat_data->cap_mask; in iop_adma_probe()
    1323  INIT_LIST_HEAD(&dma_dev->channels); in iop_adma_probe()
    1326  dma_dev->device_alloc_chan_resources = iop_adma_alloc_chan_resources; in iop_adma_probe()
    1327  dma_dev->device_free_chan_resources = iop_adma_free_chan_resources; in iop_adma_probe()
    1328  dma_dev->device_tx_status = iop_adma_status; in iop_adma_probe()
    1329  dma_dev->device_issue_pending = iop_adma_issue_pending; in iop_adma_probe()
    1330  dma_dev->dev = &pdev->dev; in iop_adma_probe()
    1333  if (dma_has_cap(DMA_MEMCPY, dma_dev->cap_mask)) in iop_adma_probe()
    [all …]

mv_xor_v2.c
    673  struct dma_device *dma_dev; in mv_xor_v2_probe() local
    772  dma_dev = &xor_dev->dmadev; in mv_xor_v2_probe()
    775  dma_cap_zero(dma_dev->cap_mask); in mv_xor_v2_probe()
    776  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in mv_xor_v2_probe()
    777  dma_cap_set(DMA_XOR, dma_dev->cap_mask); in mv_xor_v2_probe()
    778  dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in mv_xor_v2_probe()
    781  INIT_LIST_HEAD(&dma_dev->channels); in mv_xor_v2_probe()
    784  dma_dev->device_tx_status = dma_cookie_status; in mv_xor_v2_probe()
    785  dma_dev->device_issue_pending = mv_xor_v2_issue_pending; in mv_xor_v2_probe()
    786  dma_dev->dev = &pdev->dev; in mv_xor_v2_probe()
    [all …]

ep93xx_dma.c
    201  struct dma_device dma_dev; member
    1304  struct dma_device *dma_dev; in ep93xx_dma_probe() local
    1313  dma_dev = &edma->dma_dev; in ep93xx_dma_probe()
    1317  INIT_LIST_HEAD(&dma_dev->channels); in ep93xx_dma_probe()
    1322  edmac->chan.device = dma_dev; in ep93xx_dma_probe()
    1342  &dma_dev->channels); in ep93xx_dma_probe()
    1345  dma_cap_zero(dma_dev->cap_mask); in ep93xx_dma_probe()
    1346  dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in ep93xx_dma_probe()
    1347  dma_cap_set(DMA_CYCLIC, dma_dev->cap_mask); in ep93xx_dma_probe()
    1349  dma_dev->dev = &pdev->dev; in ep93xx_dma_probe()
    [all …]

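The ep93xx_dma.c and fsl_raid.c hits show the other half of registration: each hardware channel's struct dma_chan is pointed back at the shared dma_device and linked onto dma_dev->channels before dma_async_device_register() runs. A sketch of that loop, again with the hypothetical foo_* types from the first sketch (the channel count and allocation layout are illustrative):

#include <linux/device.h>
#include <linux/dmaengine.h>
#include <linux/slab.h>

struct foo_chan {
	struct dma_chan chan;		/* generic channel, embedded in the driver channel */
	void __iomem *chan_regs;
};

static int foo_setup_channels(struct foo_dmac *fdc, unsigned int nr_chans)
{
	struct dma_device *dma_dev = &fdc->dma_dev;
	unsigned int i;

	INIT_LIST_HEAD(&dma_dev->channels);

	for (i = 0; i < nr_chans; i++) {
		struct foo_chan *fchan;

		fchan = devm_kzalloc(dma_dev->dev, sizeof(*fchan), GFP_KERNEL);
		if (!fchan)
			return -ENOMEM;

		fchan->chan.device = dma_dev;
		list_add_tail(&fchan->chan.device_node, &dma_dev->channels);
	}

	return 0;
}

Drivers built on the virt-dma helpers (tegra210-adma.c above, fsl-edma.c below) get the same linkage from vchan_init() instead of open-coding it.
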
xgene-dma.c
    320  struct dma_device dma_dev[XGENE_DMA_MAX_CHANNEL]; member
    1650  struct dma_device *dma_dev) in xgene_dma_set_caps() argument
    1653  dma_cap_zero(dma_dev->cap_mask); in xgene_dma_set_caps()
    1656  dma_cap_set(DMA_SG, dma_dev->cap_mask); in xgene_dma_set_caps()
    1669  dma_cap_set(DMA_PQ, dma_dev->cap_mask); in xgene_dma_set_caps()
    1670  dma_cap_set(DMA_XOR, dma_dev->cap_mask); in xgene_dma_set_caps()
    1673  dma_cap_set(DMA_XOR, dma_dev->cap_mask); in xgene_dma_set_caps()
    1677  dma_dev->dev = chan->dev; in xgene_dma_set_caps()
    1678  dma_dev->device_alloc_chan_resources = xgene_dma_alloc_chan_resources; in xgene_dma_set_caps()
    1679  dma_dev->device_free_chan_resources = xgene_dma_free_chan_resources; in xgene_dma_set_caps()
    [all …]

mic_x100_dma.c
    474  dma_async_device_unregister(&mic_dma_dev->dma_dev); in mic_dma_unregister_dma_device()
    608  dma_cap_zero(mic_dma_dev->dma_dev.cap_mask); in mic_dma_register_dma_device()
    613  dma_cap_set(DMA_MEMCPY, mic_dma_dev->dma_dev.cap_mask); in mic_dma_register_dma_device()
    616  dma_cap_set(DMA_PRIVATE, mic_dma_dev->dma_dev.cap_mask); in mic_dma_register_dma_device()
    617  mic_dma_dev->dma_dev.device_alloc_chan_resources = in mic_dma_register_dma_device()
    619  mic_dma_dev->dma_dev.device_free_chan_resources = in mic_dma_register_dma_device()
    621  mic_dma_dev->dma_dev.device_tx_status = mic_dma_tx_status; in mic_dma_register_dma_device()
    622  mic_dma_dev->dma_dev.device_prep_dma_memcpy = mic_dma_prep_memcpy_lock; in mic_dma_register_dma_device()
    623  mic_dma_dev->dma_dev.device_prep_dma_imm_data = in mic_dma_register_dma_device()
    625  mic_dma_dev->dma_dev.device_prep_dma_interrupt = in mic_dma_register_dma_device()
    [all …]

fsl-edma.c
    172  struct dma_device dma_dev; member
    764  list_for_each_entry_safe(chan, _chan, &fsl_edma->dma_dev.channels, device_node) { in fsl_edma_xlate()
    931  INIT_LIST_HEAD(&fsl_edma->dma_dev.channels); in fsl_edma_probe()
    940  vchan_init(&fsl_chan->vchan, &fsl_edma->dma_dev); in fsl_edma_probe()
    951  dma_cap_set(DMA_PRIVATE, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
    952  dma_cap_set(DMA_SLAVE, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
    953  dma_cap_set(DMA_CYCLIC, fsl_edma->dma_dev.cap_mask); in fsl_edma_probe()
    955  fsl_edma->dma_dev.dev = &pdev->dev; in fsl_edma_probe()
    956  fsl_edma->dma_dev.device_alloc_chan_resources in fsl_edma_probe()
    958  fsl_edma->dma_dev.device_free_chan_resources in fsl_edma_probe()
    [all …]

mv_xor.c
    1033  struct dma_device *dma_dev; in mv_xor_channel_add() local
    1046  dma_dev = &mv_chan->dmadev; in mv_xor_channel_add()
    1054  mv_chan->dummy_src_addr = dma_map_single(dma_dev->dev, in mv_xor_channel_add()
    1056  mv_chan->dummy_dst_addr = dma_map_single(dma_dev->dev, in mv_xor_channel_add()
    1070  dma_dev->cap_mask = cap_mask; in mv_xor_channel_add()
    1072  INIT_LIST_HEAD(&dma_dev->channels); in mv_xor_channel_add()
    1075  dma_dev->device_alloc_chan_resources = mv_xor_alloc_chan_resources; in mv_xor_channel_add()
    1076  dma_dev->device_free_chan_resources = mv_xor_free_chan_resources; in mv_xor_channel_add()
    1077  dma_dev->device_tx_status = mv_xor_status; in mv_xor_channel_add()
    1078  dma_dev->device_issue_pending = mv_xor_issue_pending; in mv_xor_channel_add()
    [all …]

tegra20-apb-dma.c
    217  struct dma_device dma_dev; member
    1253  chan = dma_get_any_slave_channel(&tdma->dma_dev); in tegra_dma_of_xlate()
    1364  INIT_LIST_HEAD(&tdma->dma_dev.channels); in tegra_dma_probe()
    1388  tdc->dma_chan.device = &tdma->dma_dev; in tegra_dma_probe()
    1391  &tdma->dma_dev.channels); in tegra_dma_probe()
    1406  dma_cap_set(DMA_SLAVE, tdma->dma_dev.cap_mask); in tegra_dma_probe()
    1407  dma_cap_set(DMA_PRIVATE, tdma->dma_dev.cap_mask); in tegra_dma_probe()
    1408  dma_cap_set(DMA_CYCLIC, tdma->dma_dev.cap_mask); in tegra_dma_probe()
    1411  tdma->dma_dev.dev = &pdev->dev; in tegra_dma_probe()
    1412  tdma->dma_dev.device_alloc_chan_resources = in tegra_dma_probe()
    [all …]

/drivers/mmc/host/

mmc_spi.c
    143  struct device *dma_dev; member
    182  if (host->dma_dev) in mmc_spi_readbytes()
    183  dma_sync_single_for_device(host->dma_dev, in mmc_spi_readbytes()
    189  if (host->dma_dev) in mmc_spi_readbytes()
    190  dma_sync_single_for_cpu(host->dma_dev, in mmc_spi_readbytes()
    537  if (host->dma_dev) { in mmc_spi_command_send()
    539  dma_sync_single_for_device(host->dma_dev, in mmc_spi_command_send()
    545  if (host->dma_dev) in mmc_spi_command_send()
    546  dma_sync_single_for_cpu(host->dma_dev, in mmc_spi_command_send()
    682  if (host->dma_dev) in mmc_spi_writeblock()
    [all …]

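Here dma_dev is not a dmaengine object at all but the struct device used for streaming mappings: mmc_spi.c brackets each transfer with ownership hand-offs, dma_sync_single_for_device() before the hardware may touch the buffer and dma_sync_single_for_cpu() before the CPU reads it back, skipping both when no DMA-capable device is present. A condensed sketch of that bracket (foo_read_response() and its arguments are illustrative, not the driver's real helper):

#include <linux/device.h>
#include <linux/dma-mapping.h>

static void foo_read_response(struct device *dma_dev, dma_addr_t buf_dma,
			      void *buf, size_t len)
{
	/* dma_dev may be NULL when the SPI master works in PIO mode */
	if (dma_dev)
		dma_sync_single_for_device(dma_dev, buf_dma, len,
					   DMA_FROM_DEVICE);

	/* ... run the SPI message that DMAs the response into @buf ... */

	if (dma_dev)
		dma_sync_single_for_cpu(dma_dev, buf_dma, len,
					DMA_FROM_DEVICE);

	/* only now may the CPU parse @buf */
}
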
/drivers/crypto/ccp/

ccp-dmaengine.c
    626  struct dma_device *dma_dev = &ccp->dma_dev; in ccp_dmaengine_register() local
    669  dma_dev->dev = ccp->dev; in ccp_dmaengine_register()
    670  dma_dev->src_addr_widths = CCP_DMA_WIDTH(dma_get_mask(ccp->dev)); in ccp_dmaengine_register()
    671  dma_dev->dst_addr_widths = CCP_DMA_WIDTH(dma_get_mask(ccp->dev)); in ccp_dmaengine_register()
    672  dma_dev->directions = DMA_MEM_TO_MEM; in ccp_dmaengine_register()
    673  dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR; in ccp_dmaengine_register()
    674  dma_cap_set(DMA_MEMCPY, dma_dev->cap_mask); in ccp_dmaengine_register()
    675  dma_cap_set(DMA_SG, dma_dev->cap_mask); in ccp_dmaengine_register()
    676  dma_cap_set(DMA_INTERRUPT, dma_dev->cap_mask); in ccp_dmaengine_register()
    678  INIT_LIST_HEAD(&dma_dev->channels); in ccp_dmaengine_register()
    [all …]

/drivers/net/ethernet/broadcom/

bgmac.c
    131  struct device *dma_dev = bgmac->dma_dev; in bgmac_dma_tx_add() local
    158  slot->dma_addr = dma_map_single(dma_dev, skb->data, skb_headlen(skb), in bgmac_dma_tx_add()
    160  if (unlikely(dma_mapping_error(dma_dev, slot->dma_addr))) in bgmac_dma_tx_add()
    176  slot->dma_addr = skb_frag_dma_map(dma_dev, frag, 0, in bgmac_dma_tx_add()
    178  if (unlikely(dma_mapping_error(dma_dev, slot->dma_addr))) in bgmac_dma_tx_add()
    207  dma_unmap_single(dma_dev, slot->dma_addr, skb_headlen(skb), in bgmac_dma_tx_add()
    216  dma_unmap_page(dma_dev, slot->dma_addr, len, DMA_TO_DEVICE); in bgmac_dma_tx_add()
    233  struct device *dma_dev = bgmac->dma_dev; in bgmac_dma_tx_free() local
    259  dma_unmap_single(dma_dev, slot->dma_addr, len, in bgmac_dma_tx_free()
    262  dma_unmap_page(dma_dev, slot->dma_addr, len, in bgmac_dma_tx_free()
    [all …]

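In bgmac (and b44 below) dma_dev is again the plain struct device used for streaming mappings of socket buffers: the TX path maps the skb head for the device, checks the mapping, and unmaps on failure or completion. A trimmed sketch of the mapping step; foo_map_tx_head() is a made-up name, not a bgmac function:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

static int foo_map_tx_head(struct device *dma_dev, struct sk_buff *skb,
			   dma_addr_t *addr)
{
	*addr = dma_map_single(dma_dev, skb->data, skb_headlen(skb),
			       DMA_TO_DEVICE);
	if (unlikely(dma_mapping_error(dma_dev, *addr)))
		return -ENOMEM;	/* caller drops or retries the skb */

	return 0;
}

Fragments go through skb_frag_dma_map() the same way, and every successful mapping is later balanced by dma_unmap_single()/dma_unmap_page(), as the bgmac_dma_tx_add()/bgmac_dma_tx_free() lines above show.
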
b44.c
    153  dma_sync_single_for_device(sdev->dma_dev, dma_base + offset, in b44_sync_dma_desc_for_device()
    162  dma_sync_single_for_cpu(sdev->dma_dev, dma_base + offset, in b44_sync_dma_desc_for_cpu()
    632  dma_unmap_single(bp->sdev->dma_dev, in b44_tx()
    677  mapping = dma_map_single(bp->sdev->dma_dev, skb->data, in b44_alloc_rx_skb()
    683  if (dma_mapping_error(bp->sdev->dma_dev, mapping) || in b44_alloc_rx_skb()
    686  if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_alloc_rx_skb()
    687  dma_unmap_single(bp->sdev->dma_dev, mapping, in b44_alloc_rx_skb()
    693  mapping = dma_map_single(bp->sdev->dma_dev, skb->data, in b44_alloc_rx_skb()
    696  if (dma_mapping_error(bp->sdev->dma_dev, mapping) || in b44_alloc_rx_skb()
    698  if (!dma_mapping_error(bp->sdev->dma_dev, mapping)) in b44_alloc_rx_skb()
    [all …]

/drivers/tty/serial/8250/

8250_mid.c
    47  struct pci_dev *dma_dev; member
    73  mid->dma_dev = pci_get_slot(pdev->bus, in pnw_setup()
    92  mid->dma_dev = pci_get_slot(pdev->bus, PCI_DEVFN(5, 0)); in tng_setup()
    145  mid->dma_dev = pdev; in dnv_setup()
    153  if (!mid->dma_dev) in dnv_exit()
    200  if (s->dma_dev != chan->device->dev || s->chan_id != chan->chan_id) in mid8250_dma_filter()
    214  if (!mid->dma_dev) in mid8250_dma_setup()
    231  rx_param->dma_dev = &mid->dma_dev->dev; in mid8250_dma_setup()
    232  tx_param->dma_dev = &mid->dma_dev->dev; in mid8250_dma_setup()

8250_lpss.c
    118  struct pci_dev *dma_dev = pci_get_slot(pdev->bus, dma_devfn); in byt_serial_setup() local
    137  param->dma_dev = &dma_dev->dev; in byt_serial_setup()
    190  param->dma_dev = &pdev->dev; in qrk_serial_setup_dma()
    202  if (!param->dma_dev) in qrk_serial_exit_dma()
    235  if (dws->dma_dev != chan->device->dev) in lpss8250_dma_filter()
    248  if (!lpss->dma_param.dma_dev) in lpss8250_dma_setup()

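In the 8250_mid.c and 8250_lpss.c entries, dma_dev is a hand-off between the UART glue and the DesignWare DMA driver: the glue stores the DMA controller's struct device in a dw_dma_slave parameter block, and a filter callback accepts only channels belonging to that controller. A hedged sketch of the idiom (not the drivers' exact code; foo_* names are placeholders):

#include <linux/dmaengine.h>
#include <linux/platform_data/dma-dw.h>

static bool foo_dma_filter(struct dma_chan *chan, void *param)
{
	struct dw_dma_slave *dws = param;

	/* only take channels provided by the controller we were pointed at */
	if (dws->dma_dev != chan->device->dev)
		return false;

	chan->private = dws;
	return true;
}

static struct dma_chan *foo_request_rx_chan(struct dw_dma_slave *rx_param)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	return dma_request_channel(mask, foo_dma_filter, rx_param);
}

spi-dw-mid.c and spi-pxa2xx-pci.c below apply the same filter; they differ only in how they locate the DMA controller's PCI function.
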
/drivers/dma/sh/

sudmac.c
    55  struct sudmac_device, shdma_dev.dma_dev)
    242  struct platform_device *pdev = to_platform_device(sdev->dma_dev.dev); in sudmac_chan_probe()
    280  dev_err(sdev->dma_dev.dev, in sudmac_chan_probe()
    335  struct dma_device *dma_dev; in sudmac_probe() local
    352  dma_dev = &su_dev->shdma_dev.dma_dev; in sudmac_probe()
    359  dma_cap_set(DMA_SLAVE, dma_dev->cap_mask); in sudmac_probe()
    379  err = dma_async_device_register(&su_dev->shdma_dev.dma_dev); in sudmac_probe()
    396  struct dma_device *dma_dev = &su_dev->shdma_dev.dma_dev; in sudmac_remove() local
    398  dma_async_device_unregister(dma_dev); in sudmac_remove()

shdmac.c
    169  dev_warn(shdev->shdma_dev.dma_dev.dev, "Can't initialize DMAOR.\n"); in sh_dmae_rst()
    173  dev_warn(shdev->shdma_dev.dma_dev.dev, in sh_dmae_rst()
    528  struct platform_device *pdev = to_platform_device(sdev->dma_dev.dev); in sh_dmae_chan_probe()
    533  sh_chan = devm_kzalloc(sdev->dma_dev.dev, sizeof(struct sh_dmae_chan), in sh_dmae_chan_probe()
    555  dev_err(sdev->dma_dev.dev, in sh_dmae_chan_probe()
    695  struct dma_device *dma_dev; in sh_dmae_probe() local
    735  dma_dev = &shdev->shdma_dev.dma_dev; in sh_dmae_probe()
    746  dma_dev->src_addr_widths = widths; in sh_dmae_probe()
    747  dma_dev->dst_addr_widths = widths; in sh_dmae_probe()
    748  dma_dev->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM); in sh_dmae_probe()
    [all …]

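sh_dmae_probe() (like ccp_dmaengine_register() earlier) also advertises what the controller can do to slave clients through the width/direction fields of struct dma_device. A short sketch of those assignments, with the bus widths picked purely for illustration:

#include <linux/bitops.h>
#include <linux/dmaengine.h>

static void foo_set_slave_caps(struct dma_device *dma_dev)
{
	dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
				   BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
				   BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dma_dev->dst_addr_widths = dma_dev->src_addr_widths;
	dma_dev->directions = BIT(DMA_MEM_TO_DEV) | BIT(DMA_DEV_TO_MEM);
	dma_dev->residue_granularity = DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
}
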
shdma-base.c
    40  #define to_shdma_dev(d) container_of(d, struct shdma_dev, dma_dev)
    309  dev_warn(sdev->dma_dev.dev, "invalid slave ID passed to dma_request_slave\n"); in shdma_chan_filter()
    964  schan->dma_chan.device = &sdev->dma_dev; in shdma_chan_probe()
    967  schan->dev = sdev->dma_dev.dev; in shdma_chan_probe()
    981  &sdev->dma_dev.channels); in shdma_chan_probe()
    995  struct dma_device *dma_dev = &sdev->dma_dev; in shdma_init() local
    1018  INIT_LIST_HEAD(&dma_dev->channels); in shdma_init()
    1021  dma_dev->device_alloc_chan_resources in shdma_init()
    1023  dma_dev->device_free_chan_resources = shdma_free_chan_resources; in shdma_init()
    1024  dma_dev->device_prep_dma_memcpy = shdma_prep_memcpy; in shdma_init()
    [all …]

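shdma-base.c's to_shdma_dev() (line 40 above) is the standard container_of() trick for getting from the embedded struct dma_device back to the driver-private wrapper inside a generic dmaengine callback; dma-axi-dmac.c's chan_to_axi_dmac() does the same per channel. The same trick with the hypothetical foo_dmac type from the first sketch:

#include <linux/dmaengine.h>
#include <linux/errno.h>
#include <linux/kernel.h>	/* container_of() */

static inline struct foo_dmac *to_foo_dmac(struct dma_device *dd)
{
	return container_of(dd, struct foo_dmac, dma_dev);
}

/* e.g. as a device_pause callback */
static int foo_device_pause(struct dma_chan *chan)
{
	struct foo_dmac *fdc = to_foo_dmac(chan->device);

	/* a real driver would poke fdc->regs here */
	return fdc->regs ? 0 : -ENODEV;
}
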
/drivers/spi/

spi-dw-mid.c
    39  if (s->dma_dev != chan->device->dev) in mid_spi_dma_chan_filter()
    48  struct pci_dev *dma_dev; in mid_spi_dma_init() local
    57  dma_dev = pci_get_device(PCI_VENDOR_ID_INTEL, 0x0827, NULL); in mid_spi_dma_init()
    58  if (!dma_dev) in mid_spi_dma_init()
    65  rx->dma_dev = &dma_dev->dev; in mid_spi_dma_init()
    72  tx->dma_dev = &dma_dev->dev; in mid_spi_dma_init()

spi-pxa2xx-pci.c
    58  if (dws->dma_dev != chan->device->dev) in lpss_dma_filter()
    67  struct pci_dev *dma_dev; in lpss_spi_setup() local
    72  dma_dev = pci_get_slot(dev->bus, PCI_DEVFN(PCI_SLOT(dev->devfn), 0)); in lpss_spi_setup()
    77  slave->dma_dev = &dma_dev->dev; in lpss_spi_setup()
    85  slave->dma_dev = &dma_dev->dev; in lpss_spi_setup()

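spi-pxa2xx-pci.c (like 8250_mid.c above) finds its DMA engine as a sibling function of the same multi-function PCI device and then feeds that device into the filter shown earlier; spi-dw-mid.c instead looks the controller up by vendor/device ID. A sketch of the slot-based lookup; foo_attach_dma() and the function number 0 are illustrative only:

#include <linux/pci.h>
#include <linux/platform_data/dma-dw.h>

static int foo_attach_dma(struct pci_dev *pdev, struct dw_dma_slave *slave)
{
	struct pci_dev *dma_dev;

	/* function 0 of our own slot; pci_get_slot() takes a reference */
	dma_dev = pci_get_slot(pdev->bus,
			       PCI_DEVFN(PCI_SLOT(pdev->devfn), 0));
	if (!dma_dev)
		return -ENODEV;

	slave->dma_dev = &dma_dev->dev;	/* consumed by the channel filter */
	return 0;
}

A real driver balances the pci_get_slot() reference with pci_dev_put() on its teardown path.
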
/drivers/net/ethernet/sun/

sunhme.c
    255  ((__hp)->dma_map((__hp)->dma_dev, (__ptr), (__size), (__dir)))
    257  ((__hp)->dma_unmap((__hp)->dma_dev, (__addr), (__size), (__dir)))
    259  ((__hp)->dma_sync_for_cpu((__hp)->dma_dev, (__addr), (__size), (__dir)))
    261  ((__hp)->dma_sync_for_device((__hp)->dma_dev, (__addr), (__size), (__dir)))
    281  dma_map_single((__hp)->dma_dev, (__ptr), (__size), (__dir))
    283  dma_unmap_single((__hp)->dma_dev, (__addr), (__size), (__dir))
    285  dma_dma_sync_single_for_cpu((__hp)->dma_dev, (__addr), (__size), (__dir))
    287  dma_dma_sync_single_for_device((__hp)->dma_dev, (__addr), (__size), (__dir))
    309  pci_map_single((__hp)->dma_dev, (__ptr), (__size), (__dir))
    311  pci_unmap_single((__hp)->dma_dev, (__addr), (__size), (__dir))
    [all …]