
Searched refs:dma_base (Results 1 – 24 of 24) sorted by relevance

/arch/c6x/mm/
dma-coherent.c
33 static phys_addr_t dma_base; variable
54 return dma_base + (pos << PAGE_SHIFT); in __alloc_dma_pages()
60 u32 pos = (addr - dma_base) >> PAGE_SHIFT; in __free_dma_pages()
62 if (addr < dma_base || (pos + (1 << order)) >= dma_pages) { in __free_dma_pages()
130 dma_base = start; in coherent_mem_init()
/arch/blackfin/kernel/
dma-mapping.c
21 static unsigned long dma_base; variable
32 dma_base = PAGE_ALIGN(start); in dma_alloc_init()
35 memset((void *)dma_base, 0, DMA_UNCACHED_REGION); in dma_alloc_init()
39 dma_page, dma_pages, dma_base); in dma_alloc_init()
59 ret = dma_base + (start << PAGE_SHIFT); in __alloc_dma_pages()
68 unsigned long page = (addr - dma_base) >> PAGE_SHIFT; in __free_dma_pages()
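The c6x and blackfin hits above share one pattern: a reserved coherent region is described by dma_base plus a page count, an allocation hands back dma_base + (pos << PAGE_SHIFT), and freeing recovers the page index as (addr - dma_base) >> PAGE_SHIFT with a bounds check against dma_base. A minimal, self-contained userspace sketch of that arithmetic follows; the pool size, bookkeeping and names are illustrative, not the kernel code.

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define POOL_PAGES 8

static uint64_t dma_base = 0x10000000;   /* illustrative base of the pool */
static int used[POOL_PAGES];             /* toy allocator state */

/* Hand out one page and return its address: base + (index << PAGE_SHIFT). */
static uint64_t alloc_dma_page(void)
{
    for (int pos = 0; pos < POOL_PAGES; pos++) {
        if (!used[pos]) {
            used[pos] = 1;
            return dma_base + ((uint64_t)pos << PAGE_SHIFT);
        }
    }
    return 0; /* pool exhausted */
}

/* Convert an address back to a page index, with the same bounds-check idea. */
static void free_dma_page(uint64_t addr)
{
    uint64_t pos = (addr - dma_base) >> PAGE_SHIFT;

    if (addr < dma_base || pos >= POOL_PAGES) {
        fprintf(stderr, "bad free: %#llx\n", (unsigned long long)addr);
        return;
    }
    used[pos] = 0;
}

int main(void)
{
    uint64_t a = alloc_dma_page();
    uint64_t b = alloc_dma_page();
    printf("a=%#llx b=%#llx\n", (unsigned long long)a, (unsigned long long)b);
    free_dma_page(a);
    free_dma_page(b);
    return 0;
}
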
/arch/arm/mach-omap2/
dma.c
86 static void __iomem *dma_base; variable
89 void __iomem *addr = dma_base; in dma_write()
99 void __iomem *addr = dma_base; in dma_read()
250 dma_base = ioremap(mem->start, resource_size(mem)); in omap2_system_dma_init_dev()
251 if (!dma_base) { in omap2_system_dma_init_dev()
/arch/arm/mach-davinci/
sram.c
28 dma_addr_t dma_base = davinci_soc_info.sram_dma; in sram_alloc() local
32 if (!sram_pool || (dma && !dma_base)) in sram_alloc()
/arch/arm/mach-omap1/
dma.c
176 static void __iomem *dma_base; variable
179 void __iomem *addr = dma_base; in dma_write()
191 void __iomem *addr = dma_base; in dma_read()
291 dma_base = ioremap(res[0].start, resource_size(&res[0])); in omap1_system_dma_init()
292 if (!dma_base) { in omap1_system_dma_init()
370 iounmap(dma_base); in omap1_system_dma_init()
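In the OMAP hits, dma_base is instead an MMIO cookie: ioremap() maps the controller's register resource, a NULL result aborts init, registers are accessed at fixed offsets from the base, and iounmap() releases it on the error path. A minimal kernel-style sketch of that pattern, with an illustrative register offset and function name (it only builds inside a kernel tree):

#include <linux/errno.h>
#include <linux/io.h>
#include <linux/ioport.h>

/* Sketch only: 'MY_DMA_REG' and 'my_dma_init' are illustrative names. */
#define MY_DMA_REG 0x24

static void __iomem *dma_base;

static int my_dma_init(struct resource *mem)
{
    dma_base = ioremap(mem->start, resource_size(mem));
    if (!dma_base)
        return -ENOMEM;

    writel(0, dma_base + MY_DMA_REG);   /* register access = base + offset */
    (void)readl(dma_base + MY_DMA_REG);

    iounmap(dma_base);                  /* unmap when done / on error */
    return 0;
}
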
/arch/alpha/kernel/
pci_iommu.c
101 arena->dma_base = base; in iommu_arena_new_node()
130 base = arena->dma_base >> PAGE_SHIFT; in iommu_arena_find_pages()
297 if (!arena || arena->dma_base + arena->size - 1 > max_dma) in pci_map_single_1()
316 ret = arena->dma_base + dma_ofs * PAGE_SIZE; in pci_map_single_1()
399 if (!arena || dma_addr < arena->dma_base) in alpha_pci_unmap_page()
402 dma_ofs = (dma_addr - arena->dma_base) >> PAGE_SHIFT; in alpha_pci_unmap_page()
406 dma_addr, arena->dma_base, arena->size); in alpha_pci_unmap_page()
608 out->dma_address = arena->dma_base + dma_ofs*PAGE_SIZE + paddr; in sg_fill()
687 if (!arena || arena->dma_base + arena->size - 1 > max_dma) in alpha_pci_map_sg()
750 if (!arena || arena->dma_base + arena->size - 1 > max_dma) in alpha_pci_unmap_sg()
[all …]
core_titan.c
324 port->wsba[0].csr = hose->sg_isa->dma_base | 3; in titan_init_one_pachip_port()
332 port->wsba[2].csr = hose->sg_pci->dma_base | 3; in titan_init_one_pachip_port()
494 baddr >= (unsigned long)hose->sg_pci->dma_base && in titan_ioremap()
495 last < (unsigned long)hose->sg_pci->dma_base + hose->sg_pci->size){ in titan_ioremap()
500 baddr -= hose->sg_pci->dma_base; in titan_ioremap()
501 last -= hose->sg_pci->dma_base; in titan_ioremap()
608 aper->arena->dma_base + aper->pg_start * PAGE_SIZE; in titan_agp_setup()
698 unsigned long baddr = addr - aper->arena->dma_base; in titan_agp_translate()
core_marvel.c
289 hose->sg_isa->dma_base | wbase_m_ena | wbase_m_sg; in io7_init_hose()
307 hose->sg_pci->dma_base | wbase_m_ena | wbase_m_sg; in io7_init_hose()
722 baddr >= (unsigned long)hose->sg_pci->dma_base && in marvel_ioremap()
723 last < (unsigned long)hose->sg_pci->dma_base + hose->sg_pci->size) { in marvel_ioremap()
728 baddr -= hose->sg_pci->dma_base; in marvel_ioremap()
729 last -= hose->sg_pci->dma_base; in marvel_ioremap()
922 aper->arena->dma_base + aper->pg_start * PAGE_SIZE; in marvel_agp_setup()
1034 unsigned long baddr = addr - aper->arena->dma_base; in marvel_agp_translate()
pci_impl.h
139 dma_addr_t dma_base; member
pci.c
99 if (pci && pci->dma_base + pci->size >= 0xfff00000UL) in quirk_cypress()
100 pci->size = 0xfff00000UL - pci->dma_base; in quirk_cypress()
324 sg_base = hose->sg_pci ? hose->sg_pci->dma_base : ~0; in common_init_pci()
core_tsunami.c
332 pchip->wsba[0].csr = hose->sg_isa->dma_base | 3; in tsunami_init_one_pchip()
336 pchip->wsba[1].csr = hose->sg_pci->dma_base | 3; in tsunami_init_one_pchip()
core_mcpcia.c
373 *(vuip)MCPCIA_W0_BASE(mid) = hose->sg_isa->dma_base | 3; in mcpcia_startup_hose()
377 *(vuip)MCPCIA_W1_BASE(mid) = hose->sg_pci->dma_base | 3; in mcpcia_startup_hose()
core_apecs.c
357 *(vuip)APECS_IOC_PB2R = hose->sg_isa->dma_base | 0x000c0000; in apecs_init_arch()
core_wildfire.c
118 pci->pci_window[0].wbase.csr = hose->sg_isa->dma_base | 3; in wildfire_init_hose()
130 pci->pci_window[3].wbase.csr = hose->sg_pci->dma_base | 3; in wildfire_init_hose()
core_lca.c
282 *(vulp)LCA_IOC_W_BASE0 = hose->sg_isa->dma_base | (3UL << 32); in lca_init_arch()
core_cia.c
371 addr0 = arena->dma_base; in verify_tb_operation()
731 *(vip)CIA_IOC_PCI_W0_BASE = hose->sg_isa->dma_base | 3; in do_init_arch()
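The alpha IOMMU code keeps dma_base and size per arena: an arena is usable only if dma_base + size - 1 fits under the device's max_dma, a mapped page becomes bus address dma_base + dma_ofs * PAGE_SIZE, and unmapping reverses that with (dma_addr - dma_base) >> PAGE_SHIFT. A self-contained sketch of those three calculations; the struct and values are illustrative stand-ins, not the kernel's pci_iommu_arena.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 13                 /* alpha uses 8 KiB pages */
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

struct arena {                        /* illustrative, not the kernel struct */
    uint64_t dma_base;
    uint64_t size;
};

/* An arena serves a device only if its whole window sits below max_dma. */
static bool arena_fits(const struct arena *a, uint64_t max_dma)
{
    return a->dma_base + a->size - 1 <= max_dma;
}

/* Page slot -> bus address, and back. */
static uint64_t slot_to_dma(const struct arena *a, uint64_t dma_ofs)
{
    return a->dma_base + dma_ofs * PAGE_SIZE;
}

static uint64_t dma_to_slot(const struct arena *a, uint64_t dma_addr)
{
    return (dma_addr - a->dma_base) >> PAGE_SHIFT;
}

int main(void)
{
    struct arena a = { .dma_base = 0x800000, .size = 8 * 1024 * 1024 };

    printf("fits below 4G: %d\n", arena_fits(&a, 0xffffffffUL));
    printf("slot 3 -> %#llx -> slot %llu\n",
           (unsigned long long)slot_to_dma(&a, 3),
           (unsigned long long)dma_to_slot(&a, slot_to_dma(&a, 3)));
    return 0;
}
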
/arch/arm64/mm/
dma-mapping.c
801 u64 dma_base; member
814 u64 dma_base, u64 size) in do_iommu_attach() argument
843 if (iommu_dma_init_domain(domain, dma_base, size)) in do_iommu_attach()
861 u64 dma_base, u64 size) in queue_iommu_attach() argument
871 iommudata->dma_base = dma_base; in queue_iommu_attach()
890 master->dma_base, master->size)) { in __iommu_attach_notifier()
944 static void __iommu_setup_dma_ops(struct device *dev, u64 dma_base, u64 size, in __iommu_setup_dma_ops() argument
959 do_iommu_attach(dev, ops, dma_base, size); in __iommu_setup_dma_ops()
962 queue_iommu_attach(dev, ops, dma_base, size); in __iommu_setup_dma_ops()
981 static void __iommu_setup_dma_ops(struct device *dev, u64 dma_base, u64 size, in __iommu_setup_dma_ops() argument
[all …]
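The arm64 hits show dma_base and size describing the IOVA window passed to the IOMMU layer, and a deferred path that records the window per master and attaches later from a notifier. A toy model of that queue-then-replay bookkeeping, with illustrative structures and names (not the kernel's):

#include <stdint.h>
#include <stdio.h>

struct master {                 /* illustrative, not the kernel structure */
    const char *name;
    uint64_t dma_base;
    uint64_t size;
    struct master *next;
};

static struct master *pending;

/* Park the window until the IOMMU can actually be attached. */
static void queue_attach(struct master *m, uint64_t dma_base, uint64_t size)
{
    m->dma_base = dma_base;
    m->size = size;
    m->next = pending;
    pending = m;
}

static void do_attach(struct master *m)
{
    printf("attach %s: window [%#llx, %#llx)\n", m->name,
           (unsigned long long)m->dma_base,
           (unsigned long long)(m->dma_base + m->size));
}

/* What the notifier-driven replay would do once the IOMMU shows up. */
static void replay_pending(void)
{
    for (struct master *m = pending; m; m = m->next)
        do_attach(m);
    pending = NULL;
}

int main(void)
{
    struct master m = { .name = "dev0" };
    queue_attach(&m, 0, 1ULL << 32);   /* e.g. a 4 GiB window at offset 0 */
    replay_pending();
    return 0;
}
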
/arch/arm/mach-rpc/
dma.c
157 unsigned long dma_base = idma->base; in iomd_enable_dma() local
180 iomd_writeb(DMA_CR_C, dma_base + CR); in iomd_enable_dma()
187 iomd_writeb(ctrl, dma_base + CR); in iomd_enable_dma()
194 unsigned long dma_base = idma->base; in iomd_disable_dma() local
200 iomd_writeb(0, dma_base + CR); in iomd_disable_dma()
/arch/mips/include/asm/
dma-mapping.h
38 static inline void arch_setup_dma_ops(struct device *dev, u64 dma_base, in arch_setup_dma_ops() argument
/arch/arm64/include/asm/
dma-mapping.h
50 void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size,
/arch/powerpc/platforms/pseries/
iommu.c
393 __be64 dma_base; /* address hi,lo */ member
453 dma_offset = next + be64_to_cpu(maprange->dma_base); in tce_clearrange_multi_pSeriesLP()
478 be64_to_cpu(maprange->dma_base); in tce_setrange_multi_pSeriesLP()
523 dma_offset = next + be64_to_cpu(maprange->dma_base); in tce_setrange_multi_pSeriesLP()
885 dma_addr = be64_to_cpu(direct64->dma_base); in find_existing_ddw()
1115 ddwprop->dma_base = cpu_to_be64(((u64)create.addr_hi << 32) | in enable_ddw()
1148 dma_addr = be64_to_cpu(ddwprop->dma_base); in enable_ddw()
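In the pseries code, the dynamic-DMA-window property stores dma_base as a big-endian 64-bit value assembled from the hypervisor's addr_hi/addr_lo pair with cpu_to_be64() and read back with be64_to_cpu(). A self-contained userspace sketch of that packing, assuming glibc's <endian.h> helpers and illustrative values:

#include <endian.h>     /* htobe64 / be64toh (glibc/musl) */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t addr_hi = 0x00000008;   /* illustrative values */
    uint32_t addr_lo = 0x40000000;

    /* Store: combine hi/lo and keep the property big-endian, as with cpu_to_be64(). */
    uint64_t prop_dma_base = htobe64(((uint64_t)addr_hi << 32) | addr_lo);

    /* Read back: convert from big-endian, as with be64_to_cpu(). */
    uint64_t dma_addr = be64toh(prop_dma_base);

    printf("dma_base = %#llx\n", (unsigned long long)dma_addr);
    return 0;
}
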
/arch/arm/include/asm/
dma-mapping.h
127 extern void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size,
/arch/mips/txx9/generic/
setup.c
854 unsigned int dma_base = dmac_id * TXX9_DMA_MAX_NR_CHANNELS; in txx9_aclc_init() local
865 .start = dma_base + dma_chan_out, in txx9_aclc_init()
869 .start = dma_base + dma_chan_in, in txx9_aclc_init()
/arch/arm/mm/
dma-mapping.c
2093 static bool arm_setup_iommu_dma_ops(struct device *dev, u64 dma_base, u64 size, in arm_setup_iommu_dma_ops() argument
2101 mapping = arm_iommu_create_mapping(dev->bus, dma_base, size); in arm_setup_iommu_dma_ops()
2131 static bool arm_setup_iommu_dma_ops(struct device *dev, u64 dma_base, u64 size, in arm_setup_iommu_dma_ops() argument
2148 void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size, in arch_setup_dma_ops() argument
2154 if (arm_setup_iommu_dma_ops(dev, dma_base, size, iommu)) in arch_setup_dma_ops()
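On 32-bit ARM, arch_setup_dma_ops() forwards dma_base and size to arm_setup_iommu_dma_ops(), which creates an IOMMU mapping covering exactly that window via arm_iommu_create_mapping(dev->bus, dma_base, size). A kernel-style sketch of that call sequence using the ARM dma-iommu API visible in the hits; the wrapper name and error handling are illustrative.

#include <linux/device.h>
#include <linux/err.h>
#include <asm/dma-iommu.h>

/* Sketch only: create an IOMMU mapping spanning [dma_base, dma_base + size)
 * and attach the device to it. 'my_attach' is an illustrative name. */
static int my_attach(struct device *dev, u64 dma_base, u64 size)
{
    struct dma_iommu_mapping *mapping;
    int ret;

    mapping = arm_iommu_create_mapping(dev->bus, dma_base, size);
    if (IS_ERR(mapping))
        return PTR_ERR(mapping);

    ret = arm_iommu_attach_device(dev, mapping);
    if (ret)
        arm_iommu_release_mapping(mapping);

    return ret;
}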