/arch/mips/sgi-ip32/ |
D | ip32-dma.c |
      23  dma_addr_t dma_addr = paddr & RAM_OFFSET_MASK;  in phys_to_dma() local
      26  dma_addr += CRIME_HI_MEM_BASE;  in phys_to_dma()
      27  return dma_addr;  in phys_to_dma()
      30  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
      32  phys_addr_t paddr = dma_addr & RAM_OFFSET_MASK;  in dma_to_phys()
      34  if (dma_addr >= 256*1024*1024)  in dma_to_phys()
|
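The ip32-dma.c hits above show a fixed mask-and-offset translation between physical and DMA addresses. Below is a minimal standalone sketch of that pattern; the constant values and the simplified signatures (no struct device argument) are assumptions for illustration, not the real CRIME definitions.

/* Standalone sketch of fixed-offset phys <-> dma translation,
 * modelled on the ip32-dma.c hits above. Constants are hypothetical. */
#include <stdint.h>
#include <stdio.h>

#define RAM_OFFSET_MASK   0x3fffffffULL   /* assumed: bits that carry the RAM offset */
#define CRIME_HI_MEM_BASE 0x40000000ULL   /* assumed: bus window the device sees RAM through */

typedef uint64_t dma_addr_t;
typedef uint64_t phys_addr_t;

static dma_addr_t phys_to_dma(phys_addr_t paddr)
{
	/* keep the offset within RAM, then rebase into the device window */
	dma_addr_t dma_addr = paddr & RAM_OFFSET_MASK;

	dma_addr += CRIME_HI_MEM_BASE;
	return dma_addr;
}

static phys_addr_t dma_to_phys(dma_addr_t dma_addr)
{
	/* inverse direction: masking strips the window base again */
	return dma_addr & RAM_OFFSET_MASK;
}

int main(void)
{
	phys_addr_t paddr = 0x01234000ULL;
	dma_addr_t dma = phys_to_dma(paddr);

	printf("phys %#llx -> dma %#llx -> phys %#llx\n",
	       (unsigned long long)paddr,
	       (unsigned long long)dma,
	       (unsigned long long)dma_to_phys(dma));
	return 0;
}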
/arch/s390/pci/ |
D | pci_dma.c |
      99  unsigned long *dma_walk_cpu_trans(unsigned long *rto, dma_addr_t dma_addr)  in dma_walk_cpu_trans() argument
     104  rtx = calc_rtx(dma_addr);  in dma_walk_cpu_trans()
     109  sx = calc_sx(dma_addr);  in dma_walk_cpu_trans()
     114  px = calc_px(dma_addr);  in dma_walk_cpu_trans()
     134  dma_addr_t dma_addr, size_t size, int flags)  in __dma_update_trans() argument
     152  entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr);  in __dma_update_trans()
     159  dma_addr += PAGE_SIZE;  in __dma_update_trans()
     167  dma_addr -= PAGE_SIZE;  in __dma_update_trans()
     168  entry = dma_walk_cpu_trans(zdev->dma_table, dma_addr);  in __dma_update_trans()
     179  static int __dma_purge_tlb(struct zpci_dev *zdev, dma_addr_t dma_addr,  in __dma_purge_tlb() argument
     [all …]
|
/arch/mips/loongson2ef/lemote-2f/ |
D | dma.c |
       9  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
      11  if (dma_addr > 0x8fffffff)  in dma_to_phys()
      12  return dma_addr;  in dma_to_phys()
      13  return dma_addr & 0x0fffffff;  in dma_to_phys()
|
/arch/mips/include/asm/mach-rc32434/ |
D | dma_v.h |
      42  static inline void rc32434_start_dma(struct dma_reg *ch, u32 dma_addr)  in rc32434_start_dma() argument
      45  __raw_writel(dma_addr, &ch->dmadptr);  in rc32434_start_dma()
      48  static inline void rc32434_chain_dma(struct dma_reg *ch, u32 dma_addr)  in rc32434_chain_dma() argument
      50  __raw_writel(dma_addr, &ch->dmandptr);  in rc32434_chain_dma()
|
/arch/mips/bmips/ |
D | dma.c |
      55  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
      60  if (dma_addr >= r->parent_addr &&  in dma_to_phys()
      61  dma_addr < (r->parent_addr + r->size))  in dma_to_phys()
      62  return dma_addr - r->parent_addr + r->child_addr;  in dma_to_phys()
      64  return dma_addr;  in dma_to_phys()
|
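The bmips dma.c hits above translate by walking a table of parent/child address ranges and falling back to an identity mapping. A minimal standalone sketch of that range-remapping lookup follows; the struct layout and the sample windows are assumptions for illustration.

/* Standalone sketch of range-based dma -> phys remapping, in the
 * spirit of the bmips dma.c hits above. Window contents are made up. */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t dma_addr_t;
typedef uint64_t phys_addr_t;

struct dma_range {
	dma_addr_t  parent_addr;  /* start of the window as the device sees it */
	phys_addr_t child_addr;   /* corresponding CPU physical start */
	uint64_t    size;         /* window length in bytes */
};

/* hypothetical remap windows */
static const struct dma_range ranges[] = {
	{ .parent_addr = 0x20000000, .child_addr = 0x00000000, .size = 0x10000000 },
	{ .parent_addr = 0x90000000, .child_addr = 0x10000000, .size = 0x10000000 },
};

static phys_addr_t dma_to_phys(dma_addr_t dma_addr)
{
	for (size_t i = 0; i < sizeof(ranges) / sizeof(ranges[0]); i++) {
		const struct dma_range *r = &ranges[i];

		if (dma_addr >= r->parent_addr &&
		    dma_addr < r->parent_addr + r->size)
			return dma_addr - r->parent_addr + r->child_addr;
	}
	/* no window matched: identity mapping */
	return dma_addr;
}

int main(void)
{
	printf("%#llx\n", (unsigned long long)dma_to_phys(0x20004000));
	return 0;
}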
/arch/arm/common/ |
D | dmabounce.c |
     210  dma_addr_t dma_addr, const char *where)  in find_safe_buffer_dev() argument
     214  if (dma_mapping_error(dev, dma_addr)) {  in find_safe_buffer_dev()
     218  return find_safe_buffer(dev->archdata.dmabounce, dma_addr);  in find_safe_buffer_dev()
     221  static int needs_bounce(struct device *dev, dma_addr_t dma_addr, size_t size)  in needs_bounce() argument
     237  if ((dma_addr | (dma_addr + size - 1)) & ~mask)  in needs_bounce()
     241  return !!dev->archdata.dmabounce->needs_bounce(dev, dma_addr, size);  in needs_bounce()
     318  dma_addr_t dma_addr;  in dmabounce_map_page() local
     324  dma_addr = pfn_to_dma(dev, page_to_pfn(page)) + offset;  in dmabounce_map_page()
     326  ret = needs_bounce(dev, dma_addr, size);  in dmabounce_map_page()
     331  arm_dma_ops.sync_single_for_device(dev, dma_addr, size, dir);  in dmabounce_map_page()
     [all …]
|
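The dmabounce.c hits above decide whether a mapping must be bounced by checking both the first and last byte of the buffer against the device's DMA mask in a single expression. The standalone sketch below shows that window check; the 64 MiB mask and the sample addresses are assumed values.

/* Standalone sketch of the "does this buffer fall outside the device's
 * DMA window?" check seen in the dmabounce.c hits above. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t dma_addr_t;

static bool needs_bounce(dma_addr_t dma_addr, uint64_t size, uint64_t dma_mask)
{
	/*
	 * OR-ing the first and last byte addresses and testing the bits
	 * above the mask catches a buffer that starts or ends (or both)
	 * beyond what the device can address.
	 */
	return ((dma_addr | (dma_addr + size - 1)) & ~dma_mask) != 0;
}

int main(void)
{
	uint64_t mask_64m = (1ULL << 26) - 1;  /* assumed: device can only address 64 MiB */

	printf("%d\n", needs_bounce(0x03f00000, 0x200000, mask_64m)); /* crosses 64 MiB -> 1 */
	printf("%d\n", needs_bounce(0x00100000, 0x1000,   mask_64m)); /* fits -> 0 */
	return 0;
}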
/arch/mips/loongson2ef/fuloong-2e/ |
D | dma.c |
       9  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
      11  return dma_addr & 0x7fffffff;  in dma_to_phys()
|
/arch/x86/kernel/ |
D | amd_gart_64.c |
     247  static void gart_unmap_page(struct device *dev, dma_addr_t dma_addr,  in gart_unmap_page() argument
     255  if (WARN_ON_ONCE(dma_addr == DMA_MAPPING_ERROR))  in gart_unmap_page()
     263  if (dma_addr < iommu_bus_base ||  in gart_unmap_page()
     264  dma_addr >= iommu_bus_base + iommu_size)  in gart_unmap_page()
     267  iommu_page = (dma_addr - iommu_bus_base)>>PAGE_SHIFT;  in gart_unmap_page()
     268  npages = iommu_num_pages(dma_addr, size, PAGE_SIZE);  in gart_unmap_page()
     466  gart_alloc_coherent(struct device *dev, size_t size, dma_addr_t *dma_addr,  in gart_alloc_coherent() argument
     471  vaddr = dma_direct_alloc(dev, size, dma_addr, flag, attrs);  in gart_alloc_coherent()
     476  *dma_addr = dma_map_area(dev, virt_to_phys(vaddr), size,  in gart_alloc_coherent()
     479  if (unlikely(*dma_addr == DMA_MAPPING_ERROR))  in gart_alloc_coherent()
     [all …]
|
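The amd_gart_64.c unmap hits above bounds-check the bus address against the aperture and then derive a slot index and page count before releasing entries. The sketch below reproduces only that arithmetic; the aperture base, aperture size, and page shift are hypothetical stand-ins.

/* Standalone sketch of the aperture arithmetic on the GART unmap path
 * above: bounds-check the bus address, then derive the slot index and
 * the number of pages covered. Aperture values are hypothetical. */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12

typedef uint64_t dma_addr_t;

static const dma_addr_t iommu_bus_base = 0x80000000ULL; /* assumed aperture base */
static const uint64_t   iommu_size     = 64ULL << 20;   /* assumed 64 MiB aperture */

static unsigned long num_pages(dma_addr_t addr, uint64_t len)
{
	/* pages touched by [addr, addr + len), counting partial first/last pages */
	uint64_t first = addr >> PAGE_SHIFT;
	uint64_t last  = (addr + len - 1) >> PAGE_SHIFT;

	return (unsigned long)(last - first + 1);
}

int main(void)
{
	dma_addr_t dma_addr = iommu_bus_base + 0x5010; /* some previously mapped bus address */
	uint64_t size = 0x3000;

	if (dma_addr < iommu_bus_base || dma_addr >= iommu_bus_base + iommu_size) {
		puts("not an aperture address; nothing to free");
		return 0;
	}

	unsigned long iommu_page = (dma_addr - iommu_bus_base) >> PAGE_SHIFT;
	unsigned long npages = num_pages(dma_addr, size);

	printf("slot %lu, %lu page(s)\n", iommu_page, npages);
	return 0;
}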
/arch/alpha/kernel/ |
D | pci_iommu.c |
     349  static void alpha_pci_unmap_page(struct device *dev, dma_addr_t dma_addr,  in alpha_pci_unmap_page() argument
     361  if (dma_addr >= __direct_map_base  in alpha_pci_unmap_page()
     362  && dma_addr < __direct_map_base + __direct_map_size) {  in alpha_pci_unmap_page()
     366  dma_addr, size, __builtin_return_address(0));  in alpha_pci_unmap_page()
     371  if (dma_addr > 0xffffffff) {  in alpha_pci_unmap_page()
     373  dma_addr, size, __builtin_return_address(0));  in alpha_pci_unmap_page()
     378  if (!arena || dma_addr < arena->dma_base)  in alpha_pci_unmap_page()
     381  dma_ofs = (dma_addr - arena->dma_base) >> PAGE_SHIFT;  in alpha_pci_unmap_page()
     385  dma_addr, arena->dma_base, arena->size);  in alpha_pci_unmap_page()
     390  npages = iommu_num_pages(dma_addr, size, PAGE_SIZE);  in alpha_pci_unmap_page()
     [all …]
|
/arch/arm/include/asm/ |
D | dma-mapping.h |
      68  void *cpu_addr, dma_addr_t dma_addr, size_t size,
     124  void *cpu_addr, dma_addr_t dma_addr, size_t size,
|
/arch/arm/mm/ |
D | dma-mapping.c |
     222  void *cpu_addr, dma_addr_t dma_addr, size_t size,
     760  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in __arm_dma_mmap() argument
     766  unsigned long pfn = dma_to_pfn(dev, dma_addr);  in __arm_dma_mmap()
     786  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in arm_coherent_dma_mmap() argument
     789  return __arm_dma_mmap(dev, vma, cpu_addr, dma_addr, size, attrs);  in arm_coherent_dma_mmap()
     793  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in arm_dma_mmap() argument
     797  return __arm_dma_mmap(dev, vma, cpu_addr, dma_addr, size, attrs);  in arm_dma_mmap()
    1317  dma_addr_t dma_addr, iova;  in __iommu_create_mapping() local
    1320  dma_addr = __alloc_iova(mapping, size);  in __iommu_create_mapping()
    1321  if (dma_addr == DMA_MAPPING_ERROR)  in __iommu_create_mapping()
     [all …]
|
/arch/powerpc/kernel/ |
D | iommu.c |
     388  static bool iommu_free_check(struct iommu_table *tbl, dma_addr_t dma_addr,  in iommu_free_check() argument
     393  entry = dma_addr >> tbl->it_page_shift;  in iommu_free_check()
     401  printk(KERN_INFO "\tdma_addr = 0x%llx\n", (u64)dma_addr);  in iommu_free_check()
     435  static void __iommu_free(struct iommu_table *tbl, dma_addr_t dma_addr,  in __iommu_free() argument
     442  entry = dma_addr >> tbl->it_page_shift;  in __iommu_free()
     447  if (!iommu_free_check(tbl, dma_addr, npages))  in __iommu_free()
     457  static void iommu_free(struct iommu_table *tbl, dma_addr_t dma_addr,  in iommu_free() argument
     460  __iommu_free(tbl, dma_addr, npages);  in iommu_free()
     475  dma_addr_t dma_next = 0, dma_addr;  in ppc_iommu_map_sg() local
     531  dma_addr = entry << tbl->it_page_shift;  in ppc_iommu_map_sg()
     [all …]
|
/arch/arm/mach-rpc/ |
D | dma.c |
      32  dma_addr_t dma_addr;  member
      59  idma->cur_addr = idma->dma_addr;  in iomd_get_next_sg()
      73  idma->dma_addr += end - offset;  in iomd_get_next_sg()
      78  idma->dma_addr = idma->dma.sg->dma_address;  in iomd_get_next_sg()
     178  idma->dma_addr = idma->dma.sg->dma_address;  in iomd_enable_dma()
|
/arch/sparc/include/asm/ |
D | iommu-common.h | 50 u64 dma_addr, unsigned long npages,
|
/arch/sparc/kernel/ |
D | ioport.c |
     357  dma_addr_t dma_addr, unsigned long attrs)  in arch_dma_free() argument
     364  dma_make_coherent(dma_addr, size);  in arch_dma_free()
     366  free_pages((unsigned long)phys_to_virt(dma_addr), get_order(size));  in arch_dma_free()
|
D | iommu.c |
     438  dma_addr_t dma_next = 0, dma_addr;  in dma_4u_map_sg() local
     504  dma_addr = iommu->tbl.table_map_base +  in dma_4u_map_sg()
     506  dma_addr |= (s->offset & ~IO_PAGE_MASK);  in dma_4u_map_sg()
     521  if ((dma_addr != dma_next) ||  in dma_4u_map_sg()
     536  outs->dma_address = dma_addr;  in dma_4u_map_sg()
     542  dma_next = dma_addr + slen;  in dma_4u_map_sg()
|
D | iommu-common.c |
     246  void iommu_tbl_range_free(struct iommu_map_table *iommu, u64 dma_addr,  in iommu_tbl_range_free() argument
     254  entry = (dma_addr - iommu->table_map_base) >> shift;  in iommu_tbl_range_free()
|
D | pci_sun4v.c |
     474  dma_addr_t dma_next = 0, dma_addr;  in dma_4v_map_sg() local
     547  dma_addr = tbl->table_map_base + (entry << IO_PAGE_SHIFT);  in dma_4v_map_sg()
     548  dma_addr |= (s->offset & ~IO_PAGE_MASK);  in dma_4v_map_sg()
     564  if ((dma_addr != dma_next) ||  in dma_4v_map_sg()
     579  outs->dma_address = dma_addr;  in dma_4v_map_sg()
     585  dma_next = dma_addr + slen;  in dma_4v_map_sg()
|
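Both sparc map_sg entries above (dma_4u_map_sg and dma_4v_map_sg) build each segment's bus address from a table base plus an allocated entry index, keep the intra-page offset, and merge a segment into the previous one only when it begins exactly where the last one ended. The sketch below shows that composition and coalescing test with assumed values; the page shift, window base, and entry numbers are illustrative only.

/* Standalone sketch of how the sparc map_sg hits above compose a
 * per-segment bus address and decide whether it can be coalesced
 * with the previous segment. All numbers are illustrative. */
#include <stdint.h>
#include <stdio.h>

#define IO_PAGE_SHIFT 13                                  /* assumed 8 KiB IOMMU page */
#define IO_PAGE_MASK  (~((1UL << IO_PAGE_SHIFT) - 1))

typedef uint64_t dma_addr_t;

static const dma_addr_t table_map_base = 0xfff0000000ULL; /* assumed IOVA window base */

static dma_addr_t compose_dma_addr(unsigned long entry, unsigned long offset)
{
	/* bus address = window base + slot, keeping the intra-page offset */
	dma_addr_t dma_addr = table_map_base + ((dma_addr_t)entry << IO_PAGE_SHIFT);

	dma_addr |= (offset & ~IO_PAGE_MASK);
	return dma_addr;
}

int main(void)
{
	/* two hypothetical scatterlist segments mapped to consecutive slots */
	dma_addr_t seg0 = compose_dma_addr(4, 0x100);
	uint64_t   seg0_len = 0x1f00;
	dma_addr_t dma_next = seg0 + seg0_len;  /* where a mergeable segment must start */

	dma_addr_t seg1 = compose_dma_addr(5, 0x0);

	if (seg1 != dma_next)
		puts("start a new DMA segment");
	else
		puts("coalesce with the previous segment");
	return 0;
}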
/arch/arm/mach-ixp4xx/ |
D | common.c |
     367  static int ixp4xx_needs_bounce(struct device *dev, dma_addr_t dma_addr, size_t size)  in ixp4xx_needs_bounce() argument
     369  return (dma_addr + size) > SZ_64M;  in ixp4xx_needs_bounce()
|
/arch/mips/ath25/ |
D | ar5312.c |
      63  u32 dma_addr = ar5312_rst_reg_read(AR5312_DMAADDR); /* clears error */  in ar5312_ahb_err_handler() local
      66  proc_addr, proc1, dma_addr, dma1);  in ar5312_ahb_err_handler()
|
/arch/mips/jazz/ |
D | jazzdma.c |
     535  static void jazz_dma_unmap_page(struct device *dev, dma_addr_t dma_addr,  in jazz_dma_unmap_page() argument
     539  arch_sync_dma_for_cpu(vdma_log2phys(dma_addr), size, dir);  in jazz_dma_unmap_page()
     540  vdma_free(dma_addr);  in jazz_dma_unmap_page()
|
/arch/sparc/mm/ |
D | iommu.c |
     279  static void sbus_iommu_unmap_page(struct device *dev, dma_addr_t dma_addr,  in sbus_iommu_unmap_page() argument
     283  unsigned int busa = dma_addr & PAGE_MASK;  in sbus_iommu_unmap_page()
     284  unsigned long off = dma_addr & ~PAGE_MASK;  in sbus_iommu_unmap_page()
|
/arch/s390/include/asm/ |
D | pci_dma.h | 188 unsigned long *dma_walk_cpu_trans(unsigned long *rto, dma_addr_t dma_addr);
|
/arch/mips/pci/ |
D | pci-ar2315.c |
     179  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
     181  return dma_addr - ar2315_dev_offset(dev);  in dma_to_phys()
|
/arch/powerpc/platforms/pseries/ |
D | iommu.c |
     894  static bool find_existing_ddw(struct device_node *pdn, u64 *dma_addr, int *window_shift)  in find_existing_ddw() argument
     905  *dma_addr = be64_to_cpu(dma64->dma_base);  in find_existing_ddw()
    1191  static struct property *ddw_property_create(const char *propname, u32 liobn, u64 dma_addr,  in ddw_property_create() argument
    1213  ddwprop->dma_base = cpu_to_be64(dma_addr);  in ddw_property_create()
|