/arch/arc/mm/

  ioremap.c
     17  void __iomem *ioremap(unsigned long paddr, unsigned long size)   in ioremap() argument
     22  end = paddr + size - 1;   in ioremap()
     23  if (!size || (end < paddr))   in ioremap()
     27  if (paddr >= ARC_UNCACHED_ADDR_SPACE)   in ioremap()
     28  return (void __iomem *)paddr;   in ioremap()
     30  return ioremap_prot(paddr, size, PAGE_KERNEL_NO_CACHE);   in ioremap()
     41  void __iomem *ioremap_prot(phys_addr_t paddr, unsigned long size,   in ioremap_prot() argument
     50  end = paddr + size - 1;   in ioremap_prot()
     51  if ((!size) || (end < paddr))   in ioremap_prot()
     62  off = paddr & ~PAGE_MASK;   in ioremap_prot()
         [all …]
  dma.c
     32  void *paddr;   in dma_alloc_noncoherent() local
     35  paddr = alloc_pages_exact(size, gfp);   in dma_alloc_noncoherent()
     36  if (!paddr)   in dma_alloc_noncoherent()
     40  *dma_handle = plat_kernel_addr_to_dma(dev, paddr);   in dma_alloc_noncoherent()
     42  return paddr;   in dma_alloc_noncoherent()
     57  void *paddr, *kvaddr;   in dma_alloc_coherent() local
     60  paddr = alloc_pages_exact(size, gfp);   in dma_alloc_coherent()
     61  if (!paddr)   in dma_alloc_coherent()
     65  kvaddr = ioremap_nocache((unsigned long)paddr, size);   in dma_alloc_coherent()
     70  *dma_handle = plat_kernel_addr_to_dma(dev, paddr);   in dma_alloc_coherent()
         [all …]
  cache_arc700.c
    211  static inline void __cache_line_loop(unsigned long paddr, unsigned long vaddr,   in __cache_line_loop() argument
    239  sz += paddr & ~CACHE_LINE_MASK;   in __cache_line_loop()
    240  paddr &= CACHE_LINE_MASK;   in __cache_line_loop()
    248  paddr |= (vaddr >> PAGE_SHIFT) & 0x1F;   in __cache_line_loop()
    252  write_aux_reg(aux_tag, paddr);   in __cache_line_loop()
    259  write_aux_reg(aux_tag, paddr);   in __cache_line_loop()
    260  paddr += L1_CACHE_BYTES;   in __cache_line_loop()
    266  write_aux_reg(aux_cmd, paddr);   in __cache_line_loop()
    267  paddr += L1_CACHE_BYTES;   in __cache_line_loop()
    335  static inline void __dc_line_op(unsigned long paddr, unsigned long vaddr,   in __dc_line_op() argument
         [all …]
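The ioremap.c hits at the top of this group repeat one small pattern: validate the requested physical range, rejecting a zero size and an end address that wrapped around, before deciding whether the address already lies in the permanently uncached window. The sketch below is a minimal, self-contained illustration of that check; the window base and the sample addresses are invented for the example and are not ARC's real values.

    #include <stdio.h>

    /* Invented uncached-window base, standing in for ARC_UNCACHED_ADDR_SPACE. */
    #define UNCACHED_BASE 0xc0000000UL

    /*
     * Range check in the style of the ioremap()/ioremap_prot() hits above:
     * compute the inclusive end and reject empty ranges or wraparound,
     * which shows up as end < paddr.
     */
    static int phys_range_ok(unsigned long paddr, unsigned long size)
    {
        unsigned long end = paddr + size - 1;

        return size && end >= paddr;
    }

    static void classify(unsigned long paddr, unsigned long size)
    {
        if (!phys_range_ok(paddr, size))
            printf("%#lx/%lu: rejected\n", paddr, size);
        else if (paddr >= UNCACHED_BASE)
            printf("%#lx/%lu: already uncached, returned as-is\n", paddr, size);
        else
            printf("%#lx/%lu: needs an uncached mapping\n", paddr, size);
    }

    int main(void)
    {
        classify(0x10000000UL, 0x1000);  /* ordinary cached RAM            */
        classify(0xd0000000UL, 0x1000);  /* already in the uncached window */
        classify(~0UL - 0xf, 0x100);     /* end wraps around: rejected     */
        classify(0x10000000UL, 0);       /* zero size: rejected            */
        return 0;
    }

The same zero-size/wraparound test appears twice in the file (lines 22-23 and 50-51), once per entry point, before any page-table work is done.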
/arch/m68k/mm/

  memory.c
    130  static inline void clear040(unsigned long paddr)   in clear040() argument
    137  : : "a" (paddr));   in clear040()
    141  static inline void cleari040(unsigned long paddr)   in cleari040() argument
    148  : : "a" (paddr));   in cleari040()
    153  static inline void push040(unsigned long paddr)   in push040() argument
    160  : : "a" (paddr));   in push040()
    165  static inline void pushcl040(unsigned long paddr)   in pushcl040() argument
    170  push040(paddr);   in pushcl040()
    172  clear040(paddr);   in pushcl040()
    203  void cache_clear (unsigned long paddr, int len)   in cache_clear() argument
         [all …]
/arch/mips/cavium-octeon/

  dma-octeon.c
     31  static dma_addr_t octeon_hole_phys_to_dma(phys_addr_t paddr)   in octeon_hole_phys_to_dma() argument
     33  …if (paddr >= CVMX_PCIE_BAR1_PHYS_BASE && paddr < (CVMX_PCIE_BAR1_PHYS_BASE + CVMX_PCIE_BAR1_PHYS_S…   in octeon_hole_phys_to_dma()
     34  return paddr - CVMX_PCIE_BAR1_PHYS_BASE + CVMX_PCIE_BAR1_RC_BASE;   in octeon_hole_phys_to_dma()
     36  return paddr;   in octeon_hole_phys_to_dma()
     47  static dma_addr_t octeon_gen1_phys_to_dma(struct device *dev, phys_addr_t paddr)   in octeon_gen1_phys_to_dma() argument
     49  if (paddr >= 0x410000000ull && paddr < 0x420000000ull)   in octeon_gen1_phys_to_dma()
     50  paddr -= 0x400000000ull;   in octeon_gen1_phys_to_dma()
     51  return octeon_hole_phys_to_dma(paddr);   in octeon_gen1_phys_to_dma()
     64  static dma_addr_t octeon_gen2_phys_to_dma(struct device *dev, phys_addr_t paddr)   in octeon_gen2_phys_to_dma() argument
     66  return octeon_hole_phys_to_dma(paddr);   in octeon_gen2_phys_to_dma()
         [all …]
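The dma-octeon.c hits above are a windowed physical-to-bus translation: addresses inside one aliased region are first folded down by a fixed offset, and addresses that then land inside the PCIe BAR1 "hole" are relocated to its bus-side base. The sketch below reproduces only that arithmetic; the hole bounds are made up for illustration and stand in for the CVMX_* constants, and the struct device argument is dropped.

    #include <stdint.h>
    #include <inttypes.h>
    #include <stdio.h>

    typedef uint64_t phys_addr_t;
    typedef uint64_t dma_addr_t;

    /* Invented hole geometry, standing in for the CVMX_PCIE_BAR1_* values. */
    #define HOLE_PHYS_BASE 0x10000000ull   /* CPU-side start of the hole     */
    #define HOLE_PHYS_SIZE 0x08000000ull   /* length of the hole             */
    #define HOLE_BUS_BASE  0x80000000ull   /* bus-side base the hole maps to */

    /* Aliased region folded down before the hole check; these bounds appear
     * in the gen1 hit above. */
    #define ALIAS_LO     0x410000000ull
    #define ALIAS_HI     0x420000000ull
    #define ALIAS_OFFSET 0x400000000ull

    /* Relocate addresses inside the hole window to the bus-side base. */
    static dma_addr_t hole_phys_to_dma(phys_addr_t paddr)
    {
        if (paddr >= HOLE_PHYS_BASE && paddr < HOLE_PHYS_BASE + HOLE_PHYS_SIZE)
            return paddr - HOLE_PHYS_BASE + HOLE_BUS_BASE;
        return paddr;
    }

    /* Fold the aliased region down first, then apply the hole translation. */
    static dma_addr_t gen1_phys_to_dma(phys_addr_t paddr)
    {
        if (paddr >= ALIAS_LO && paddr < ALIAS_HI)
            paddr -= ALIAS_OFFSET;
        return hole_phys_to_dma(paddr);
    }

    int main(void)
    {
        printf("%#" PRIx64 "\n", gen1_phys_to_dma(0x12345000ull));  /* inside the hole   */
        printf("%#" PRIx64 "\n", gen1_phys_to_dma(0x412345000ull)); /* folded, then hole */
        printf("%#" PRIx64 "\n", gen1_phys_to_dma(0x01000000ull));  /* passed through    */
        return 0;
    }

The gen2 variant in the hits skips the fold and feeds paddr straight to the hole translation.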
/arch/m68k/kernel/

  sys_m68k.c
     69  unsigned long paddr, i;   in cache_flush_040() local
    102  if ((paddr = virt_to_phys_040(addr))) {   in cache_flush_040()
    103  paddr += addr & ~(PAGE_MASK | 15);   in cache_flush_040()
    115  if ((paddr = virt_to_phys_040(addr)))   in cache_flush_040()
    124  i = (PAGE_SIZE - (paddr & ~PAGE_MASK)) >> 4;   in cache_flush_040()
    134  : : "a" (paddr));   in cache_flush_040()
    141  : : "a" (paddr));   in cache_flush_040()
    149  : : "a" (paddr));   in cache_flush_040()
    164  if ((paddr = virt_to_phys_040(addr)))   in cache_flush_040()
    173  paddr += 16;   in cache_flush_040()
         [all …]
/arch/frv/mm/

  highmem.c
     42  unsigned long paddr;   in kmap_atomic() local
     47  paddr = page_to_phys(page);   in kmap_atomic()
     53  case 0: return __kmap_atomic_primary(0, paddr, 6);   in kmap_atomic()
     54  case 1: return __kmap_atomic_primary(0, paddr, 7);   in kmap_atomic()
     55  case 2: return __kmap_atomic_primary(0, paddr, 8);   in kmap_atomic()
     56  case 3: return __kmap_atomic_primary(0, paddr, 9);   in kmap_atomic()
     57  case 4: return __kmap_atomic_primary(0, paddr, 10);   in kmap_atomic()
     60  return __kmap_atomic_secondary(type - 5, paddr);   in kmap_atomic()
/arch/x86/mm/

  pat.c
    400  static unsigned long lookup_memtype(u64 paddr)   in lookup_memtype() argument
    405  if (x86_platform.is_untracked_pat_range(paddr, paddr + PAGE_SIZE))   in lookup_memtype()
    408  if (pat_pagerange_is_ram(paddr, paddr + PAGE_SIZE)) {   in lookup_memtype()
    410  page = pfn_to_page(paddr >> PAGE_SHIFT);   in lookup_memtype()
    424  entry = rbt_memtype_lookup(paddr);   in lookup_memtype()
    593  static int reserve_pfn_range(u64 paddr, unsigned long size, pgprot_t *vma_prot,   in reserve_pfn_range() argument
    601  is_ram = pat_pagerange_is_ram(paddr, paddr + size);   in reserve_pfn_range()
    612  flags = lookup_memtype(paddr);   in reserve_pfn_range()
    617  (unsigned long long)paddr,   in reserve_pfn_range()
    618  (unsigned long long)(paddr + size - 1),   in reserve_pfn_range()
         [all …]
/arch/sparc/include/asm/

  mxcc.h
     88  static inline void mxcc_set_stream_src(unsigned long *paddr)   in mxcc_set_stream_src() argument
     90  unsigned long data0 = paddr[0];   in mxcc_set_stream_src()
     91  unsigned long data1 = paddr[1];   in mxcc_set_stream_src()
    101  static inline void mxcc_set_stream_dst(unsigned long *paddr)   in mxcc_set_stream_dst() argument
    103  unsigned long data0 = paddr[0];   in mxcc_set_stream_dst()
    104  unsigned long data1 = paddr[1];   in mxcc_set_stream_dst()
/arch/mips/jazz/

  jazzdma.c
     50  unsigned long paddr = 0;   in vdma_pgtbl_init() local
     54  pgtbl[i].frame = paddr;   in vdma_pgtbl_init()
     56  paddr += VDMA_PAGESIZE;   in vdma_pgtbl_init()
     92  unsigned long vdma_alloc(unsigned long paddr, unsigned long size)   in vdma_alloc() argument
     99  if (paddr > 0x1fffffff) {   in vdma_alloc()
    102  paddr);   in vdma_alloc()
    115  pages = VDMA_PAGE(paddr + size) - VDMA_PAGE(paddr) + 1;   in vdma_alloc()
    138  laddr = (first << 12) + (paddr & (VDMA_PAGESIZE - 1));   in vdma_alloc()
    139  frame = paddr & ~(VDMA_PAGESIZE - 1);   in vdma_alloc()
    212  int vdma_remap(unsigned long laddr, unsigned long paddr, unsigned long size)   in vdma_remap() argument
         [all …]
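Two bits of arithmetic carry the jazzdma.c hits above: how many page-table slots a physical range needs, and how the logical DMA address handed back by vdma_alloc() is recomposed from the first slot index plus the offset of paddr within its page. The sketch below shows both, assuming a 4 KiB VDMA page size (the real VDMA_PAGESIZE value is not visible in the hits); the slot-count helper uses the inclusive-end form of the span calculation rather than the exact expression at line 115.

    #include <stdio.h>

    /* Assumed VDMA geometry for the sketch: 4 KiB pages. */
    #define VDMA_PAGESIZE 4096UL
    #define VDMA_PAGE(x)  ((x) / VDMA_PAGESIZE)

    /*
     * Page-table slots needed to cover [paddr, paddr + size): page index of
     * the last byte minus page index of the first byte, plus one.
     */
    static unsigned long vdma_slots_needed(unsigned long paddr, unsigned long size)
    {
        return VDMA_PAGE(paddr + size - 1) - VDMA_PAGE(paddr) + 1;
    }

    /*
     * Logical DMA address: index of the first allocated slot shifted into the
     * page field, plus the byte offset of paddr inside its page (the shape of
     * the expression at line 138 above).
     */
    static unsigned long vdma_logical_addr(unsigned long first_slot, unsigned long paddr)
    {
        return (first_slot << 12) + (paddr & (VDMA_PAGESIZE - 1));
    }

    int main(void)
    {
        /* 0x100 bytes straddling a page boundary need two slots. */
        printf("%lu\n", vdma_slots_needed(0x1ff80, 0x100));
        /* A page-aligned 4 KiB block needs exactly one. */
        printf("%lu\n", vdma_slots_needed(0x20000, 0x1000));
        /* Slot 3 plus offset 0x234 gives logical address 0x3234. */
        printf("%#lx\n", vdma_logical_addr(3, 0x20234));
        return 0;
    }

The guard at line 99 adds the other constraint visible in the hits: physical addresses above 0x1fffffff, i.e. anything outside the low 512 MiB, are not accepted for translation.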
/arch/frv/include/asm/

  highmem.h
     79  #define __kmap_atomic_primary(cached, paddr, ampr) \   argument
     83  dampr = paddr | xAMPRx_L | xAMPRx_M | xAMPRx_S | xAMPRx_SS_16Kb | xAMPRx_V; \
    101  #define __kmap_atomic_secondary(slot, paddr) \   argument
    104  unsigned long dampr = paddr | xAMPRx_L | xAMPRx_M | xAMPRx_S | xAMPRx_SS_16Kb | xAMPRx_V; \
    118  unsigned long paddr;   in kmap_atomic_primary() local
    121  paddr = page_to_phys(page);   in kmap_atomic_primary()
    123  return __kmap_atomic_primary(1, paddr, 2);   in kmap_atomic_primary()
/arch/ia64/sn/pci/pcibr/

  pcibr_dma.c
     45  u64 paddr, size_t req_size, u64 flags, int dma_flags)   in pcibr_dmamap_ate32() argument
     67  if (!(MINIMAL_ATE_FLAG(paddr, req_size))) {   in pcibr_dmamap_ate32()
     86  xio_addr = IS_PIC_SOFT(pcibus_info) ? PHYS_TO_DMA(paddr) :   in pcibr_dmamap_ate32()
     87  PHYS_TO_TIODMA(paddr);   in pcibr_dmamap_ate32()
     89  xio_addr = paddr;   in pcibr_dmamap_ate32()
    128  pcibr_dmatrans_direct64(struct pcidev_info * info, u64 paddr,   in pcibr_dmatrans_direct64() argument
    138  PHYS_TO_DMA(paddr) :   in pcibr_dmatrans_direct64()
    139  PHYS_TO_TIODMA(paddr);   in pcibr_dmatrans_direct64()
    141  pci_addr = paddr;   in pcibr_dmatrans_direct64()
    167  u64 paddr, size_t req_size, u64 flags, int dma_flags)   in pcibr_dmatrans_direct32() argument
         [all …]
/arch/xtensa/mm/

  cache.c
     84  unsigned long vaddr, unsigned long *paddr)   in coherent_kvaddr() argument
     87  *paddr = page_to_phys(page);   in coherent_kvaddr()
     90  *paddr = 0;   in coherent_kvaddr()
     97  unsigned long paddr;   in clear_user_highpage() local
     98  void *kvaddr = coherent_kvaddr(page, TLBTEMP_BASE_1, vaddr, &paddr);   in clear_user_highpage()
    103  clear_page_alias(kvaddr, paddr);   in clear_user_highpage()
    245  unsigned long paddr = (unsigned long)kmap_atomic(page);   in update_mmu_cache() local
    246  __flush_dcache_page(paddr);   in update_mmu_cache()
    247  __invalidate_icache_page(paddr);   in update_mmu_cache()
    249  kunmap_atomic((void *)paddr);   in update_mmu_cache()
/arch/x86/include/asm/uv/

  uv_hub.h
    311  static inline unsigned long uv_soc_phys_ram_to_gpa(unsigned long paddr)   in uv_soc_phys_ram_to_gpa() argument
    313  if (paddr < uv_hub_info->lowmem_remap_top)   in uv_soc_phys_ram_to_gpa()
    314  paddr |= uv_hub_info->lowmem_remap_base;   in uv_soc_phys_ram_to_gpa()
    315  paddr |= uv_hub_info->gnode_upper;   in uv_soc_phys_ram_to_gpa()
    316  paddr = ((paddr << uv_hub_info->m_shift) >> uv_hub_info->m_shift) |   in uv_soc_phys_ram_to_gpa()
    317  ((paddr >> uv_hub_info->m_val) << uv_hub_info->n_lshift);   in uv_soc_phys_ram_to_gpa()
    318  return paddr;   in uv_soc_phys_ram_to_gpa()
    338  unsigned long paddr;   in uv_gpa_to_soc_phys_ram() local
    344  paddr = gpa & uv_hub_info->gpa_mask;   in uv_gpa_to_soc_phys_ram()
    345  if (paddr >= remap_base && paddr < remap_base + remap_top)   in uv_gpa_to_soc_phys_ram()
         [all …]
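The uv_soc_phys_ram_to_gpa() hits above perform a bit-field splice: the socket physical address is split into a low per-node offset field and a node field, and the node field is re-inserted at a higher bit position to form the global address (the real code also ORs in the lowmem remap base and gnode_upper, which this sketch omits). The field widths below are invented; on real hardware they come out of uv_hub_info at runtime.

    #include <stdint.h>
    #include <inttypes.h>
    #include <stdio.h>

    /* Invented field geometry: the low M_BITS are the per-node offset, and
     * the node field sits at bit N_LSHIFT in the global address format. */
    #define M_BITS   26
    #define N_LSHIFT 39

    /* Socket physical address -> global address: keep the offset bits,
     * move the node bits up to N_LSHIFT. */
    static uint64_t soc_phys_to_gpa(uint64_t paddr)
    {
        uint64_t offset = paddr & ((1ull << M_BITS) - 1);
        uint64_t node   = paddr >> M_BITS;

        return offset | (node << N_LSHIFT);
    }

    /* Inverse splice; the remap-window handling visible in the
     * uv_gpa_to_soc_phys_ram() hits is ignored here. */
    static uint64_t gpa_to_soc_phys(uint64_t gpa)
    {
        uint64_t offset = gpa & ((1ull << M_BITS) - 1);
        uint64_t node   = gpa >> N_LSHIFT;

        return offset | (node << M_BITS);
    }

    int main(void)
    {
        uint64_t paddr = (3ull << M_BITS) | 0x12345;   /* node 3, offset 0x12345 */
        uint64_t gpa   = soc_phys_to_gpa(paddr);

        printf("paddr %#" PRIx64 " -> gpa %#" PRIx64 " -> %#" PRIx64 "\n",
               paddr, gpa, gpa_to_soc_phys(gpa));
        return 0;
    }

Because both directions are pure bit arithmetic on fields whose widths are configuration, the conversion is cheap enough to live in a header as inline functions, which is exactly where these hits come from.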
/arch/powerpc/kernel/

  io-workarounds.c
     28  static struct iowa_bus *iowa_pci_find(unsigned long vaddr, unsigned long paddr)   in iowa_pci_find() argument
     45  if (paddr)   in iowa_pci_find()
     48  if (paddr >= res->start && paddr <= res->end)   in iowa_pci_find()
     68  unsigned long vaddr, paddr;   in iowa_mem_find_bus() local
     78  paddr = 0;   in iowa_mem_find_bus()
     84  paddr = pte_pfn(*ptep) << PAGE_SHIFT;   in iowa_mem_find_bus()
     86  bus = iowa_pci_find(vaddr, paddr);   in iowa_mem_find_bus()
  crash_dump.c
    102  phys_addr_t paddr;   in copy_oldmem_page() local
    108  paddr = pfn << PAGE_SHIFT;   in copy_oldmem_page()
    110  if (memblock_is_region_memory(paddr, csize)) {   in copy_oldmem_page()
    111  vaddr = __va(paddr);   in copy_oldmem_page()
    114  vaddr = __ioremap(paddr, PAGE_SIZE, 0);   in copy_oldmem_page()
/arch/arm64/kernel/

  efi.c
    170  u64 paddr, npages, size;   in reserve_regions() local
    176  paddr = md->phys_addr;   in reserve_regions()
    183  paddr, paddr + (npages << EFI_PAGE_SHIFT) - 1,   in reserve_regions()
    187  memrange_efi_to_native(&paddr, &npages);   in reserve_regions()
    191  early_init_dt_add_memory_arch(paddr, size);   in reserve_regions()
    194  memblock_reserve(paddr, size);   in reserve_regions()
    238  u64 paddr, npages, size;   in efi_virtmap_init() local
    246  paddr = md->phys_addr;   in efi_virtmap_init()
    248  memrange_efi_to_native(&paddr, &npages);   in efi_virtmap_init()
    266  create_pgd_mapping(&efi_mm, paddr, md->virt_addr, size,   in efi_virtmap_init()
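The reserve_regions()/efi_virtmap_init() hits above convert each EFI memory descriptor, which always counts 4 KiB EFI pages, into a range the kernel can register and reserve with its own page size. The sketch below shows the underlying idea of such a conversion, aligning the 4 KiB-granular EFI range out to native page boundaries, with a 64 KiB native page assumed for illustration; it is not the exact contract of memrange_efi_to_native().

    #include <stdint.h>
    #include <inttypes.h>
    #include <stdio.h>

    #define EFI_PAGE_SHIFT    12               /* EFI counts 4 KiB pages     */
    #define NATIVE_PAGE_SIZE  (1ull << 16)     /* assume 64 KiB kernel pages */

    /*
     * Align an EFI descriptor's [base, base + npages * 4 KiB) range out to
     * native page boundaries: round the base down and the end up, and hand
     * back the result as base + size in bytes.
     */
    static void efi_range_to_native(uint64_t base, uint64_t efi_pages,
                                    uint64_t *out_base, uint64_t *out_size)
    {
        uint64_t end = base + (efi_pages << EFI_PAGE_SHIFT);

        base &= ~(NATIVE_PAGE_SIZE - 1);
        end   = (end + NATIVE_PAGE_SIZE - 1) & ~(NATIVE_PAGE_SIZE - 1);

        *out_base = base;
        *out_size = end - base;
    }

    int main(void)
    {
        uint64_t base, size;

        /* Three EFI pages starting 4 KiB past a 64 KiB boundary. */
        efi_range_to_native(0x40001000ull, 3, &base, &size);
        printf("base %#" PRIx64 ", size %#" PRIx64 "\n", base, size);
        return 0;
    }

In the listing, the converted range then feeds early_init_dt_add_memory_arch(), memblock_reserve(), and create_pgd_mapping(), all of which take a base plus a byte size.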
/arch/c6x/mm/

  dma-coherent.c
     79  u32 paddr;   in dma_alloc_coherent() local
     87  paddr = __alloc_dma_pages(order);   in dma_alloc_coherent()
     90  *handle = paddr;   in dma_alloc_coherent()
     92  if (!paddr)   in dma_alloc_coherent()
     95  return phys_to_virt(paddr);   in dma_alloc_coherent()
/arch/microblaze/kernel/

  ptrace.c
    117  u32 paddr = virt_to_phys((u32)reg_addr);   in arch_ptrace()
    118  invalidate_icache_range(paddr, paddr + 4);   in arch_ptrace()
    120  flush_dcache_range(paddr, paddr + 4);   in arch_ptrace()
/arch/c6x/kernel/

  dma.c
     20  unsigned long paddr = handle;   in c6x_dma_sync() local
     26  L2_cache_block_invalidate(paddr, paddr + size);   in c6x_dma_sync()
     29  L2_cache_block_writeback(paddr, paddr + size);   in c6x_dma_sync()
     32  L2_cache_block_writeback_invalidate(paddr, paddr + size);   in c6x_dma_sync()
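The c6x_dma_sync() hits above, and the __inline_dma_cache_sync() hits under /arch/arc/include/asm/ further down, are the same dispatch: the DMA direction picks the cache maintenance a buffer needs before or after the device touches it. A self-contained sketch of that dispatch follows; the enum and the stub routines are local to the example and merely stand in for the real block cache primitives.

    #include <stdio.h>

    /* Local stand-ins for the kernel's DMA direction values. */
    enum dma_dir { DMA_BIDIRECTIONAL, DMA_TO_DEVICE, DMA_FROM_DEVICE };

    /* Stubs standing in for the real block cache-maintenance primitives. */
    static void cache_invalidate(unsigned long s, unsigned long e)
    { printf("invalidate [%#lx, %#lx)\n", s, e); }

    static void cache_writeback(unsigned long s, unsigned long e)
    { printf("writeback  [%#lx, %#lx)\n", s, e); }

    static void cache_writeback_invalidate(unsigned long s, unsigned long e)
    { printf("wb + inval [%#lx, %#lx)\n", s, e); }

    /*
     * FROM_DEVICE:   the device wrote RAM, so stale CPU lines must be dropped.
     * TO_DEVICE:     the CPU wrote RAM, so dirty lines must be pushed out.
     * BIDIRECTIONAL: both.
     */
    static void dma_cache_sync(unsigned long paddr, unsigned long size, enum dma_dir dir)
    {
        switch (dir) {
        case DMA_FROM_DEVICE:
            cache_invalidate(paddr, paddr + size);
            break;
        case DMA_TO_DEVICE:
            cache_writeback(paddr, paddr + size);
            break;
        case DMA_BIDIRECTIONAL:
            cache_writeback_invalidate(paddr, paddr + size);
            break;
        }
    }

    int main(void)
    {
        dma_cache_sync(0x80000000UL, 0x1000, DMA_FROM_DEVICE);
        dma_cache_sync(0x80000000UL, 0x1000, DMA_TO_DEVICE);
        dma_cache_sync(0x80000000UL, 0x1000, DMA_BIDIRECTIONAL);
        return 0;
    }

In both listed files the range is expressed as [paddr, paddr + size), matching the signatures of the block cache-maintenance helpers they call.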
/arch/powerpc/platforms/embedded6xx/

  c2k.c
     45  phys_addr_t paddr;   in c2k_setup_arch() local
     54  paddr = of_translate_address(np, reg);   in c2k_setup_arch()
     56  mv64x60_mpp_reg_base = ioremap(paddr, reg[1]);   in c2k_setup_arch()
     60  paddr = of_translate_address(np, reg);   in c2k_setup_arch()
     62  mv64x60_gpp_reg_base = ioremap(paddr, reg[1]);   in c2k_setup_arch()
/arch/alpha/kernel/

  pci_iommu.c
     40  mk_iommu_pte(unsigned long paddr)   in mk_iommu_pte() argument
     42  return (paddr >> (PAGE_SHIFT-1)) | 1;   in mk_iommu_pte()
    258  unsigned long paddr;   in pci_map_single_1() local
    263  paddr = __pa(cpu_addr);   in pci_map_single_1()
    267  if (paddr + size + __direct_map_base - 1 <= max_dma   in pci_map_single_1()
    268  && paddr + size <= __direct_map_size) {   in pci_map_single_1()
    269  ret = paddr + __direct_map_base;   in pci_map_single_1()
    280  ret = paddr + alpha_mv.pci_dac_offset;   in pci_map_single_1()
    300  npages = iommu_num_pages(paddr, size, PAGE_SIZE);   in pci_map_single_1()
    312  paddr &= PAGE_MASK;   in pci_map_single_1()
         [all …]
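The pci_map_single_1() hits above show the fork every mapping takes: if the buffer, offset by the direct-map window base, still fits under the device's DMA limit and inside the window, the bus address is just paddr plus that base and no IOMMU entry is needed; otherwise page-table entries are built, and the mk_iommu_pte() hit shows each entry is simply the physical address shifted down and tagged with a valid bit. The sketch below replays both checks with invented window parameters; only the shapes of the expressions are taken from the listing.

    #include <stdint.h>
    #include <inttypes.h>
    #include <stdio.h>

    #define PAGE_SHIFT       13             /* assume 8 KiB pages for the sketch */
    #define DIRECT_MAP_BASE  0x40000000ull  /* invented direct-map window base   */
    #define DIRECT_MAP_SIZE  0x40000000ull  /* invented direct-map window size   */

    /* IOMMU PTE in the shape of the mk_iommu_pte() hit: shifted paddr | valid. */
    static uint64_t mk_pte(uint64_t paddr)
    {
        return (paddr >> (PAGE_SHIFT - 1)) | 1;
    }

    /*
     * Direct-map test in the shape of the pci_map_single_1() hits: the buffer
     * must stay inside the window and, once offset by the window base, stay
     * under the device's DMA limit.
     */
    static int fits_direct_map(uint64_t paddr, uint64_t size, uint64_t max_dma)
    {
        return paddr + size + DIRECT_MAP_BASE - 1 <= max_dma &&
               paddr + size <= DIRECT_MAP_SIZE;
    }

    int main(void)
    {
        uint64_t paddr = 0x12345000ull, size = 0x2000;
        uint64_t max_dma = 0xffffffffull;   /* a 32-bit-capable device */

        if (fits_direct_map(paddr, size, max_dma))
            printf("direct: bus address %#" PRIx64 "\n", paddr + DIRECT_MAP_BASE);
        else
            printf("via IOMMU: first pte %#" PRIx64 "\n",
                   mk_pte(paddr & ~((1ull << PAGE_SHIFT) - 1)));
        return 0;
    }

The `ret = paddr + alpha_mv.pci_dac_offset` hit is the remaining path visible in the listing: a DAC-style bypass in which a fixed offset alone produces the bus address.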
/arch/arc/include/asm/

  dma-mapping.h
     62  static inline void __inline_dma_cache_sync(unsigned long paddr, size_t size,   in __inline_dma_cache_sync() argument
     67  dma_cache_inv(paddr, size);   in __inline_dma_cache_sync()
     70  dma_cache_wback(paddr, size);   in __inline_dma_cache_sync()
     73  dma_cache_wback_inv(paddr, size);   in __inline_dma_cache_sync()
     76  pr_err("Invalid DMA dir [%d] for OP @ %lx\n", dir, paddr);   in __inline_dma_cache_sync()
     80  void __arc_dma_cache_sync(unsigned long paddr, size_t size,
    111  unsigned long paddr = page_to_phys(page) + offset;   in dma_map_page() local
    112  return dma_map_single(dev, (void *)paddr, size, dir);   in dma_map_page()
  page.h
     19  #define clear_page(paddr) memset((paddr), 0, PAGE_SIZE)   argument
     92  #define __va(paddr) ((void *)((unsigned long)(paddr)))   argument
/arch/sh/mm/

  tlb-sh5.c
    121  unsigned long asid, unsigned long paddr)   in sh64_setup_tlb_slot() argument
    128  ptel = neff_sign_extend(paddr);   in sh64_setup_tlb_slot()
    153  unsigned long paddr, flags;   in tlb_wire_entry() local
    162  paddr = pte_val(pte) & _PAGE_FLAGS_HARDWARE_MASK;   in tlb_wire_entry()
    163  paddr &= ~PAGE_MASK;   in tlb_wire_entry()
    165  sh64_setup_tlb_slot(entry, addr, get_asid(), paddr);   in tlb_wire_entry()