
Searched refs:virt_to_pfn (Results 1 – 25 of 25) sorted by relevance

/arch/arc/include/asm/
  page.h:86    #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
  page.h:88    #define ARCH_PFN_OFFSET virt_to_pfn(CONFIG_LINUX_RAM_BASE)
  page.h:104   #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
  page.h:105   #define virt_addr_valid(kaddr) pfn_valid(virt_to_pfn(kaddr))
  mmzone.h:22  is_end_low = pfn <= virt_to_pfn(0xFFFFFFFFUL);    [in pfn_to_nid()]
/arch/powerpc/include/asm/
  page.h:132  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
  page.h:133  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
  page.h:144  pfn_valid(virt_to_pfn(kaddr)))
  page.h:146  #define virt_addr_valid(kaddr) pfn_valid(virt_to_pfn(kaddr))
/arch/arm/kernel/
  hibernate.c:29  unsigned long nosave_begin_pfn = virt_to_pfn(&__nosave_begin);    [in pfn_is_nosave()]
  hibernate.c:30  unsigned long nosave_end_pfn = virt_to_pfn(&__nosave_end - 1);    [in pfn_is_nosave()]
/arch/score/include/asm/
  page.h:68   #define virt_to_pfn(vaddr) (phys_to_pfn((__pa(vaddr))))    [macro]
  page.h:71   #define virt_to_page(vaddr) (pfn_to_page(virt_to_pfn(vaddr)))
  page.h:84   #define virt_addr_valid(vaddr) (pfn_valid(virt_to_pfn(vaddr)))
/arch/m68k/include/asm/
  page_no.h:23   #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
  page_no.h:30   #define page_to_pfn(page) virt_to_pfn(page_to_virt(page))
  page_mm.h:124  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
  page_mm.h:150  pfn_to_page(virt_to_pfn(addr)); \
/arch/openrisc/include/asm/
  page.h:79   #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
  page.h:89   #define virt_addr_valid(kaddr) (pfn_valid(virt_to_pfn(kaddr)))
/arch/microblaze/include/asm/
  page.h:148  # define virt_to_pfn(vaddr) (phys_to_pfn((__pa(vaddr))))    [macro]
  page.h:158  # define virt_to_page(vaddr) (pfn_to_page(virt_to_pfn(vaddr)))
  page.h:176  #define virt_addr_valid(vaddr) (pfn_valid(virt_to_pfn(vaddr)))
/arch/arm/include/asm/
  memory.h:267       #define virt_to_pfn(kaddr) \    [macro]
  memory.h:372       #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
  memory.h:374       && pfn_valid(virt_to_pfn(kaddr)))
  dma-mapping.h:65   return pfn_to_dma(dev, virt_to_pfn(addr));    [in virt_to_dma()]
/arch/x86/include/asm/xen/
  page.h:271  #define virt_to_pfn(v) (PFN_DOWN(__pa(v)))    [macro]
  page.h:272  #define virt_to_mfn(v) (pfn_to_mfn(virt_to_pfn(v)))
  page.h:276  #define virt_to_gfn(v) (pfn_to_gfn(virt_to_pfn(v)))
/arch/m68k/sun3/
  dvma.c:33   ptep = pfn_pte(virt_to_pfn(kaddr), PAGE_KERNEL);    [in dvma_page()]
/arch/s390/include/asm/
  page.h:164  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
  page.h:167  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
/arch/arm64/mm/
  kasan_init.c:165  pfn_to_nid(virt_to_pfn(lm_alias(_text))));    [in kasan_init()]
  kasan_init.c:196  pfn_to_nid(virt_to_pfn(start)));    [in kasan_init()]
/arch/m68k/mm/
  sun3mmu.c:73  pte_t pte = pfn_pte(virt_to_pfn(address), PAGE_INIT);    [in paging_init()]
  mcfmmu.c:68   pte_t pte = pfn_pte(virt_to_pfn(address), PAGE_INIT);    [in paging_init()]
/arch/hexagon/include/asm/
  page.h:143  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)    [macro]
/arch/m68k/sun3x/
  dvma.c:125  set_pte(pte, pfn_pte(virt_to_pfn(kaddr),    [in dvma_map_cpu()]
/arch/arm64/include/asm/
  memory.h:280  #define virt_to_pfn(x) __phys_to_pfn(__virt_to_phys((unsigned long)(x)))    [macro]
/arch/microblaze/kernel/
  dma.c:181   pfn = virt_to_pfn(cpu_addr);    [in dma_direct_mmap_coherent()]
/arch/arm64/kernel/
  hibernate.c:249  set_pte(pte, pfn_pte(virt_to_pfn(dst), PAGE_KERNEL_EXEC));    [in create_safe_exec_page()]
/arch/x86/xen/
  pmu.c:530           pfn = virt_to_pfn(xenpmu_data);    [in xen_pmu_init()]
  mmu_pv.c:2461       __set_phys_to_machine(virt_to_pfn(vaddr), INVALID_P2M_ENTRY);    [in xen_zap_pfn_range()]
  mmu_pv.c:2464       out_frames[i] = virt_to_pfn(vaddr);    [in xen_zap_pfn_range()]
  mmu_pv.c:2506       set_phys_to_machine(virt_to_pfn(vaddr), mfn);    [in xen_remap_exchanged_ptes()]
  mmu_pv.c:2583       out_frame = virt_to_pfn(vstart);    [in xen_create_contiguous_region()]
  enlighten_pv.c:489  pfn = virt_to_pfn(va);    [in xen_load_gdt_boot()]
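
Nearly all of the definitions listed above follow the same pattern: translate the kernel virtual address to a physical address with __pa() and shift right by PAGE_SHIFT to get the page frame number, with virt_to_page() and virt_addr_valid() built on top of that. The following is a standalone sketch of that arithmetic only; the PAGE_SHIFT and PAGE_OFFSET values and the simplified __pa() are assumptions for illustration, not taken from any of the headers above.

/*
 * Standalone sketch of the common virt_to_pfn() arithmetic seen in the
 * results above: physical address >> PAGE_SHIFT gives the page frame number.
 * PAGE_SHIFT, PAGE_OFFSET and the simplified __pa() below are assumed values
 * for illustration only; real kernels take them from the architecture headers.
 */
#include <stdio.h>

#define PAGE_SHIFT      12                      /* assume 4 KiB pages */
#define PAGE_OFFSET     0xC0000000UL            /* assumed linear-map base */
#define __pa(vaddr)     ((unsigned long)(vaddr) - PAGE_OFFSET)
#define virt_to_pfn(kaddr)      (__pa(kaddr) >> PAGE_SHIFT)

int main(void)
{
	unsigned long kaddr = 0xC0123000UL;     /* a linear-map kernel address */

	/* __pa(kaddr) = 0x123000, so the resulting PFN is 0x123 */
	printf("pfn = 0x%lx\n", virt_to_pfn(kaddr));
	return 0;
}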