/arch/arc/include/asm/

  page.h
     83  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]
     85  #define ARCH_PFN_OFFSET virt_to_pfn(CONFIG_LINUX_RAM_BASE)
    101  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
    102  #define virt_addr_valid(kaddr) pfn_valid(virt_to_pfn(kaddr))

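Taken together, the derived macros above show the usual chain from a directly-mapped kernel virtual address to its struct page. A minimal sketch of how a caller might use them; the helper name and the check ordering are illustrative, not taken from the file:

    #include <linux/mm.h>	/* pulls in virt_addr_valid(), virt_to_page() */

    /* Sketch: resolve a lowmem kernel address to its struct page. */
    static struct page *kaddr_to_page_checked(void *kaddr)
    {
            /* virt_addr_valid() folds in pfn_valid(virt_to_pfn(kaddr)). */
            if (!virt_addr_valid(kaddr))
                    return NULL;

            /* virt_to_page() expands to pfn_to_page(virt_to_pfn(kaddr)). */
            return virt_to_page(kaddr);
    }
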
  mmzone.h
     19  is_end_low = pfn <= virt_to_pfn(0xFFFFFFFFUL);   [in pfn_to_nid()]

/arch/riscv/include/asm/

  pgalloc.h
     18  unsigned long pfn = virt_to_pfn(pte);                 [in pmd_populate_kernel()]
     26  unsigned long pfn = virt_to_pfn(page_address(pte));   [in pmd_populate()]
     34  unsigned long pfn = virt_to_pfn(pmd);                 [in pud_populate()]

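In these pgalloc.h hits, virt_to_pfn() turns a freshly allocated page-table page into the PFN that gets encoded into the higher-level entry. A sketch of the pattern for the first hit; the _PAGE_PFN_SHIFT and _PAGE_TABLE names and the set_pmd() call reflect how the RISC-V port encodes table pointers, but treat the details as an approximation rather than a quote of the file:

    static inline void pmd_populate_kernel(struct mm_struct *mm,
                                           pmd_t *pmd, pte_t *pte)
    {
            unsigned long pfn = virt_to_pfn(pte);	/* line 18 above */

            /* Shift the PFN into the PPN field and mark it as a next-level table. */
            set_pmd(pmd, __pmd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
    }
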
  page.h
    103  #define virt_to_pfn(vaddr) (phys_to_pfn(__pa(vaddr)))   [macro]
    106  #define virt_to_page(vaddr) (pfn_to_page(virt_to_pfn(vaddr)))
    122  #define virt_addr_valid(vaddr) (pfn_valid(virt_to_pfn(vaddr)))

/arch/arm/kernel/

  hibernate.c
     28  unsigned long nosave_begin_pfn = virt_to_pfn(&__nosave_begin);   [in pfn_is_nosave()]
     29  unsigned long nosave_end_pfn = virt_to_pfn(&__nosave_end - 1);   [in pfn_is_nosave()]

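The two hibernate.c lines compute the PFN bounds of the __nosave section. A sketch of how pfn_is_nosave() plausibly completes; the final comparison is the standard range check rather than a quote of the file:

    int pfn_is_nosave(unsigned long pfn)
    {
            unsigned long nosave_begin_pfn = virt_to_pfn(&__nosave_begin);
            unsigned long nosave_end_pfn = virt_to_pfn(&__nosave_end - 1);

            /* A page is skipped by hibernation if it falls inside [begin, end]. */
            return (pfn >= nosave_begin_pfn) && (pfn <= nosave_end_pfn);
    }
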
/arch/m68k/include/asm/

  page_no.h
     23  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]
     30  #define page_to_pfn(page) virt_to_pfn(page_to_virt(page))

  page_mm.h
    124  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]
    150  pfn_to_page(virt_to_pfn(addr)); \

/arch/powerpc/include/asm/

  page.h
    131  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]
    132  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
    135  #define virt_addr_valid(kaddr) pfn_valid(virt_to_pfn(kaddr))

/arch/openrisc/include/asm/

  page.h
     75  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]
     85  #define virt_addr_valid(kaddr) (pfn_valid(virt_to_pfn(kaddr)))

/arch/microblaze/include/asm/

  page.h
    148  # define virt_to_pfn(vaddr) (phys_to_pfn((__pa(vaddr))))   [macro]
    158  # define virt_to_page(vaddr) (pfn_to_page(virt_to_pfn(vaddr)))
    176  #define virt_addr_valid(vaddr) (pfn_valid(virt_to_pfn(vaddr)))

/arch/arm/include/asm/

  memory.h
    259  #define virt_to_pfn(kaddr) \   [macro]
    364  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))
    366  && pfn_valid(virt_to_pfn(kaddr)))

  dma-mapping.h
     67  return pfn_to_dma(dev, virt_to_pfn(addr));   [in virt_to_dma()]

/arch/x86/include/asm/xen/

  page.h
    307  #define virt_to_pfn(v) (PFN_DOWN(__pa(v)))   [macro]
    308  #define virt_to_mfn(v) (pfn_to_mfn(virt_to_pfn(v)))
    312  #define virt_to_gfn(v) (pfn_to_gfn(virt_to_pfn(v)))

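The Xen header layers guest-physical (pfn), machine (mfn) and guest (gfn) frame numbers on top of the same virtual-to-pfn step. A minimal usage sketch; the helper and its caller are hypothetical:

    #include <asm/xen/page.h>	/* virt_to_pfn(), virt_to_mfn(), virt_to_gfn() */

    /* Sketch: log the frame numbers of a page that will be shared with Xen. */
    static void log_frame_numbers(void *page)
    {
            /* virt_to_pfn(v) is PFN_DOWN(__pa(v)); the two wrappers translate
             * that guest pfn into the machine (mfn) and guest (gfn) spaces. */
            pr_info("pfn=%lx mfn=%lx gfn=%lx\n",
                    virt_to_pfn(page), virt_to_mfn(page), virt_to_gfn(page));
    }
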
/arch/arm64/include/asm/

  memory.h
    309  #define virt_to_pfn(x) __phys_to_pfn(__virt_to_phys((unsigned long)(x)))   [macro]
    335  #define virt_to_page(x) pfn_to_page(virt_to_pfn(x))
    353  __is_lm_address(__addr) && pfn_valid(virt_to_pfn(__addr)); \

/arch/riscv/mm/

  context.c
     61  csr_write(CSR_SATP, virt_to_pfn(next->pgd) | SATP_MODE);   [in switch_mm()]

/arch/m68k/sun3/

  dvma.c
     33  ptep = pfn_pte(virt_to_pfn(kaddr), PAGE_KERNEL);   [in dvma_page()]

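This hit, like the m68k/mm, sun3x and arm64 hibernate entries further down, follows one pattern: derive the PFN of a kernel virtual page and wrap it into a PTE with pfn_pte() before installing it. A condensed sketch of that pattern; the helper and the comment about flushing are illustrative, not from any of the listed files:

    /* Sketch: hand-install a mapping for one directly-mapped kernel page. */
    static void map_one_page(pte_t *ptep, void *kaddr, pgprot_t prot)
    {
            pte_t pte = pfn_pte(virt_to_pfn(kaddr), prot);

            set_pte(ptep, pte);
            /* Callers typically follow this with a TLB flush for the VA. */
    }
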
/arch/mips/include/asm/

  page.h
    249  #define virt_to_pfn(kaddr) PFN_DOWN(virt_to_phys((void *)(kaddr)))   [macro]
    250  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))

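The MIPS definition spells the same arithmetic differently: PFN_DOWN() from <linux/pfn.h> is a right shift by PAGE_SHIFT, so for a directly-mapped address it matches the (__pa(kaddr) >> PAGE_SHIFT) form used by most of the other architectures above. The helper below is purely illustrative:

    #include <linux/pfn.h>	/* PFN_DOWN(x) is ((x) >> PAGE_SHIFT) */

    /* Sketch: both spellings yield the same pfn for a lowmem address,
     * since __pa() and virt_to_phys() agree on the direct map. */
    static bool pfn_spellings_agree(void *kaddr)
    {
            return PFN_DOWN(virt_to_phys(kaddr)) == (__pa(kaddr) >> PAGE_SHIFT);
    }
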
/arch/s390/include/asm/

  page.h
    162  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]
    166  #define virt_to_page(kaddr) pfn_to_page(virt_to_pfn(kaddr))

/arch/m68k/mm/

  sun3mmu.c
     79  pte_t pte = pfn_pte(virt_to_pfn(address), PAGE_INIT);   [in paging_init()]

  mcfmmu.c
     73  pte_t pte = pfn_pte(virt_to_pfn(address), PAGE_INIT);   [in paging_init()]

/arch/hexagon/include/asm/

  page.h
    130  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]

/arch/csky/include/asm/

  page.h
     37  #define virt_to_pfn(kaddr) (__pa(kaddr) >> PAGE_SHIFT)   [macro]

/arch/arm64/mm/

  kasan_init.c
    227  early_pfn_to_nid(virt_to_pfn(lm_alias(_text))));   [in kasan_init()]
    247  early_pfn_to_nid(virt_to_pfn(start)));             [in kasan_init()]

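In kasan_init() the pfn is not used for a mapping at all; it only feeds early_pfn_to_nid() so that KASAN shadow memory is allocated on the node backing the region being shadowed. A sketch of that pattern, assuming the memblock API; the helper and its surrounding logic are illustrative, not the file's code:

    #include <linux/memblock.h>

    /* Sketch: allocate one shadow page near the memory it will shadow. */
    static phys_addr_t __init alloc_shadow_page_near(void *start)
    {
            int nid = early_pfn_to_nid(virt_to_pfn(start));
            void *p = memblock_alloc_try_nid(PAGE_SIZE, PAGE_SIZE, 0,
                                             MEMBLOCK_ALLOC_ACCESSIBLE, nid);

            return p ? __pa(p) : 0;
    }
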
/arch/m68k/sun3x/

  dvma.c
    125  set_pte(pte, pfn_pte(virt_to_pfn(kaddr),   [in dvma_map_cpu()]

/arch/arm64/kernel/

  hibernate.c
    256  set_pte(ptep, pfn_pte(virt_to_pfn(dst), PAGE_KERNEL_EXEC));   [in create_safe_exec_page()]
