/arch/mips/include/asm/ |
D | pgtable-32.h | 116 pfn_pte(unsigned long pfn, pgprot_t prot) in pfn_pte() function
D | pgtable-32.h | 131 static inline pte_t pfn_pte(unsigned long pfn, pgprot_t prot) in pfn_pte() function
D | pgtable-32.h | 145 #define pfn_pte(pfn, prot) __pte(((pfn) << (PAGE_SHIFT + 2)) | pgprot_val(prot)) macro
D | pgtable-32.h | 148 #define pfn_pte(pfn, prot) __pte(((unsigned long long)(pfn) << _PFN_SHIFT) | pgprot_val(prot)) macro
|
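Across these definitions, pfn_pte() composes a page-table entry by placing the page frame number at an architecture-specific bit position and OR-ing in the protection bits from the pgprot. The following is a minimal user-space model of that composition, not kernel code; the page size, the PFN shift and the protection bits are illustrative assumptions, not any architecture's real layout.

    /* Minimal user-space model of pfn_pte()-style PTE composition.
     * MODEL_PAGE_SHIFT, MODEL_PFN_SHIFT and the protection bits are
     * illustrative assumptions, not any architecture's real layout. */
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PAGE_SHIFT 12          /* 4 KiB pages (assumed) */
    #define MODEL_PFN_SHIFT  MODEL_PAGE_SHIFT
    #define MODEL_PROT_VALID 0x1u        /* hypothetical "present" bit */
    #define MODEL_PROT_WRITE 0x2u        /* hypothetical "writable" bit */

    typedef uint64_t model_pte_t;

    /* Compose a PTE value: PFN in the high bits, protection in the low bits. */
    static model_pte_t model_pfn_pte(uint64_t pfn, uint64_t prot)
    {
            return (pfn << MODEL_PFN_SHIFT) | prot;
    }

    /* Inverse helper: recover the PFN by shifting the protection bits away. */
    static uint64_t model_pte_pfn(model_pte_t pte)
    {
            return pte >> MODEL_PFN_SHIFT;
    }

    int main(void)
    {
            uint64_t pfn = 0x12345;
            model_pte_t pte = model_pfn_pte(pfn, MODEL_PROT_VALID | MODEL_PROT_WRITE);

            printf("pte = %#llx, pfn back = %#llx\n",
                   (unsigned long long)pte,
                   (unsigned long long)model_pte_pfn(pte));
            return 0;
    }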
/arch/powerpc/mm/ |
D | pgtable-book3e.c | 92 set_pte_at(&init_mm, ea, ptep, pfn_pte(pa >> PAGE_SHIFT, in map_kernel_page()
D | pgtable-book3e.c | 116 set_pte_at(&init_mm, ea, ptep, pfn_pte(pa >> PAGE_SHIFT, in map_kernel_page()
|
/arch/arm64/mm/ |
D | hugetlbpage.c | 51 return __pgprot(pte_val(pfn_pte(pfn, __pgprot(0))) ^ pte_val(pte)); in pte_pgprot()
D | hugetlbpage.c | 189 pte_val(pfn_pte(pfn, hugeprot))); in set_huge_pte_at()
D | hugetlbpage.c | 190 set_pte_at(mm, addr, ptep, pfn_pte(pfn, hugeprot)); in set_huge_pte_at()
D | hugetlbpage.c | 362 set_pte_at(vma->vm_mm, addr, ptep, pfn_pte(pfn, hugeprot)); in huge_ptep_set_access_flags()
D | hugetlbpage.c | 391 set_pte_at(mm, addr, ptep, pfn_pte(pfn, hugeprot)); in huge_ptep_set_wrprotect()
|
D | kasan_init.c | 50 set_pte(pte, pfn_pte(sym_to_pfn(kasan_zero_page), in kasan_early_pte_populate()
D | kasan_init.c | 205 pfn_pte(sym_to_pfn(kasan_zero_page), PAGE_KERNEL_RO)); in kasan_init()
|
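The arm64 pte_pgprot() helper at hugetlbpage.c line 51 recovers the protection bits of an existing PTE by XOR-ing it with pfn_pte() of its own PFN and an empty pgprot: the PFN bits cancel and only the attribute bits remain. Below is a hedged user-space model of that identity, reusing the illustrative encoding from the earlier sketch; the field layout is an assumption, not arm64's.

    /* Model of the XOR trick: pgprot = pte ^ pfn_pte(pte_pfn(pte), 0).
     * The PFN field cancels out, leaving only the protection bits.
     * The layout (shift, bit meanings) is an illustrative assumption. */
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PFN_SHIFT 12

    static uint64_t model_pfn_pte(uint64_t pfn, uint64_t prot)
    {
            return (pfn << MODEL_PFN_SHIFT) | prot;
    }

    static uint64_t model_pte_pfn(uint64_t pte)
    {
            return pte >> MODEL_PFN_SHIFT;
    }

    /* Recover the protection bits without knowing their mask explicitly. */
    static uint64_t model_pte_pgprot(uint64_t pte)
    {
            return model_pfn_pte(model_pte_pfn(pte), 0) ^ pte;
    }

    int main(void)
    {
            uint64_t prot = 0x7e3;                        /* arbitrary attribute bits */
            uint64_t pte  = model_pfn_pte(0xabcde, prot);

            assert(model_pte_pgprot(pte) == prot);
            printf("recovered prot = %#llx\n",
                   (unsigned long long)model_pte_pgprot(pte));
            return 0;
    }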
/arch/tile/mm/ |
D | init.c | 465 *(pte_t *)pmd = pte_mkhuge(pfn_pte(pfn, prot)); in kernel_physical_mapping_init()
D | init.c | 468 pte[pte_ofs] = pfn_pte(pfn, prot); in kernel_physical_mapping_init()
D | init.c | 476 pte[pte_ofs] = pfn_pte(pfn, prot); in kernel_physical_mapping_init()
D | init.c | 547 pte[pte_ofs] = pfn_pte(pfn, prot); in kernel_physical_mapping_init()
D | init.c | 552 pte_t pteval = pfn_pte(0, PAGE_KERNEL_EXEC); in kernel_physical_mapping_init()
D | init.c | 573 *(pte_t *)(pmd++) = pfn_pte(pfn, pteval); in kernel_physical_mapping_init()
D | init.c | 925 pfn_pte(pfn, PAGE_KERNEL)); in free_init_pages()
|
/arch/powerpc/include/asm/ |
D | pgtable.h | 28 #define mk_pte(page, pgprot) pfn_pte(page_to_pfn(page), (pgprot))
|
/arch/m68k/sun3/ |
D | dvma.c | 33 ptep = pfn_pte(virt_to_pfn(kaddr), PAGE_KERNEL); in dvma_page()
|
/arch/x86/kernel/ |
D | crash_dump_32.c | 28 return pte_pfn(pfn_pte(pfn, __pgprot(0))) == pfn; in is_crashed_pfn_valid()
|
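is_crashed_pfn_valid() checks whether a PFN survives the round trip through pfn_pte()/pte_pfn(); a PFN too wide for the PTE's PFN field does not come back unchanged, so the comparison fails. A small model of that truncation check, with an assumed 32-bit PTE and a 12-bit low field:

    /* Model of the pfn -> pte -> pfn round-trip validity check.
     * A 32-bit PTE with a 12-bit low field leaves 20 bits for the PFN;
     * these widths are illustrative assumptions. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PFN_SHIFT 12

    static uint32_t model_pfn_pte(uint64_t pfn, uint32_t prot)
    {
            return (uint32_t)(pfn << MODEL_PFN_SHIFT) | prot;  /* truncates wide PFNs */
    }

    static uint64_t model_pte_pfn(uint32_t pte)
    {
            return pte >> MODEL_PFN_SHIFT;
    }

    /* A PFN is representable iff it survives the round trip unchanged. */
    static bool model_pfn_valid(uint64_t pfn)
    {
            return model_pte_pfn(model_pfn_pte(pfn, 0)) == pfn;
    }

    int main(void)
    {
            printf("pfn 0x00abcde valid: %d\n", model_pfn_valid(0x00abcdeULL));
            printf("pfn 0x1abcdef valid: %d\n", model_pfn_valid(0x1abcdefULL));
            return 0;
    }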
/arch/um/include/asm/ |
D | pgtable-2level.h | 42 #define pfn_pte(pfn, prot) __pte(pfn_to_phys(pfn) | pgprot_val(prot)) macro
|
/arch/tile/include/asm/ |
D | pgtable.h | 278 static inline pte_t pfn_pte(unsigned long pfn, pgprot_t prot) in pfn_pte() function
D | pgtable.h | 303 #define mk_pte(page, pgprot) pfn_pte(page_to_pfn(page), (pgprot))
D | pgtable.h | 311 return pfn_pte(pte_pfn(pte), newprot); in pte_modify()
D | pgtable.h | 480 #define pfn_pmd(pfn, pgprot) pte_pmd(pfn_pte((pfn), (pgprot)))
|
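mk_pte() at line 303 and pte_modify() at line 311 are both thin wrappers around pfn_pte(): mk_pte() first converts a struct page to its PFN, and pte_modify() keeps the PFN of an existing PTE while substituting new protection bits. A hedged model of pte_modify() under the same illustrative encoding as above:

    /* Model of pte_modify(): keep the PFN, replace the protection bits.
     * The encoding (12-bit shift, low-bit protections) is an assumption. */
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PFN_SHIFT 12

    static uint64_t model_pfn_pte(uint64_t pfn, uint64_t prot)
    {
            return (pfn << MODEL_PFN_SHIFT) | prot;
    }

    static uint64_t model_pte_pfn(uint64_t pte)
    {
            return pte >> MODEL_PFN_SHIFT;
    }

    /* pte_modify(pte, newprot) == pfn_pte(pte_pfn(pte), newprot) */
    static uint64_t model_pte_modify(uint64_t pte, uint64_t newprot)
    {
            return model_pfn_pte(model_pte_pfn(pte), newprot);
    }

    int main(void)
    {
            uint64_t pte = model_pfn_pte(0x4242, 0x3);    /* present + writable (model bits) */
            uint64_t ro  = model_pte_modify(pte, 0x1);    /* drop the model write bit */

            printf("before %#llx after %#llx\n",
                   (unsigned long long)pte, (unsigned long long)ro);
            return 0;
    }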
/arch/m68k/mm/ |
D | sun3mmu.c | 73 pte_t pte = pfn_pte(virt_to_pfn(address), PAGE_INIT); in paging_init()
|
D | sun3kmap.c | 36 ptep = pfn_pte(phys >> PAGE_SHIFT, PAGE_KERNEL); in do_page_mapin()
|
/arch/mn10300/mm/ |
D | init.c | 90 set_pte(ppte, pfn_pte(USER_ATOMIC_OPS_PAGE_ADDR >> PAGE_SHIFT, in paging_init()
|
/arch/x86/xen/ |
D | p2m.c | 342 pfn_pte(PFN_DOWN(__pa(p2m_missing)), PAGE_KERNEL_RO)); in xen_rebuild_p2m_list()
D | p2m.c | 344 pfn_pte(PFN_DOWN(__pa(p2m_identity)), PAGE_KERNEL_RO)); in xen_rebuild_p2m_list()
D | p2m.c | 381 pfn_pte(PFN_DOWN(__pa(mfns)), PAGE_KERNEL)); in xen_rebuild_p2m_list()
D | p2m.c | 391 pfn_pte(PFN_DOWN(__pa(mfns)), PAGE_KERNEL_RO)); in xen_rebuild_p2m_list()
D | p2m.c | 601 pfn_pte(PFN_DOWN(__pa(p2m)), PAGE_KERNEL)); in xen_alloc_p2m_entry()
|
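The Xen p2m code maps its tables with pfn_pte(PFN_DOWN(__pa(ptr)), ...): it goes from a directly-mapped kernel virtual address to a physical address, then rounds down to a page frame number before composing the PTE. The sketch below models that arithmetic for a linear mapping at an assumed MODEL_PAGE_OFFSET; real __pa() implementations differ per architecture, so treat this only as an illustration of the PFN_DOWN(__pa(x)) step.

    /* Model of PFN_DOWN(__pa(vaddr)) for a linearly mapped kernel region.
     * MODEL_PAGE_OFFSET and the 4 KiB page size are illustrative assumptions. */
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PAGE_SHIFT  12
    #define MODEL_PAGE_OFFSET 0xffff888000000000ULL   /* assumed linear-map base */

    static uint64_t model_pa(uint64_t vaddr)
    {
            return vaddr - MODEL_PAGE_OFFSET;          /* linear map: constant offset */
    }

    static uint64_t model_pfn_down(uint64_t phys)
    {
            return phys >> MODEL_PAGE_SHIFT;           /* round down to a frame number */
    }

    int main(void)
    {
            uint64_t vaddr = MODEL_PAGE_OFFSET + 0x123456789ULL;
            uint64_t pfn   = model_pfn_down(model_pa(vaddr));

            printf("vaddr %#llx -> pfn %#llx\n",
                   (unsigned long long)vaddr, (unsigned long long)pfn);
            return 0;
    }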
/arch/mips/bcm47xx/ |
D | prom.c | 122 #define ENTRYLO(x) ((pte_val(pfn_pte((x) >> _PFN_SHIFT, PAGE_KERNEL_UNCACHED)) >> 6) | 1)
|
/arch/x86/mm/ |
D | iomap_32.c | 68 set_pte(kmap_pte - idx, pfn_pte(pfn, prot)); in kmap_atomic_prot_pfn()
|
/arch/m32r/include/asm/ |
D | pgtable-2level.h | 72 #define pfn_pte(pfn, prot) __pte(((pfn) << PAGE_SHIFT) | pgprot_val(prot)) macro
|
/arch/metag/mm/ |
D | highmem.c | 109 set_pte(kmap_pte - idx, pfn_pte(pfn, PAGE_KERNEL)); in kmap_atomic_pfn()
|
/arch/mips/mm/ |
D | highmem.c | 117 set_pte(kmap_pte-idx, pfn_pte(pfn, PAGE_KERNEL)); in kmap_atomic_pfn()
|
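The metag and mips highmem code, like the x86 iomap variant above, follows the same pattern: pick a fixmap slot per CPU, compose a PTE with pfn_pte(pfn, PAGE_KERNEL), and write it with set_pte(kmap_pte - idx, ...). The model below only illustrates the downward indexing and the store; the slot count, the encoding and the "PAGE_KERNEL" value are assumptions, and a real set_pte() may also need barriers or TLB maintenance.

    /* Model of set_pte(kmap_pte - idx, pfn_pte(pfn, PAGE_KERNEL)):
     * fixmap-style slots addressed downward from a base PTE pointer.
     * Slot count, encoding and the "PAGE_KERNEL" value are assumptions. */
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PFN_SHIFT   12
    #define MODEL_PAGE_KERNEL 0x3u        /* hypothetical present+writable bits */
    #define MODEL_NR_SLOTS    8

    static uint64_t model_slots[MODEL_NR_SLOTS];

    static uint64_t model_pfn_pte(uint64_t pfn, uint64_t prot)
    {
            return (pfn << MODEL_PFN_SHIFT) | prot;
    }

    static void model_set_pte(uint64_t *ptep, uint64_t pte)
    {
            *ptep = pte;                  /* real set_pte() may also flush/barrier */
    }

    int main(void)
    {
            /* model_kmap_pte points at the highest slot; entries are used as kmap_pte - idx. */
            uint64_t *model_kmap_pte = &model_slots[MODEL_NR_SLOTS - 1];
            unsigned int idx = 2;

            model_set_pte(model_kmap_pte - idx, model_pfn_pte(0x777, MODEL_PAGE_KERNEL));
            printf("slot %u holds %#llx\n", MODEL_NR_SLOTS - 1 - idx,
                   (unsigned long long)model_slots[MODEL_NR_SLOTS - 1 - idx]);
            return 0;
    }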
/arch/unicore32/kernel/ |
D | hibernate.c | 111 set_pte(pte, pfn_pte(pfn, PAGE_KERNEL_EXEC)); in resume_physical_mapping_init()
|
/arch/mips/kvm/ |
D | mmu.c | 774 entry = pfn_pte(pfn, __pgprot(prot_bits)); in kvm_mips_map_page()
D | mmu.c | 826 ptep[0] = pfn_pte(0, __pgprot(0)); in kvm_trap_emul_invalidate_gva()
D | mmu.c | 827 ptep[1] = pfn_pte(0, __pgprot(0)); in kvm_trap_emul_invalidate_gva()
D | mmu.c | 834 ptep[0] = pfn_pte(0, __pgprot(0)); in kvm_trap_emul_invalidate_gva()
D | mmu.c | 835 ptep[1] = pfn_pte(0, __pgprot(0)); in kvm_trap_emul_invalidate_gva()
D | mmu.c | 1076 pte_gpa[!idx] = pfn_pte(0, __pgprot(0)); in kvm_mips_handle_mapped_seg_tlb_fault()
D | mmu.c | 1120 *ptep = pte_mkyoung(pte_mkdirty(pfn_pte(pfn, PAGE_SHARED))); in kvm_mips_handle_commpage_tlb_fault()
|
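Line 1120 composes a PTE and then marks it accessed and dirty through pte_mkyoung() and pte_mkdirty(). A hedged model of that layering, with invented bit positions for the accessed and dirty flags:

    /* Model of pte_mkyoung(pte_mkdirty(pfn_pte(pfn, prot))):
     * helpers that set flag bits on an already composed PTE.
     * Bit positions here are invented for illustration only. */
    #include <stdint.h>
    #include <stdio.h>

    #define MODEL_PFN_SHIFT    12
    #define MODEL_PROT_SHARED  0x1u      /* stand-in for PAGE_SHARED */
    #define MODEL_PTE_ACCESSED 0x20u     /* hypothetical "young" bit */
    #define MODEL_PTE_DIRTY    0x40u     /* hypothetical "dirty" bit */

    static uint64_t model_pfn_pte(uint64_t pfn, uint64_t prot)
    {
            return (pfn << MODEL_PFN_SHIFT) | prot;
    }

    static uint64_t model_pte_mkdirty(uint64_t pte) { return pte | MODEL_PTE_DIRTY; }
    static uint64_t model_pte_mkyoung(uint64_t pte) { return pte | MODEL_PTE_ACCESSED; }

    int main(void)
    {
            uint64_t pte = model_pte_mkyoung(
                            model_pte_mkdirty(
                                    model_pfn_pte(0x1000, MODEL_PROT_SHARED)));

            printf("pte = %#llx (accessed %d, dirty %d)\n",
                   (unsigned long long)pte,
                   !!(pte & MODEL_PTE_ACCESSED), !!(pte & MODEL_PTE_DIRTY));
            return 0;
    }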
/arch/m68k/include/asm/ |
D | sun3_pgtable.h | 103 #define mk_pte(page, pgprot) pfn_pte(page_to_pfn(page), (pgprot))
D | sun3_pgtable.h | 131 #define pfn_pte(pfn, pgprot) \ macro
|
/arch/nios2/include/asm/ |
D | pgtable.h | 199 #define pfn_pte(pfn, prot) (__pte(pfn | pgprot_val(prot))) macro
D | pgtable.h | 242 #define mk_pte(page, prot) (pfn_pte(page_to_pfn(page), prot))
|
/arch/arm/mm/ |
D | flush.c | 46 set_top_pte(to, pfn_pte(pfn, PAGE_KERNEL)); in flush_pfn_alias()
D | flush.c | 61 set_top_pte(va, pfn_pte(pfn, PAGE_KERNEL)); in flush_icache_alias()
|
/arch/unicore32/include/asm/ |
D | pgtable.h | 154 #define pfn_pte(pfn, prot) (__pte(((pfn) << PAGE_SHIFT) \ macro
D | pgtable.h | 231 #define mk_pte(page, prot) pfn_pte(page_to_pfn(page), prot)
|