Searched refs:p4d (Results 1 – 25 of 113) sorted by relevance
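For orientation: p4d is the level between pgd and pud in the kernel's five-level page-table scheme, and on four-level builds it is folded into the pgd. A minimal lookup-side sketch of the walk most of the hits below follow; walk_to_kernel_pte() is a hypothetical name, not a kernel function.

#include <linux/mm.h>
#include <linux/pgtable.h>

/* Hypothetical helper: descend from an mm to the pte for a kernel address. */
static pte_t *walk_to_kernel_pte(struct mm_struct *mm, unsigned long addr)
{
        pgd_t *pgd = pgd_offset(mm, addr);
        p4d_t *p4d;
        pud_t *pud;
        pmd_t *pmd;

        if (pgd_none_or_clear_bad(pgd))
                return NULL;
        p4d = p4d_offset(pgd, addr);            /* effectively a cast when p4d is folded */
        if (p4d_none_or_clear_bad(p4d))
                return NULL;
        pud = pud_offset(p4d, addr);
        if (pud_none_or_clear_bad(pud))
                return NULL;
        pmd = pmd_offset(pud, addr);
        if (pmd_none_or_clear_bad(pmd))
                return NULL;
        return pte_offset_kernel(pmd, addr);    /* kernel mappings; user VAs normally go through pte_offset_map() */
}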

/arch/powerpc/include/asm/nohash/64/
pgtable-4k.h
56 #define p4d_none(p4d) (!p4d_val(p4d)) argument
57 #define p4d_bad(p4d) (p4d_val(p4d) == 0) argument
58 #define p4d_present(p4d) (p4d_val(p4d) != 0) argument
62 static inline pud_t *p4d_pgtable(p4d_t p4d) in p4d_pgtable() argument
64 return (pud_t *) (p4d_val(p4d) & ~P4D_MASKED_BITS); in p4d_pgtable()
72 static inline pte_t p4d_pte(p4d_t p4d) in p4d_pte() argument
74 return __pte(p4d_val(p4d)); in p4d_pte()
81 extern struct page *p4d_page(p4d_t p4d);
/arch/x86/include/asm/
pgalloc.h
113 static inline void p4d_populate(struct mm_struct *mm, p4d_t *p4d, pud_t *pud) in p4d_populate() argument
116 set_p4d(p4d, __p4d(_PAGE_TABLE | __pa(pud))); in p4d_populate()
119 static inline void p4d_populate_safe(struct mm_struct *mm, p4d_t *p4d, pud_t *pud) in p4d_populate_safe() argument
122 set_p4d_safe(p4d, __p4d(_PAGE_TABLE | __pa(pud))); in p4d_populate_safe()
134 static inline void pgd_populate(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d) in pgd_populate() argument
138 paravirt_alloc_p4d(mm, __pa(p4d) >> PAGE_SHIFT); in pgd_populate()
139 set_pgd(pgd, __pgd(_PAGE_TABLE | __pa(p4d))); in pgd_populate()
142 static inline void pgd_populate_safe(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d) in pgd_populate_safe() argument
146 paravirt_alloc_p4d(mm, __pa(p4d) >> PAGE_SHIFT); in pgd_populate_safe()
147 set_pgd_safe(pgd, __pgd(_PAGE_TABLE | __pa(p4d))); in pgd_populate_safe()
[all …]
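The populate helpers above are what the generic allocator calls once a lower-level table page exists. A hedged sketch modeled on the generic __pud_alloc() path in mm/memory.c, with accounting and write barriers omitted; install_pud_table() is a made-up name.

#include <linux/mm.h>
#include <linux/errno.h>
#include <asm/pgalloc.h>

static int install_pud_table(struct mm_struct *mm, p4d_t *p4d, unsigned long addr)
{
        pud_t *new = pud_alloc_one(mm, addr);   /* one page worth of pud entries */

        if (!new)
                return -ENOMEM;

        spin_lock(&mm->page_table_lock);
        if (p4d_present(*p4d))                  /* someone else populated it first */
                pud_free(mm, new);
        else
                p4d_populate(mm, p4d, new);     /* on x86: set_p4d(p4d, __p4d(_PAGE_TABLE | __pa(new))) */
        spin_unlock(&mm->page_table_lock);
        return 0;
}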
pgtable_types.h
332 typedef struct { p4dval_t p4d; } p4d_t; member
339 static inline p4dval_t native_p4d_val(p4d_t p4d) in native_p4d_val() argument
341 return p4d.p4d; in native_p4d_val()
351 static inline p4dval_t native_p4d_val(p4d_t p4d) in native_p4d_val() argument
353 return native_pgd_val(p4d.pgd); in native_p4d_val()
374 return (pud_t) { .p4d.pgd = native_make_pgd(val) }; in native_make_pud()
379 return native_pgd_val(pud.p4d.pgd); in native_pud_val()
400 return (pmd_t) { .pud.p4d.pgd = native_make_pgd(val) }; in native_make_pmd()
405 return native_pgd_val(pmd.pud.p4d.pgd); in native_pmd_val()
409 static inline p4dval_t p4d_pfn_mask(p4d_t p4d) in p4d_pfn_mask() argument
[all …]
/arch/sh/mm/
hugetlbpage.c
28 p4d_t *p4d; in huge_pte_alloc() local
35 p4d = p4d_alloc(mm, pgd, addr); in huge_pte_alloc()
36 if (p4d) { in huge_pte_alloc()
37 pud = pud_alloc(mm, p4d, addr); in huge_pte_alloc()
53 p4d_t *p4d; in huge_pte_offset() local
60 p4d = p4d_offset(pgd, addr); in huge_pte_offset()
61 if (p4d) { in huge_pte_offset()
62 pud = pud_offset(p4d, addr); in huge_pte_offset()
tlbex_32.c
26 p4d_t *p4d; in handle_tlbmiss() local
46 p4d = p4d_offset(pgd, address); in handle_tlbmiss()
47 if (p4d_none_or_clear_bad(p4d)) in handle_tlbmiss()
49 pud = pud_offset(p4d, address); in handle_tlbmiss()
/arch/mips/mm/
hugetlbpage.c
28 p4d_t *p4d; in huge_pte_alloc() local
33 p4d = p4d_alloc(mm, pgd, addr); in huge_pte_alloc()
34 pud = pud_alloc(mm, p4d, addr); in huge_pte_alloc()
45 p4d_t *p4d; in huge_pte_offset() local
51 p4d = p4d_offset(pgd, addr); in huge_pte_offset()
52 if (p4d_present(*p4d)) { in huge_pte_offset()
53 pud = pud_offset(p4d, addr); in huge_pte_offset()
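The huge_pte_alloc() hits above all follow the same allocation-side chain; each *_alloc() helper returns the existing table when the level is already populated, so the calls can be chained unconditionally. A minimal sketch under that assumption; walk_alloc_pmd() is hypothetical.

#include <linux/mm.h>

static pmd_t *walk_alloc_pmd(struct mm_struct *mm, unsigned long addr)
{
        pgd_t *pgd = pgd_offset(mm, addr);
        p4d_t *p4d = p4d_alloc(mm, pgd, addr);  /* allocates the p4d level if missing */
        pud_t *pud;

        if (!p4d)
                return NULL;
        pud = pud_alloc(mm, p4d, addr);
        if (!pud)
                return NULL;
        return pmd_alloc(mm, pud, addr);
}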
/arch/x86/mm/
ident_map.c
81 p4d_t *p4d = p4d_page + p4d_index(addr); in ident_p4d_init() local
88 if (p4d_present(*p4d)) { in ident_p4d_init()
89 pud = pud_offset(p4d, 0); in ident_p4d_init()
104 set_p4d(p4d, __p4d(__pa(pud) | info->kernpg_flag)); in ident_p4d_init()
127 p4d_t *p4d; in kernel_ident_mapping_init() local
134 p4d = p4d_offset(pgd, 0); in kernel_ident_mapping_init()
135 result = ident_p4d_init(info, p4d, addr, next); in kernel_ident_mapping_init()
141 p4d = (p4d_t *)info->alloc_pgt_page(info->context); in kernel_ident_mapping_init()
142 if (!p4d) in kernel_ident_mapping_init()
144 result = ident_p4d_init(info, p4d, addr, next); in kernel_ident_mapping_init()
[all …]
init_64.c
73 DEFINE_POPULATE(p4d_populate, p4d, pud, init)
74 DEFINE_POPULATE(pgd_populate, pgd, p4d, init)
88 DEFINE_ENTRY(p4d, p4d, init)
187 p4d_t *p4d; in sync_global_pgds_l4() local
191 p4d = p4d_offset(pgd, addr); in sync_global_pgds_l4()
196 if (!p4d_none(*p4d_ref) && !p4d_none(*p4d)) in sync_global_pgds_l4()
197 BUG_ON(p4d_pgtable(*p4d) in sync_global_pgds_l4()
200 if (p4d_none(*p4d)) in sync_global_pgds_l4()
201 set_p4d(p4d, *p4d_ref); in sync_global_pgds_l4()
247 p4d_t *p4d = (p4d_t *)spp_getpage(); in fill_p4d() local
[all …]
kasan_init_64.c
103 static void __init kasan_populate_p4d(p4d_t *p4d, unsigned long addr, in kasan_populate_p4d() argument
109 if (p4d_none(*p4d)) { in kasan_populate_p4d()
112 p4d_populate(&init_mm, p4d, p); in kasan_populate_p4d()
115 pud = pud_offset(p4d, addr); in kasan_populate_p4d()
127 p4d_t *p4d; in kasan_populate_pgd() local
135 p4d = p4d_offset(pgd, addr); in kasan_populate_pgd()
138 kasan_populate_p4d(p4d, addr, next, nid); in kasan_populate_pgd()
139 } while (p4d++, addr = next, addr != end); in kasan_populate_pgd()
194 unsigned long p4d; in early_p4d_offset() local
199 p4d = pgd_val(*pgd) & PTE_PFN_MASK; in early_p4d_offset()
[all …]
pgtable_32.c
30 p4d_t *p4d; in set_pte_vaddr() local
40 p4d = p4d_offset(pgd, vaddr); in set_pte_vaddr()
41 if (p4d_none(*p4d)) { in set_pte_vaddr()
45 pud = pud_offset(p4d, vaddr); in set_pte_vaddr()
pti.c
202 p4d_t *p4d; in pti_user_pagetable_walk_pmd() local
205 p4d = pti_user_pagetable_walk_p4d(address); in pti_user_pagetable_walk_pmd()
206 if (!p4d) in pti_user_pagetable_walk_pmd()
209 BUILD_BUG_ON(p4d_large(*p4d) != 0); in pti_user_pagetable_walk_pmd()
210 if (p4d_none(*p4d)) { in pti_user_pagetable_walk_pmd()
215 set_p4d(p4d, __p4d(_KERNPG_TABLE | __pa(new_pud_page))); in pti_user_pagetable_walk_pmd()
218 pud = pud_offset(p4d, address); in pti_user_pagetable_walk_pmd()
316 p4d_t *p4d; in pti_clone_pgtable() local
326 p4d = p4d_offset(pgd, addr); in pti_clone_pgtable()
327 if (WARN_ON(p4d_none(*p4d))) in pti_clone_pgtable()
[all …]
/arch/mips/include/asm/
pgtable-64.h
189 static inline int p4d_none(p4d_t p4d) in p4d_none() argument
191 return p4d_val(p4d) == (unsigned long)invalid_pud_table; in p4d_none()
194 static inline int p4d_bad(p4d_t p4d) in p4d_bad() argument
196 if (unlikely(p4d_val(p4d) & ~PAGE_MASK)) in p4d_bad()
202 static inline int p4d_present(p4d_t p4d) in p4d_present() argument
204 return p4d_val(p4d) != (unsigned long)invalid_pud_table; in p4d_present()
212 static inline pud_t *p4d_pgtable(p4d_t p4d) in p4d_pgtable() argument
214 return (pud_t *)p4d_val(p4d); in p4d_pgtable()
217 #define p4d_phys(p4d) virt_to_phys((void *)p4d_val(p4d)) argument
218 #define p4d_page(p4d) (pfn_to_page(p4d_phys(p4d) >> PAGE_SHIFT)) argument
[all …]
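The MIPS definitions above implement the generic contract for the p4d predicates: p4d_none() means no table is installed, p4d_bad() flags a corrupt entry, and p4d_pgtable() recovers the pud table that pud_offset() indexes into. A hedged guard-pattern sketch; pud_table_of() is a made-up name.

#include <linux/pgtable.h>

static pud_t *pud_table_of(p4d_t *p4d, unsigned long addr)
{
        if (p4d_none(*p4d) || p4d_bad(*p4d))    /* empty or corrupt entry: nothing to walk */
                return NULL;
        return pud_offset(p4d, addr);           /* generically p4d_pgtable(*p4d) + pud_index(addr) */
}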
/arch/arm/mm/
pgd.c
145 p4d_t *p4d; in pgd_free() local
157 p4d = p4d_offset(pgd, 0); in pgd_free()
158 if (p4d_none_or_clear_bad(p4d)) in pgd_free()
161 pud = pud_offset(p4d, 0); in pgd_free()
178 p4d_clear(p4d); in pgd_free()
182 p4d_free(mm, p4d); in pgd_free()
193 p4d = p4d_offset(pgd, 0); in pgd_free()
194 if (p4d_none_or_clear_bad(p4d)) in pgd_free()
196 pud = pud_offset(p4d, 0); in pgd_free()
203 p4d_clear(p4d); in pgd_free()
[all …]
/arch/s390/mm/
vmem.c
311 static int modify_pud_table(p4d_t *p4d, unsigned long addr, unsigned long end, in modify_pud_table() argument
322 pud = pud_offset(p4d, addr); in modify_pud_table()
365 static void try_free_pud_table(p4d_t *p4d, unsigned long start) in try_free_pud_table() argument
379 pud = pud_offset(p4d, start); in try_free_pud_table()
384 vmem_free_pages(p4d_deref(*p4d), CRST_ALLOC_ORDER); in try_free_pud_table()
385 p4d_clear(p4d); in try_free_pud_table()
393 p4d_t *p4d; in modify_p4d_table() local
396 p4d = p4d_offset(pgd, addr); in modify_p4d_table()
397 for (; addr < end; addr = next, p4d++) { in modify_p4d_table()
400 if (p4d_none(*p4d)) in modify_p4d_table()
[all …]
page-states.c
120 static void mark_kernel_pud(p4d_t *p4d, unsigned long addr, unsigned long end) in mark_kernel_pud() argument
127 pud = pud_offset(p4d, addr); in mark_kernel_pud()
145 p4d_t *p4d; in mark_kernel_p4d() local
148 p4d = p4d_offset(pgd, addr); in mark_kernel_p4d()
151 if (p4d_none(*p4d)) in mark_kernel_p4d()
153 if (!p4d_folded(*p4d)) { in mark_kernel_p4d()
154 page = phys_to_page(p4d_val(*p4d)); in mark_kernel_p4d()
158 mark_kernel_pud(p4d, addr, next); in mark_kernel_p4d()
159 } while (p4d++, addr = next, addr != end); in mark_kernel_p4d()
/arch/x86/power/
hibernate_64.c
32 p4d_t *p4d = NULL; in set_up_temporary_text_mapping() local
55 p4d = (p4d_t *)get_safe_page(GFP_ATOMIC); in set_up_temporary_text_mapping()
56 if (!p4d) in set_up_temporary_text_mapping()
72 if (p4d) { in set_up_temporary_text_mapping()
74 pgd_t new_pgd = __pgd(__pa(p4d) | pgprot_val(pgtable_prot)); in set_up_temporary_text_mapping()
76 set_p4d(p4d + p4d_index(restore_jump_address), new_p4d); in set_up_temporary_text_mapping()
hibernate.c
153 p4d_t *p4d; in relocate_restore_code() local
167 p4d = p4d_offset(pgd, relocated_restore_code); in relocate_restore_code()
168 if (p4d_large(*p4d)) { in relocate_restore_code()
169 set_p4d(p4d, __p4d(p4d_val(*p4d) & ~_PAGE_NX)); in relocate_restore_code()
172 pud = pud_offset(p4d, relocated_restore_code); in relocate_restore_code()
/arch/powerpc/mm/
pgtable_64.c
102 struct page *p4d_page(p4d_t p4d) in p4d_page() argument
104 if (p4d_is_leaf(p4d)) { in p4d_page()
106 VM_WARN_ON(!p4d_huge(p4d)); in p4d_page()
107 return pte_page(p4d_pte(p4d)); in p4d_page()
109 return virt_to_page(p4d_pgtable(p4d)); in p4d_page()
pgtable.c
308 p4d_t *p4d; in assert_pte_locked() local
316 p4d = p4d_offset(pgd, addr); in assert_pte_locked()
317 BUG_ON(p4d_none(*p4d)); in assert_pte_locked()
318 pud = pud_offset(p4d, addr); in assert_pte_locked()
359 p4d_t p4d, *p4dp; in __find_linux_pte() local
381 p4d = READ_ONCE(*p4dp); in __find_linux_pte()
384 if (p4d_none(p4d)) in __find_linux_pte()
387 if (p4d_is_leaf(p4d)) { in __find_linux_pte()
392 if (is_hugepd(__hugepd(p4d_val(p4d)))) { in __find_linux_pte()
393 hpdp = (hugepd_t *)&p4d; in __find_linux_pte()
[all …]
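The powerpc __find_linux_pte() hit above shows the lockless-read idiom: the entry is snapshotted once with READ_ONCE() and every check is made on the local copy, so a concurrent update is never observed half-way through. A minimal sketch of just that step; p4d_points_below() is hypothetical, and p4d_leaf() is used here as the generic spelling of the leaf test.

#include <linux/types.h>
#include <linux/pgtable.h>

static bool p4d_points_below(p4d_t *p4dp)
{
        p4d_t p4d = READ_ONCE(*p4dp);   /* one snapshot; never reread *p4dp */

        if (p4d_none(p4d))
                return false;           /* nothing mapped at this level */
        if (p4d_leaf(p4d))
                return false;           /* huge leaf mapping, no pud table below */
        return true;                    /* safe to descend to the pud table */
}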
/arch/parisc/mm/
hugetlbpage.c
51 p4d_t *p4d; in huge_pte_alloc() local
64 p4d = p4d_offset(pgd, addr); in huge_pte_alloc()
65 pud = pud_alloc(mm, p4d, addr); in huge_pte_alloc()
78 p4d_t *p4d; in huge_pte_offset() local
87 p4d = p4d_offset(pgd, addr); in huge_pte_offset()
88 if (!p4d_none(*p4d)) { in huge_pte_offset()
89 pud = pud_offset(p4d, addr); in huge_pte_offset()
/arch/ia64/mm/
hugetlbpage.c
33 p4d_t *p4d; in huge_pte_alloc() local
39 p4d = p4d_offset(pgd, taddr); in huge_pte_alloc()
40 pud = pud_alloc(mm, p4d, taddr); in huge_pte_alloc()
54 p4d_t *p4d; in huge_pte_offset() local
61 p4d = p4d_offset(pgd, taddr); in huge_pte_offset()
62 if (p4d_present(*p4d)) { in huge_pte_offset()
63 pud = pud_offset(p4d, taddr); in huge_pte_offset()
/arch/sparc/mm/
hugetlbpage.c
284 p4d_t *p4d; in huge_pte_alloc() local
289 p4d = p4d_offset(pgd, addr); in huge_pte_alloc()
290 pud = pud_alloc(mm, p4d, addr); in huge_pte_alloc()
307 p4d_t *p4d; in huge_pte_offset() local
314 p4d = p4d_offset(pgd, addr); in huge_pte_offset()
315 if (p4d_none(*p4d)) in huge_pte_offset()
317 pud = pud_offset(p4d, addr); in huge_pte_offset()
462 static void hugetlb_free_pud_range(struct mmu_gather *tlb, p4d_t *p4d, in hugetlb_free_pud_range() argument
471 pud = pud_offset(p4d, addr); in hugetlb_free_pud_range()
494 pud = pud_offset(p4d, start); in hugetlb_free_pud_range()
[all …]
/arch/riscv/mm/
pageattr.c
41 static int pageattr_p4d_entry(p4d_t *p4d, unsigned long addr, in pageattr_p4d_entry() argument
44 p4d_t val = READ_ONCE(*p4d); in pageattr_p4d_entry()
48 set_p4d(p4d, val); in pageattr_p4d_entry()
213 p4d_t *p4d; in kernel_page_present() local
223 p4d = p4d_offset(pgd, addr); in kernel_page_present()
224 if (!p4d_present(*p4d)) in kernel_page_present()
226 if (p4d_leaf(*p4d)) in kernel_page_present()
229 pud = pud_offset(p4d, addr); in kernel_page_present()
/arch/s390/include/asm/
pgalloc.h
62 static inline void p4d_free(struct mm_struct *mm, p4d_t *p4d) in p4d_free() argument
65 crst_table_free(mm, (unsigned long *) p4d); in p4d_free()
104 static inline void pgd_populate(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d) in pgd_populate() argument
106 pgd_val(*pgd) = _REGION1_ENTRY | __pa(p4d); in pgd_populate()
109 static inline void p4d_populate(struct mm_struct *mm, p4d_t *p4d, pud_t *pud) in p4d_populate() argument
111 p4d_val(*p4d) = _REGION2_ENTRY | __pa(pud); in p4d_populate()
/arch/arc/include/asm/
pgtable-levels.h
114 #define p4d_pgtable(p4d) ((pud_t *)(p4d_val(p4d) & PAGE_MASK)) argument
115 #define p4d_page(p4d) virt_to_page(p4d_pgtable(p4d)) argument
116 #define set_p4d(p4dp, p4d) (*(p4dp) = p4d) argument
