
Searched refs:p4d (Results 1 – 21 of 21) sorted by relevance

/mm/kasan/
init.c
45 static inline bool kasan_pud_table(p4d_t p4d) in kasan_pud_table() argument
47 return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud)); in kasan_pud_table()
50 static inline bool kasan_pud_table(p4d_t p4d) in kasan_pud_table() argument
142 static int __ref zero_pud_populate(p4d_t *p4d, unsigned long addr, in zero_pud_populate() argument
145 pud_t *pud = pud_offset(p4d, addr); in zero_pud_populate()
182 p4d_t *p4d = p4d_offset(pgd, addr); in zero_p4d_populate() local
191 p4d_populate(&init_mm, p4d, in zero_p4d_populate()
193 pud = pud_offset(p4d, addr); in zero_p4d_populate()
202 if (p4d_none(*p4d)) { in zero_p4d_populate()
206 p = pud_alloc(&init_mm, p4d, addr); in zero_p4d_populate()
[all …]
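
The init.c hits above are KASAN's early-shadow setup: every empty p4d slot in a range is pointed at one shared zero pud table so the whole span reads as zero shadow. A minimal sketch of that pattern, assuming a kernel-internal context (the function name is illustrative, not the file's exact code):

    #include <linux/kasan.h>
    #include <linux/mm.h>
    #include <linux/pgtable.h>

    /* Sketch: map [addr, end) to the shared zero shadow by pointing
     * every empty p4d entry at kasan_early_shadow_pud. */
    static void zero_shadow_p4d_sketch(pgd_t *pgd, unsigned long addr,
                                       unsigned long end)
    {
        p4d_t *p4d = p4d_offset(pgd, addr);
        unsigned long next;

        do {
            next = p4d_addr_end(addr, end);
            if (p4d_none(*p4d))
                p4d_populate(&init_mm, p4d,
                             lm_alias(kasan_early_shadow_pud));
        } while (p4d++, addr = next, addr != end);
    }
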
shadow.c
149 p4d_t *p4d; in shadow_mapped() local
156 p4d = p4d_offset(pgd, addr); in shadow_mapped()
157 if (p4d_none(*p4d)) in shadow_mapped()
159 pud = pud_offset(p4d, addr); in shadow_mapped()
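
shadow_mapped() uses the read-only descent that recurs throughout these results: take the entry for the address at each level, bail out on an empty entry, and only then step down. A minimal sketch, with an illustrative helper name:

    #include <linux/mm.h>
    #include <linux/pgtable.h>

    /* Sketch: find the pud entry mapping addr in the kernel tables,
     * or NULL if any upper level is unpopulated. */
    static pud_t *lookup_pud_sketch(unsigned long addr)
    {
        pgd_t *pgd = pgd_offset_k(addr);
        p4d_t *p4d;

        if (pgd_none(*pgd))
            return NULL;
        p4d = p4d_offset(pgd, addr);
        if (p4d_none(*p4d))
            return NULL;
        return pud_offset(p4d, addr);
    }
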
/mm/
sparse-vmemmap.c
138 static int vmemmap_pud_range(p4d_t *p4d, unsigned long addr, in vmemmap_pud_range() argument
145 pud = pud_offset(p4d, addr); in vmemmap_pud_range()
162 p4d_t *p4d; in vmemmap_p4d_range() local
165 p4d = p4d_offset(pgd, addr); in vmemmap_p4d_range()
170 ret = vmemmap_pud_range(p4d, addr, next, walk); in vmemmap_p4d_range()
173 } while (p4d++, addr = next, addr != end); in vmemmap_p4d_range()
537 pud_t * __meminit vmemmap_pud_populate(p4d_t *p4d, unsigned long addr, int node) in vmemmap_pud_populate() argument
539 pud_t *pud = pud_offset(p4d, addr); in vmemmap_pud_populate()
551 p4d_t *p4d = p4d_offset(pgd, addr); in vmemmap_p4d_populate() local
552 if (p4d_none(*p4d)) { in vmemmap_p4d_populate()
[all …]
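
vmemmap_p4d_populate() is the populate-on-demand variant: a pud table is allocated the first time a p4d slot is touched. A hedged sketch; alloc_zeroed_table() is a hypothetical stand-in for the file's node-local zeroed-block allocator:

    #include <linux/mm.h>
    #include <linux/pgtable.h>

    /* Sketch: ensure the p4d entry covering addr points at a pud
     * table, allocating one on first use. */
    static p4d_t *p4d_populate_sketch(pgd_t *pgd, unsigned long addr, int node)
    {
        p4d_t *p4d = p4d_offset(pgd, addr);

        if (p4d_none(*p4d)) {
            pud_t *new = alloc_zeroed_table(node); /* hypothetical helper */

            if (!new)
                return NULL;
            p4d_populate(&init_mm, p4d, new);
        }
        return p4d;
    }
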
pgalloc-track.h
19 static inline pud_t *pud_alloc_track(struct mm_struct *mm, p4d_t *p4d, in pud_alloc_track() argument
23 if (unlikely(p4d_none(*p4d))) { in pud_alloc_track()
24 if (__pud_alloc(mm, p4d, address)) in pud_alloc_track()
29 return pud_offset(p4d, address); in pud_alloc_track()
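
The _track variant allocates on demand like pud_alloc() but also records which level changed, so callers such as vmalloc can decide later whether page-table synchronization is needed. The mask update below sits on a line the search output elides; this is a sketch of the header's apparent shape, not a verbatim copy:

    #include <linux/mm.h>
    #include <linux/pgtable.h>

    /* Sketch: allocate the pud table if the p4d entry is empty and
     * note that the p4d level was modified. */
    static inline pud_t *pud_alloc_track_sketch(struct mm_struct *mm,
                                                p4d_t *p4d,
                                                unsigned long address,
                                                pgtbl_mod_mask *mask)
    {
        if (unlikely(p4d_none(*p4d))) {
            if (__pud_alloc(mm, p4d, address))
                return NULL;
            *mask |= PGTBL_P4D_MODIFIED;
        }
        return pud_offset(p4d, address);
    }
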
pagewalk.c
161 static int walk_pud_range(p4d_t *p4d, unsigned long addr, unsigned long end, in walk_pud_range() argument
170 pud = pud_offset(p4d, addr); in walk_pud_range()
216 p4d_t *p4d; in walk_p4d_range() local
222 p4d = p4d_offset(pgd, addr); in walk_p4d_range()
225 if (p4d_none_or_clear_bad(p4d)) { in walk_p4d_range()
233 err = ops->p4d_entry(p4d, addr, next, walk); in walk_p4d_range()
237 if (is_hugepd(__hugepd(p4d_val(*p4d)))) in walk_p4d_range()
238 err = walk_hugepd_range((hugepd_t *)p4d, addr, next, walk, P4D_SHIFT); in walk_p4d_range()
240 err = walk_pud_range(p4d, addr, next, walk); in walk_p4d_range()
243 } while (p4d++, addr = next, addr != end); in walk_p4d_range()
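
walk_p4d_range() is the generic walker's p4d step: skip holes (clearing corrupt entries on the way), fire the optional per-level callback, then recurse to the pud level. A condensed sketch; the hugepd side path and error plumbing are left out and the function name is illustrative:

    #include <linux/pagewalk.h>

    /* Sketch: visit each p4d entry in [addr, end) via walk->ops. */
    static int walk_p4d_sketch(pgd_t *pgd, unsigned long addr,
                               unsigned long end, struct mm_walk *walk)
    {
        const struct mm_walk_ops *ops = walk->ops;
        p4d_t *p4d = p4d_offset(pgd, addr);
        unsigned long next;
        int err = 0;

        do {
            next = p4d_addr_end(addr, end);
            if (p4d_none_or_clear_bad(p4d))
                continue; /* hole: nothing to visit */
            if (ops->p4d_entry) {
                err = ops->p4d_entry(p4d, addr, next, walk);
                if (err)
                    break;
            }
        } while (p4d++, addr = next, addr != end);

        return err;
    }
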
memory.c
330 static inline void free_pud_range(struct mmu_gather *tlb, p4d_t *p4d, in free_pud_range() argument
339 pud = pud_offset(p4d, addr); in free_pud_range()
358 pud = pud_offset(p4d, start); in free_pud_range()
359 p4d_clear(p4d); in free_pud_range()
368 p4d_t *p4d; in free_p4d_range() local
373 p4d = p4d_offset(pgd, addr); in free_p4d_range()
376 if (p4d_none_or_clear_bad(p4d)) in free_p4d_range()
378 free_pud_range(tlb, p4d, addr, next, floor, ceiling); in free_p4d_range()
379 } while (p4d++, addr = next, addr != end); in free_p4d_range()
392 p4d = p4d_offset(pgd, start); in free_p4d_range()
[all …]
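
Lines 358-359 above are the teardown tail of free_pud_range(): once everything under a pud table is gone, the table is unhooked from its p4d entry and queued for freeing. A sketch of just that step, assuming the surrounding boundary checks have already passed (helper name illustrative):

    #include <linux/mm.h>

    /* Sketch: detach a fully-emptied pud table and free it through
     * the mmu_gather batch so the TLB flush happens first. */
    static void free_pud_table_sketch(struct mmu_gather *tlb, p4d_t *p4d,
                                      unsigned long start)
    {
        pud_t *pud = pud_offset(p4d, start);

        p4d_clear(p4d);                /* unhook the pud table */
        pud_free_tlb(tlb, pud, start); /* free after the TLB flush */
        mm_dec_nr_puds(tlb->mm);       /* page-table accounting */
    }
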
vmalloc.c
209 static int vmap_pud_range(p4d_t *p4d, unsigned long addr, unsigned long end, in vmap_pud_range() argument
216 pud = pud_alloc_track(&init_mm, p4d, addr, mask); in vmap_pud_range()
235 static int vmap_try_huge_p4d(p4d_t *p4d, unsigned long addr, unsigned long end, in vmap_try_huge_p4d() argument
254 if (p4d_present(*p4d) && !p4d_free_pud_page(p4d, addr)) in vmap_try_huge_p4d()
257 return p4d_set_huge(p4d, phys_addr, prot); in vmap_try_huge_p4d()
264 p4d_t *p4d; in vmap_p4d_range() local
267 p4d = p4d_alloc_track(&init_mm, pgd, addr, mask); in vmap_p4d_range()
268 if (!p4d) in vmap_p4d_range()
273 if (vmap_try_huge_p4d(p4d, addr, next, phys_addr, prot, in vmap_p4d_range()
279 if (vmap_pud_range(p4d, addr, next, phys_addr, prot, in vmap_p4d_range()
[all …]
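
vmap_try_huge_p4d() tries to satisfy an entire p4d-sized span with a single huge entry before falling back to vmap_pud_range(). A condensed sketch, assuming a configuration with huge-vmap support; several precondition checks are folded together:

    #include <linux/types.h>
    #include <linux/pgtable.h>

    /* Sketch: map [addr, end) with one huge p4d entry if size,
     * alignment, and the current entry state allow it. */
    static bool try_huge_p4d_sketch(p4d_t *p4d, unsigned long addr,
                                    unsigned long end, phys_addr_t phys_addr,
                                    pgprot_t prot)
    {
        if (end - addr != P4D_SIZE)
            return false;
        if (!IS_ALIGNED(addr, P4D_SIZE) || !IS_ALIGNED(phys_addr, P4D_SIZE))
            return false;
        /* Refuse if a pud table is already hooked in and cannot be freed. */
        if (p4d_present(*p4d) && !p4d_free_pud_page(p4d, addr))
            return false;
        return p4d_set_huge(p4d, phys_addr, prot);
    }
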
gup.c
796 p4d_t *p4d; in follow_p4d_mask() local
799 p4d = p4d_offset(pgdp, address); in follow_p4d_mask()
800 if (p4d_none(*p4d)) in follow_p4d_mask()
802 BUILD_BUG_ON(p4d_huge(*p4d)); in follow_p4d_mask()
803 if (unlikely(p4d_bad(*p4d))) in follow_p4d_mask()
806 if (is_hugepd(__hugepd(p4d_val(*p4d)))) { in follow_p4d_mask()
808 __hugepd(p4d_val(*p4d)), flags, in follow_p4d_mask()
814 return follow_pud_mask(vma, address, p4d, flags, ctx); in follow_p4d_mask()
896 p4d_t *p4d; in get_gate_page() local
911 p4d = p4d_offset(pgd, address); in get_gate_page()
[all …]
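
follow_p4d_mask() adds a p4d_bad() check on top of the usual none test: an empty entry is an ordinary hole, while a bad one signals a corrupted table; GUP treats both as "no page here". A short sketch of just that validation (illustrative helper name):

    #include <linux/mm.h>

    /* Sketch: validate the p4d entry for address before descending. */
    static p4d_t *p4d_checked_sketch(pgd_t *pgdp, unsigned long address)
    {
        p4d_t *p4d = p4d_offset(pgdp, address);

        if (p4d_none(*p4d))
            return NULL;          /* ordinary hole */
        if (unlikely(p4d_bad(*p4d)))
            return NULL;          /* corrupt entry */
        return p4d;
    }
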
mprotect.c
296 p4d_t *p4d, unsigned long addr, unsigned long end, in change_pud_range() argument
303 pud = pud_offset(p4d, addr); in change_pud_range()
319 p4d_t *p4d; in change_p4d_range() local
323 p4d = p4d_offset(pgd, addr); in change_p4d_range()
326 if (p4d_none_or_clear_bad(p4d)) in change_p4d_range()
328 pages += change_pud_range(vma, p4d, addr, next, newprot, in change_p4d_range()
330 } while (p4d++, addr = next, addr != end); in change_p4d_range()
debug_vm_pgtable.c
506 p4d_t p4d; in p4d_basic_tests() local
509 memset(&p4d, RANDOM_NZVALUE, sizeof(p4d_t)); in p4d_basic_tests()
510 WARN_ON(!p4d_same(p4d, p4d)); in p4d_basic_tests()
562 p4d_t p4d = READ_ONCE(*args->p4dp); in p4d_clear_tests() local
568 p4d = __p4d(p4d_val(p4d) | RANDOM_ORVALUE); in p4d_clear_tests()
569 WRITE_ONCE(*args->p4dp, p4d); in p4d_clear_tests()
571 p4d = READ_ONCE(*args->p4dp); in p4d_clear_tests()
572 WARN_ON(!p4d_none(p4d)); in p4d_clear_tests()
577 p4d_t p4d; in p4d_populate_tests() local
590 p4d = READ_ONCE(*args->p4dp); in p4d_populate_tests()
[all …]
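
debug_vm_pgtable.c touches entries only through READ_ONCE()/WRITE_ONCE() and asserts invariants with WARN_ON(). A sketch of the clear test's shape; the scribble constant below stands in for the file's RANDOM_ORVALUE:

    #include <linux/mm.h>
    #include <linux/pgtable.h>

    /* Sketch: dirty a p4d entry, clear it, then verify p4d_none(). */
    static void p4d_clear_test_sketch(p4d_t *p4dp)
    {
        p4d_t p4d = READ_ONCE(*p4dp);

        p4d = __p4d(p4d_val(p4d) | 0xf0f0f0f0UL); /* non-zero scribble */
        WRITE_ONCE(*p4dp, p4d);
        p4d_clear(p4dp);
        p4d = READ_ONCE(*p4dp);
        WARN_ON(!p4d_none(p4d));
    }
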
page_vma_mapped.c
159 p4d_t *p4d; in page_vma_mapped_walk() local
202 p4d = p4d_offset(pgd, pvmw->address); in page_vma_mapped_walk()
203 if (!p4d_present(*p4d)) { in page_vma_mapped_walk()
207 pud = pud_offset(p4d, pvmw->address); in page_vma_mapped_walk()
mremap.c
36 p4d_t *p4d; in get_old_pud() local
43 p4d = p4d_offset(pgd, addr); in get_old_pud()
44 if (p4d_none_or_clear_bad(p4d)) in get_old_pud()
47 pud = pud_offset(p4d, addr); in get_old_pud()
74 p4d_t *p4d; in alloc_new_pud() local
77 p4d = p4d_alloc(mm, pgd, addr); in alloc_new_pud()
78 if (!p4d) in alloc_new_pud()
81 return pud_alloc(mm, p4d, addr); in alloc_new_pud()
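
alloc_new_pud() shows the write-side chain that userfaultfd.c and hugetlb.c below repeat: each level's _alloc returns the existing table or creates it, failing only on out-of-memory. A minimal sketch with an illustrative name:

    #include <linux/mm.h>

    /* Sketch: find or create the pud covering addr, building the
     * p4d level on the way down. */
    static pud_t *pud_alloc_chain_sketch(struct mm_struct *mm,
                                         unsigned long addr)
    {
        pgd_t *pgd = pgd_offset(mm, addr);
        p4d_t *p4d = p4d_alloc(mm, pgd, addr);

        if (!p4d)
            return NULL; /* out of memory */
        return pud_alloc(mm, p4d, addr);
    }
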
pgtable-generic.c
28 void p4d_clear_bad(p4d_t *p4d) in p4d_clear_bad() argument
30 p4d_ERROR(*p4d); in p4d_clear_bad()
31 p4d_clear(p4d); in p4d_clear_bad()
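
p4d_clear_bad() is the slow path behind the p4d_none_or_clear_bad() calls seen in pagewalk.c, memory.c, mremap.c, and swapfile.c: log the corrupt entry, then zap it so walkers can treat it as a hole. A sketch mirroring the generic helper's pattern in <linux/pgtable.h>:

    #include <linux/pgtable.h>

    /* Sketch: return 1 if this p4d entry should be skipped, demoting
     * a corrupt entry to an empty one after reporting it. */
    static inline int p4d_skip_sketch(p4d_t *p4d)
    {
        if (p4d_none(*p4d))
            return 1;
        if (unlikely(p4d_bad(*p4d))) {
            p4d_clear_bad(p4d); /* p4d_ERROR() + p4d_clear() */
            return 1;
        }
        return 0;
    }
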
userfaultfd.c
264 p4d_t *p4d; in mm_alloc_pmd() local
268 p4d = p4d_alloc(mm, pgd, address); in mm_alloc_pmd()
269 if (!p4d) in mm_alloc_pmd()
271 pud = pud_alloc(mm, p4d, address); in mm_alloc_pmd()
ptdump.c
49 static int ptdump_p4d_entry(p4d_t *p4d, unsigned long addr, in ptdump_p4d_entry() argument
53 p4d_t val = READ_ONCE(*p4d); in ptdump_p4d_entry()
memory-failure.c
312 p4d_t *p4d; in dev_pagemap_mapping_shift() local
320 p4d = p4d_offset(pgd, address); in dev_pagemap_mapping_shift()
321 if (!p4d_present(*p4d)) in dev_pagemap_mapping_shift()
323 pud = pud_offset(p4d, address); in dev_pagemap_mapping_shift()
rmap.c
748 p4d_t *p4d; in mm_find_pmd() local
757 p4d = p4d_offset(pgd, address); in mm_find_pmd()
758 if (!p4d_present(*p4d)) in mm_find_pmd()
761 pud = pud_offset(p4d, address); in mm_find_pmd()
swapfile.c
2036 static inline int unuse_pud_range(struct vm_area_struct *vma, p4d_t *p4d, in unuse_pud_range() argument
2045 pud = pud_offset(p4d, addr); in unuse_pud_range()
2063 p4d_t *p4d; in unuse_p4d_range() local
2067 p4d = p4d_offset(pgd, addr); in unuse_p4d_range()
2070 if (p4d_none_or_clear_bad(p4d)) in unuse_p4d_range()
2072 ret = unuse_pud_range(vma, p4d, addr, next, type, in unuse_p4d_range()
2076 } while (p4d++, addr = next, addr != end); in unuse_p4d_range()
hugetlb.c
6076 p4d_t *p4d = p4d_offset(pgd, *addr); in huge_pmd_unshare() local
6077 pud_t *pud = pud_offset(p4d, *addr); in huge_pmd_unshare()
6127 p4d_t *p4d; in huge_pte_alloc() local
6132 p4d = p4d_alloc(mm, pgd, addr); in huge_pte_alloc()
6133 if (!p4d) in huge_pte_alloc()
6135 pud = pud_alloc(mm, p4d, addr); in huge_pte_alloc()
6165 p4d_t *p4d; in huge_pte_offset() local
6172 p4d = p4d_offset(pgd, addr); in huge_pte_offset()
6173 if (!p4d_present(*p4d)) in huge_pte_offset()
6176 pud = pud_offset(p4d, addr); in huge_pte_offset()
huge_memory.c
2230 p4d_t *p4d; in split_huge_pmd_address() local
2238 p4d = p4d_offset(pgd, address); in split_huge_pmd_address()
2239 if (!p4d_present(*p4d)) in split_huge_pmd_address()
2242 pud = pud_offset(p4d, address); in split_huge_pmd_address()
vmscan.c
3927 static int walk_pud_range(p4d_t *p4d, unsigned long start, unsigned long end, in walk_pud_range() argument
3936 VM_WARN_ON_ONCE(p4d_leaf(*p4d)); in walk_pud_range()
3938 pud = pud_offset(p4d, start & P4D_MASK); in walk_pud_range()
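
All 21 call sites can use the p4d API unconditionally because, on architectures with fewer than five page-table levels, the p4d is folded into the pgd and the helpers compile down to pass-throughs. A sketch of the folded shape, following <asm-generic/pgtable-nop4d.h>:

    /* Sketch: with the p4d level folded, the pgd entry doubles as
     * the p4d, so walks like the ones above cost nothing extra on
     * 4-level machines. */
    static inline p4d_t *p4d_offset_folded_sketch(pgd_t *pgd,
                                                  unsigned long address)
    {
        return (p4d_t *)pgd;
    }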