
Search results for refs:pud (1 – 14 of 14), sorted by relevance

/mm/kasan/
kasan_init.c
62 static void __init zero_pmd_populate(pud_t *pud, unsigned long addr, in zero_pmd_populate() argument
65 pmd_t *pmd = pmd_offset(pud, addr); in zero_pmd_populate()
87 pud_t *pud = pud_offset(pgd, addr); in zero_pud_populate() local
95 pud_populate(&init_mm, pud, kasan_zero_pmd); in zero_pud_populate()
96 pmd = pmd_offset(pud, addr); in zero_pud_populate()
101 if (pud_none(*pud)) { in zero_pud_populate()
102 pud_populate(&init_mm, pud, in zero_pud_populate()
105 zero_pmd_populate(pud, addr, next); in zero_pud_populate()
106 } while (pud++, addr = next, addr != end); in zero_pud_populate()
127 pud_t *pud; in kasan_populate_zero_shadow() local
[all …]
/mm/
sparse-vmemmap.c
115 pmd_t * __meminit vmemmap_pmd_populate(pud_t *pud, unsigned long addr, int node) in vmemmap_pmd_populate() argument
117 pmd_t *pmd = pmd_offset(pud, addr); in vmemmap_pmd_populate()
129 pud_t *pud = pud_offset(pgd, addr); in vmemmap_pud_populate() local
130 if (pud_none(*pud)) { in vmemmap_pud_populate()
134 pud_populate(&init_mm, pud, p); in vmemmap_pud_populate()
136 return pud; in vmemmap_pud_populate()
156 pud_t *pud; in vmemmap_populate_basepages() local
164 pud = vmemmap_pud_populate(pgd, addr, node); in vmemmap_populate_basepages()
165 if (!pud) in vmemmap_populate_basepages()
167 pmd = vmemmap_pmd_populate(pud, addr, node); in vmemmap_populate_basepages()
gup.c
205 pud_t *pud; in follow_page_mask() local
223 pud = pud_offset(pgd, address); in follow_page_mask()
224 if (pud_none(*pud)) in follow_page_mask()
226 if (pud_huge(*pud) && vma->vm_flags & VM_HUGETLB) { in follow_page_mask()
227 page = follow_huge_pud(mm, address, pud, flags); in follow_page_mask()
232 if (unlikely(pud_bad(*pud))) in follow_page_mask()
235 pmd = pmd_offset(pud, address); in follow_page_mask()
274 pud_t *pud; in get_gate_page() local
287 pud = pud_offset(pgd, address); in get_gate_page()
288 BUG_ON(pud_none(*pud)); in get_gate_page()
[all …]
memory.c
392 static inline void free_pmd_range(struct mmu_gather *tlb, pud_t *pud, in free_pmd_range() argument
401 pmd = pmd_offset(pud, addr); in free_pmd_range()
420 pmd = pmd_offset(pud, start); in free_pmd_range()
421 pud_clear(pud); in free_pmd_range()
430 pud_t *pud; in free_pud_range() local
435 pud = pud_offset(pgd, addr); in free_pud_range()
438 if (pud_none_or_clear_bad(pud)) in free_pud_range()
440 free_pmd_range(tlb, pud, addr, next, floor, ceiling); in free_pud_range()
441 } while (pud++, addr = next, addr != end); in free_pud_range()
454 pud = pud_offset(pgd, start); in free_pud_range()
[all …]
pagewalk.c
27 static int walk_pmd_range(pud_t *pud, unsigned long addr, unsigned long end, in walk_pmd_range() argument
34 pmd = pmd_offset(pud, addr); in walk_pmd_range()
75 pud_t *pud; in walk_pud_range() local
79 pud = pud_offset(pgd, addr); in walk_pud_range()
82 if (pud_none_or_clear_bad(pud)) { in walk_pud_range()
90 err = walk_pmd_range(pud, addr, next, walk); in walk_pud_range()
93 } while (pud++, addr = next, addr != end); in walk_pud_range()
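Aside: almost every hit in this listing follows the pud-level iteration idiom that walk_pud_range() shows most cleanly: fetch the entry with pud_offset(), clamp each step with pud_addr_end(), skip empty or corrupt entries with pud_none_or_clear_bad(), and descend to the pmd level for the rest. A minimal sketch of that pattern, assuming the 4-level layout of this tree (no p4d level, so pud_offset() takes the pgd entry directly); example_pud_walk() and example_pmd_step() are made-up names, not functions from the files listed here:

	#include <linux/mm.h>

	/* Hypothetical pmd-level step; stands in for walk_pmd_range(),
	 * free_pmd_range(), unuse_pmd_range() and friends in this listing. */
	static void example_pmd_step(pud_t *pud, unsigned long addr,
				     unsigned long end)
	{
		pmd_t *pmd = pmd_offset(pud, addr);
		/* per-pmd work for [addr, end) would go here */
		(void)pmd;
	}

	static void example_pud_walk(pgd_t *pgd, unsigned long addr,
				     unsigned long end)
	{
		pud_t *pud;
		unsigned long next;

		pud = pud_offset(pgd, addr);		/* entry covering addr */
		do {
			next = pud_addr_end(addr, end);	/* clamp to this entry's range */
			if (pud_none_or_clear_bad(pud))	/* skip holes and bad entries */
				continue;
			example_pmd_step(pud, addr, next);
		} while (pud++, addr = next, addr != end);
	}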
mprotect.c
138 pud_t *pud, unsigned long addr, unsigned long end, in change_pmd_range() argument
148 pmd = pmd_offset(pud, addr); in change_pmd_range()
198 pud_t *pud; in change_pud_range() local
202 pud = pud_offset(pgd, addr); in change_pud_range()
205 if (pud_none_or_clear_bad(pud)) in change_pud_range()
207 pages += change_pmd_range(vma, pud, addr, next, newprot, in change_pud_range()
209 } while (pud++, addr = next, addr != end); in change_pud_range()
mremap.c
35 pud_t *pud; in get_old_pmd() local
42 pud = pud_offset(pgd, addr); in get_old_pmd()
43 if (pud_none_or_clear_bad(pud)) in get_old_pmd()
46 pmd = pmd_offset(pud, addr); in get_old_pmd()
57 pud_t *pud; in alloc_new_pmd() local
61 pud = pud_alloc(mm, pgd, addr); in alloc_new_pmd()
62 if (!pud) in alloc_new_pmd()
65 pmd = pmd_alloc(mm, pud, addr); in alloc_new_pmd()
vmalloc.c
72 static void vunmap_pmd_range(pud_t *pud, unsigned long addr, unsigned long end) in vunmap_pmd_range() argument
77 pmd = pmd_offset(pud, addr); in vunmap_pmd_range()
90 pud_t *pud; in vunmap_pud_range() local
93 pud = pud_offset(pgd, addr); in vunmap_pud_range()
96 if (pud_clear_huge(pud)) in vunmap_pud_range()
98 if (pud_none_or_clear_bad(pud)) in vunmap_pud_range()
100 vunmap_pmd_range(pud, addr, next); in vunmap_pud_range()
101 } while (pud++, addr = next, addr != end); in vunmap_pud_range()
145 static int vmap_pmd_range(pud_t *pud, unsigned long addr, in vmap_pmd_range() argument
151 pmd = pmd_alloc(&init_mm, pud, addr); in vmap_pmd_range()
[all …]
userfaultfd.c
127 pud_t *pud; in mm_alloc_pmd() local
131 pud = pud_alloc(mm, pgd, address); in mm_alloc_pmd()
132 if (pud) in mm_alloc_pmd()
138 pmd = pmd_alloc(mm, pud, address); in mm_alloc_pmd()
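Aside: the mremap.c and userfaultfd.c hits show the allocation-side counterpart of the walk above: pud_alloc() and pmd_alloc() create missing intermediate levels instead of merely reading them, and either call can fail under memory pressure. A hedged sketch of that idiom; example_alloc_pmd() is an invented name, not the exact alloc_new_pmd()/mm_alloc_pmd() code:

	#include <linux/mm.h>

	/* Find or allocate the pmd covering address; NULL on allocation failure. */
	static pmd_t *example_alloc_pmd(struct mm_struct *mm, unsigned long address)
	{
		pgd_t *pgd;
		pud_t *pud;
		pmd_t *pmd;

		pgd = pgd_offset(mm, address);
		pud = pud_alloc(mm, pgd, address);	/* may allocate a pud page */
		if (!pud)
			return NULL;
		pmd = pmd_alloc(mm, pud, address);	/* may allocate a pmd page */
		if (!pmd)
			return NULL;
		return pmd;
	}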
pgtable-generic.c
25 void pud_clear_bad(pud_t *pud) in pud_clear_bad() argument
27 pud_ERROR(*pud); in pud_clear_bad()
28 pud_clear(pud); in pud_clear_bad()
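Aside: the pgtable-generic.c entry is short enough that the whole helper is visible in the matched lines. pud_none_or_clear_bad() calls it when it trips over a corrupt entry; the helper reports the bogus value via pud_ERROR() and then clears the entry so walkers can continue. Reassembled from the three lines above (the real function may carry extra annotations):

	void pud_clear_bad(pud_t *pud)
	{
		pud_ERROR(*pud);	/* log the corrupt entry */
		pud_clear(pud);		/* wipe it so the walk can proceed */
	}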
hugetlb.c
4389 pte_t *huge_pmd_share(struct mm_struct *mm, unsigned long addr, pud_t *pud) in huge_pmd_share() argument
4402 return (pte_t *)pmd_alloc(mm, pud, addr); in huge_pmd_share()
4424 if (pud_none(*pud)) { in huge_pmd_share()
4425 pud_populate(mm, pud, in huge_pmd_share()
4433 pte = (pte_t *)pmd_alloc(mm, pud, addr); in huge_pmd_share()
4453 pud_t *pud = pud_offset(pgd, *addr); in huge_pmd_unshare() local
4459 pud_clear(pud); in huge_pmd_unshare()
4467 pte_t *huge_pmd_share(struct mm_struct *mm, unsigned long addr, pud_t *pud) in huge_pmd_share() argument
4489 pud_t *pud; in huge_pte_alloc() local
4493 pud = pud_alloc(mm, pgd, addr); in huge_pte_alloc()
[all …]
huge_memory.c
1673 pud_t *pud; in page_check_address_pmd() local
1682 pud = pud_offset(pgd, address); in page_check_address_pmd()
1683 if (!pud_present(*pud)) in page_check_address_pmd()
1685 pmd = pmd_offset(pud, address); in page_check_address_pmd()
3063 pud_t *pud; in split_huge_page_address() local
3072 pud = pud_offset(pgd, address); in split_huge_page_address()
3073 if (!pud_present(*pud)) in split_huge_page_address()
3076 pmd = pmd_offset(pud, address); in split_huge_page_address()
rmap.c
742 pud_t *pud; in mm_find_pmd() local
750 pud = pud_offset(pgd, address); in mm_find_pmd()
751 if (!pud_present(*pud)) in mm_find_pmd()
754 pmd = pmd_offset(pud, address); in mm_find_pmd()
swapfile.c
1225 static inline int unuse_pmd_range(struct vm_area_struct *vma, pud_t *pud, in unuse_pmd_range() argument
1233 pmd = pmd_offset(pud, addr); in unuse_pmd_range()
1249 pud_t *pud; in unuse_pud_range() local
1253 pud = pud_offset(pgd, addr); in unuse_pud_range()
1256 if (pud_none_or_clear_bad(pud)) in unuse_pud_range()
1258 ret = unuse_pmd_range(vma, pud, addr, next, entry, page); in unuse_pud_range()
1261 } while (pud++, addr = next, addr != end); in unuse_pud_range()