
Searched refs:pud (Results 1 – 16 of 16) sorted by relevance

/mm/kasan/
kasan_init.c
66 static void __init zero_pmd_populate(pud_t *pud, unsigned long addr, in zero_pmd_populate() argument
69 pmd_t *pmd = pmd_offset(pud, addr); in zero_pmd_populate()
91 pud_t *pud = pud_offset(p4d, addr); in zero_pud_populate() local
99 pud_populate(&init_mm, pud, lm_alias(kasan_zero_pmd)); in zero_pud_populate()
100 pmd = pmd_offset(pud, addr); in zero_pud_populate()
105 if (pud_none(*pud)) { in zero_pud_populate()
106 pud_populate(&init_mm, pud, in zero_pud_populate()
109 zero_pmd_populate(pud, addr, next); in zero_pud_populate()
110 } while (pud++, addr = next, addr != end); in zero_pud_populate()
122 pud_t *pud; in zero_p4d_populate() local
[all …]
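
The kasan_init.c hits are the populate-if-empty walk that backs the shadow region with a shared zero PMD table. Below is a condensed sketch of zero_pud_populate() reconstructed from the fragments above; early_alloc() is assumed from the elided lines, and the PMD-level setup on the aligned fast path is dropped for brevity, so this is illustrative rather than verbatim.

static void __init zero_pud_populate(p4d_t *p4d, unsigned long addr,
				     unsigned long end)
{
	pud_t *pud = pud_offset(p4d, addr);
	unsigned long next;

	do {
		next = pud_addr_end(addr, end);
		if (IS_ALIGNED(addr, PUD_SIZE) && end - addr >= PUD_SIZE) {
			/* Whole PUD span: point it at the shared zero
			 * PMD table and move on. */
			pud_populate(&init_mm, pud, lm_alias(kasan_zero_pmd));
			continue;
		}
		if (pud_none(*pud)) {
			/* Partial span: this PUD needs a real PMD table.
			 * early_alloc() is assumed; the actual call is
			 * elided in the listing above. */
			pud_populate(&init_mm, pud,
				     early_alloc(PAGE_SIZE, NUMA_NO_NODE));
		}
		zero_pmd_populate(pud, addr, next);	/* recurse one level */
	} while (pud++, addr = next, addr != end);
}
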
kasan.c
745 pud_t *pud; in shadow_mapped() local
754 pud = pud_offset(p4d, addr); in shadow_mapped()
755 if (pud_none(*pud)) in shadow_mapped()
763 if (pud_bad(*pud)) in shadow_mapped()
765 pmd = pmd_offset(pud, addr); in shadow_mapped()
/mm/
pagewalk.c
28 static int walk_pmd_range(pud_t *pud, unsigned long addr, unsigned long end, in walk_pmd_range() argument
35 pmd = pmd_offset(pud, addr); in walk_pmd_range()
76 pud_t *pud; in walk_pud_range() local
80 pud = pud_offset(p4d, addr); in walk_pud_range()
84 if (pud_none(*pud) || !walk->vma) { in walk_pud_range()
93 spinlock_t *ptl = pud_trans_huge_lock(pud, walk->vma); in walk_pud_range()
96 err = walk->pud_entry(pud, addr, next, walk); in walk_pud_range()
104 split_huge_pud(walk->vma, pud, addr); in walk_pud_range()
105 if (pud_none(*pud)) in walk_pud_range()
109 err = walk_pmd_range(pud, addr, next, walk); in walk_pud_range()
[all …]
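
walk_pud_range() in pagewalk.c shows the standard three-way split at this level: report holes through the pte_hole callback, give a huge-PUD callback first crack under pud_trans_huge_lock(), and otherwise split the entry and descend. A simplified reconstruction of the fragments above (callback-presence checks trimmed), illustrative rather than verbatim:

static int walk_pud_range(p4d_t *p4d, unsigned long addr, unsigned long end,
			  struct mm_walk *walk)
{
	pud_t *pud = pud_offset(p4d, addr);
	unsigned long next;
	int err = 0;

	do {
 again:
		next = pud_addr_end(addr, end);
		if (pud_none(*pud) || !walk->vma) {
			if (walk->pte_hole)
				err = walk->pte_hole(addr, next, walk);
			if (err)
				break;
			continue;
		}
		if (walk->pud_entry) {
			/* A non-NULL ptl means the entry is a huge PUD. */
			spinlock_t *ptl = pud_trans_huge_lock(pud, walk->vma);

			if (ptl) {
				err = walk->pud_entry(pud, addr, next, walk);
				spin_unlock(ptl);
				if (err)
					break;
				continue;
			}
		}
		split_huge_pud(walk->vma, pud, addr);
		if (pud_none(*pud))
			goto again;	/* raced with a zap; retry */
		err = walk_pmd_range(pud, addr, next, walk);
		if (err)
			break;
	} while (pud++, addr = next, addr != end);

	return err;
}
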
huge_memory.c
795 static pud_t maybe_pud_mkwrite(pud_t pud, struct vm_area_struct *vma) in maybe_pud_mkwrite() argument
798 pud = pud_mkwrite(pud); in maybe_pud_mkwrite()
799 return pud; in maybe_pud_mkwrite()
803 pud_t *pud, pfn_t pfn, pgprot_t prot, bool write) in insert_pfn_pud() argument
809 ptl = pud_lock(mm, pud); in insert_pfn_pud()
817 set_pud_at(mm, addr, pud, entry); in insert_pfn_pud()
818 update_mmu_cache_pud(vma, addr, pud); in insert_pfn_pud()
823 pud_t *pud, pfn_t pfn, bool write) in vmf_insert_pfn_pud() argument
842 insert_pfn_pud(vma, addr, pud, pfn, pgprot, write); in vmf_insert_pfn_pud()
997 pud_t *pud, int flags) in touch_pud() argument
[all …]
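
insert_pfn_pud() in huge_memory.c shows how a huge PUD entry is built up from a pfn and installed atomically under pud_lock(). A condensed sketch of the flow in the fragments above; the real function also re-checks for a racing entry before installing:

static void insert_pfn_pud(struct vm_area_struct *vma, unsigned long addr,
			   pud_t *pud, pfn_t pfn, pgprot_t prot, bool write)
{
	struct mm_struct *mm = vma->vm_mm;
	spinlock_t *ptl = pud_lock(mm, pud);
	pud_t entry;

	entry = pud_mkhuge(pfn_t_pud(pfn, prot));
	if (pfn_t_devmap(pfn))
		entry = pud_mkdevmap(entry);
	if (write)
		entry = maybe_pud_mkwrite(pud_mkyoung(pud_mkdirty(entry)), vma);
	set_pud_at(mm, addr, pud, entry);
	update_mmu_cache_pud(vma, addr, pud);	/* arch hook; a no-op on x86 */
	spin_unlock(ptl);
}
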
sparse-vmemmap.c
183 pmd_t * __meminit vmemmap_pmd_populate(pud_t *pud, unsigned long addr, int node) in vmemmap_pmd_populate() argument
185 pmd_t *pmd = pmd_offset(pud, addr); in vmemmap_pmd_populate()
197 pud_t *pud = pud_offset(p4d, addr); in vmemmap_pud_populate() local
198 if (pud_none(*pud)) { in vmemmap_pud_populate()
202 pud_populate(&init_mm, pud, p); in vmemmap_pud_populate()
204 return pud; in vmemmap_pud_populate()
237 pud_t *pud; in vmemmap_populate_basepages() local
248 pud = vmemmap_pud_populate(p4d, addr, node); in vmemmap_populate_basepages()
249 if (!pud) in vmemmap_populate_basepages()
251 pmd = vmemmap_pmd_populate(pud, addr, node); in vmemmap_populate_basepages()
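
sparse-vmemmap.c applies the same allocate-if-none idiom at every level, and vmemmap_populate_basepages() chains the helpers together, bailing out on the first allocation failure. A sketch of the PUD step; the block allocator call is elided in the listing above, so vmemmap_alloc_block() here is an assumption:

pud_t * __meminit vmemmap_pud_populate(p4d_t *p4d, unsigned long addr, int node)
{
	pud_t *pud = pud_offset(p4d, addr);

	if (pud_none(*pud)) {
		/* Assumed node-local allocator; the exact call is
		 * elided in the listing above. */
		void *p = vmemmap_alloc_block(PAGE_SIZE, node);

		if (!p)
			return NULL;
		pud_populate(&init_mm, pud, p);
	}
	return pud;	/* now guaranteed non-empty */
}
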
gup.c
313 pud_t *pud; in follow_pud_mask() local
318 pud = pud_offset(p4dp, address); in follow_pud_mask()
319 if (pud_none(*pud)) in follow_pud_mask()
321 if (pud_huge(*pud) && vma->vm_flags & VM_HUGETLB) { in follow_pud_mask()
322 page = follow_huge_pud(mm, address, pud, flags); in follow_pud_mask()
327 if (is_hugepd(__hugepd(pud_val(*pud)))) { in follow_pud_mask()
329 __hugepd(pud_val(*pud)), flags, in follow_pud_mask()
335 if (pud_devmap(*pud)) { in follow_pud_mask()
336 ptl = pud_lock(mm, pud); in follow_pud_mask()
337 page = follow_devmap_pud(vma, address, pud, flags); in follow_pud_mask()
[all …]
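
follow_pud_mask() in gup.c checks every special PUD state before descending: hugetlb PUDs, hugepd-style directories, and devmap PUDs each short-circuit the walk. A condensed sketch with the hugepd branch elided; no_page_table() is gup's internal hole-or-fault helper:

static struct page *follow_pud_mask(struct vm_area_struct *vma,
				    unsigned long address, p4d_t *p4dp,
				    unsigned int flags)
{
	struct mm_struct *mm = vma->vm_mm;
	pud_t *pud = pud_offset(p4dp, address);
	struct page *page;

	if (pud_none(*pud))
		return no_page_table(vma, flags);
	if (pud_huge(*pud) && vma->vm_flags & VM_HUGETLB) {
		page = follow_huge_pud(mm, address, pud, flags);
		return page ? page : no_page_table(vma, flags);
	}
	if (pud_devmap(*pud)) {
		spinlock_t *ptl = pud_lock(mm, pud);

		page = follow_devmap_pud(vma, address, pud, flags);
		spin_unlock(ptl);
		if (page)
			return page;
	}
	if (unlikely(pud_bad(*pud)))
		return no_page_table(vma, flags);

	return follow_pmd_mask(vma, address, pud, flags);	/* descend */
}
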
memory.c
453 static inline void free_pmd_range(struct mmu_gather *tlb, pud_t *pud, in free_pmd_range() argument
462 pmd = pmd_offset(pud, addr); in free_pmd_range()
481 pmd = pmd_offset(pud, start); in free_pmd_range()
482 pud_clear(pud); in free_pmd_range()
491 pud_t *pud; in free_pud_range() local
496 pud = pud_offset(p4d, addr); in free_pud_range()
499 if (pud_none_or_clear_bad(pud)) in free_pud_range()
501 free_pmd_range(tlb, pud, addr, next, floor, ceiling); in free_pud_range()
502 } while (pud++, addr = next, addr != end); in free_pud_range()
515 pud = pud_offset(p4d, start); in free_pud_range()
[all …]
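
free_pud_range() in memory.c tears down the tables themselves: each PMD table under the range is freed first, and if the whole span is going away the PUD table is detached from the P4D entry and handed to the mmu_gather. Sketch with the floor/ceiling alignment checks elided (in the real function they decide whether the table may be freed at all):

static inline void free_pud_range(struct mmu_gather *tlb, p4d_t *p4d,
				  unsigned long addr, unsigned long end,
				  unsigned long floor, unsigned long ceiling)
{
	unsigned long next, start = addr;
	pud_t *pud = pud_offset(p4d, addr);

	do {
		next = pud_addr_end(addr, end);
		if (pud_none_or_clear_bad(pud))
			continue;
		free_pmd_range(tlb, pud, addr, next, floor, ceiling);
	} while (pud++, addr = next, addr != end);

	/* floor/ceiling checks elided: only a fully covered table
	 * may be detached and freed. */
	pud = pud_offset(p4d, start);
	p4d_clear(p4d);
	pud_free_tlb(tlb, pud, start);
}
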
pgtable-generic.c
32 void pud_clear_bad(pud_t *pud) in pud_clear_bad() argument
34 pud_ERROR(*pud); in pud_clear_bad()
35 pud_clear(pud); in pud_clear_bad()
139 pud_t pud; in pudp_huge_clear_flush() local
143 pud = pudp_huge_get_and_clear(vma->vm_mm, address, pudp); in pudp_huge_clear_flush()
145 return pud; in pudp_huge_clear_flush()
vmalloc.c
71 static void vunmap_pmd_range(pud_t *pud, unsigned long addr, unsigned long end) in vunmap_pmd_range() argument
76 pmd = pmd_offset(pud, addr); in vunmap_pmd_range()
89 pud_t *pud; in vunmap_pud_range() local
92 pud = pud_offset(p4d, addr); in vunmap_pud_range()
95 if (pud_clear_huge(pud)) in vunmap_pud_range()
97 if (pud_none_or_clear_bad(pud)) in vunmap_pud_range()
99 vunmap_pmd_range(pud, addr, next); in vunmap_pud_range()
100 } while (pud++, addr = next, addr != end); in vunmap_pud_range()
160 static int vmap_pmd_range(pud_t *pud, unsigned long addr, in vmap_pmd_range() argument
166 pmd = pmd_alloc(&init_mm, pud, addr); in vmap_pmd_range()
[all …]
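
vunmap_pud_range() in vmalloc.c adds a huge-mapping check to the standard loop: pud_clear_huge() tears down a huge PUD in place, and only normally mapped entries descend to the PMD level. The loop as reconstructed from the fragments above:

static void vunmap_pud_range(p4d_t *p4d, unsigned long addr, unsigned long end)
{
	pud_t *pud = pud_offset(p4d, addr);
	unsigned long next;

	do {
		next = pud_addr_end(addr, end);
		if (pud_clear_huge(pud))
			continue;	/* whole PUD was one huge mapping */
		if (pud_none_or_clear_bad(pud))
			continue;	/* empty or corrupt: nothing below */
		vunmap_pmd_range(pud, addr, next);
	} while (pud++, addr = next, addr != end);
}
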
mremap.c
37 pud_t *pud; in get_old_pmd() local
48 pud = pud_offset(p4d, addr); in get_old_pmd()
49 if (pud_none_or_clear_bad(pud)) in get_old_pmd()
52 pmd = pmd_offset(pud, addr); in get_old_pmd()
64 pud_t *pud; in alloc_new_pmd() local
71 pud = pud_alloc(mm, p4d, addr); in alloc_new_pmd()
72 if (!pud) in alloc_new_pmd()
75 pmd = pmd_alloc(mm, pud, addr); in alloc_new_pmd()
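
mremap.c pairs a lookup-only walk (get_old_pmd(), where pud_none_or_clear_bad() bails on holes) with an allocating walk for the destination. A sketch of the allocating side, matching the fragments above; userfaultfd.c's mm_alloc_pmd() further down has the same shape, and the real alloc_new_pmd() also takes the vma:

static pmd_t *alloc_new_pmd(struct mm_struct *mm, unsigned long addr)
{
	pgd_t *pgd = pgd_offset(mm, addr);	/* never fails */
	p4d_t *p4d = p4d_alloc(mm, pgd, addr);
	pud_t *pud;

	if (!p4d)
		return NULL;
	pud = pud_alloc(mm, p4d, addr);		/* allocates if missing */
	if (!pud)
		return NULL;
	return pmd_alloc(mm, pud, addr);
}
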
mprotect.c
177 pud_t *pud, unsigned long addr, unsigned long end, in change_pmd_range() argument
187 pmd = pmd_offset(pud, addr); in change_pmd_range()
249 pud_t *pud; in change_pud_range() local
253 pud = pud_offset(p4d, addr); in change_pud_range()
256 if (pud_none_or_clear_bad(pud)) in change_pud_range()
258 pages += change_pmd_range(vma, pud, addr, next, newprot, in change_pud_range()
260 } while (pud++, addr = next, addr != end); in change_pud_range()
page_vma_mapped.c
143 pud_t *pud; in page_vma_mapped_walk() local
173 pud = pud_offset(p4d, pvmw->address); in page_vma_mapped_walk()
174 if (!pud_present(*pud)) in page_vma_mapped_walk()
176 pvmw->pmd = pmd_offset(pud, pvmw->address); in page_vma_mapped_walk()
hugetlb.c
4608 pte_t *huge_pmd_share(struct mm_struct *mm, unsigned long addr, pud_t *pud) in huge_pmd_share() argument
4621 return (pte_t *)pmd_alloc(mm, pud, addr); in huge_pmd_share()
4643 if (pud_none(*pud)) { in huge_pmd_share()
4644 pud_populate(mm, pud, in huge_pmd_share()
4652 pte = (pte_t *)pmd_alloc(mm, pud, addr); in huge_pmd_share()
4673 pud_t *pud = pud_offset(p4d, *addr); in huge_pmd_unshare() local
4679 pud_clear(pud); in huge_pmd_unshare()
4687 pte_t *huge_pmd_share(struct mm_struct *mm, unsigned long addr, pud_t *pud) in huge_pmd_share() argument
4710 pud_t *pud; in huge_pte_alloc() local
4717 pud = pud_alloc(mm, p4d, addr); in huge_pte_alloc()
[all …]
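
hugetlb.c is the one place where a PUD entry itself can serve as the "PTE": for a PUD-sized page, huge_pte_alloc() returns the pud_t directly, while PMD-sized pages may share a PMD table with another mapping of the same region via huge_pmd_share() (the second definition above, at 4687, is the non-sharing stub that just calls pmd_alloc()). A condensed sketch with the size BUG_ON and the sharing eligibility check elided:

pte_t *huge_pte_alloc(struct mm_struct *mm, unsigned long addr,
		      unsigned long sz)
{
	pgd_t *pgd = pgd_offset(mm, addr);
	p4d_t *p4d = p4d_offset(pgd, addr);
	pud_t *pud = pud_alloc(mm, p4d, addr);

	if (!pud)
		return NULL;
	if (sz == PUD_SIZE)
		return (pte_t *)pud;	/* the PUD entry is the huge PTE */
	/* PMD-sized: try sharing an existing PMD table first. */
	return huge_pmd_share(mm, addr, pud);
}
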
userfaultfd.c
154 pud_t *pud; in mm_alloc_pmd() local
160 pud = pud_alloc(mm, p4d, address); in mm_alloc_pmd()
161 if (!pud) in mm_alloc_pmd()
168 return pmd_alloc(mm, pud, address); in mm_alloc_pmd()
rmap.c
714 pud_t *pud; in mm_find_pmd() local
726 pud = pud_offset(p4d, address); in mm_find_pmd()
727 if (!pud_present(*pud)) in mm_find_pmd()
730 pmd = pmd_offset(pud, address); in mm_find_pmd()
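
mm_find_pmd() in rmap.c is the minimal read-only descent that several of the files above repeat (page_vma_mapped.c, kasan.c's shadow_mapped()): test presence at each level and never allocate. A sketch condensed from the fragments; the real function also revalidates the PMD (present and not transparently huge) before returning it:

static pmd_t *mm_find_pmd(struct mm_struct *mm, unsigned long address)
{
	pgd_t *pgd = pgd_offset(mm, address);
	p4d_t *p4d;
	pud_t *pud;

	if (!pgd_present(*pgd))
		return NULL;
	p4d = p4d_offset(pgd, address);
	if (!p4d_present(*p4d))
		return NULL;
	pud = pud_offset(p4d, address);
	if (!pud_present(*pud))
		return NULL;
	return pmd_offset(pud, address);	/* caller re-checks the PMD */
}
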
swapfile.c
1858 static inline int unuse_pmd_range(struct vm_area_struct *vma, pud_t *pud, in unuse_pmd_range() argument
1866 pmd = pmd_offset(pud, addr); in unuse_pmd_range()
1883 pud_t *pud; in unuse_pud_range() local
1887 pud = pud_offset(p4d, addr); in unuse_pud_range()
1890 if (pud_none_or_clear_bad(pud)) in unuse_pud_range()
1892 ret = unuse_pmd_range(vma, pud, addr, next, entry, page); in unuse_pud_range()
1895 } while (pud++, addr = next, addr != end); in unuse_pud_range()