/mm/
D | init-mm.c |
      29  struct mm_struct init_mm = {  [variable]
      34      .write_protect_seq = SEQCNT_ZERO(init_mm.write_protect_seq),
      35      MMAP_LOCK_INITIALIZER(init_mm)
      36      .page_table_lock = __SPIN_LOCK_UNLOCKED(init_mm.page_table_lock),
      37      .arg_lock = __SPIN_LOCK_UNLOCKED(init_mm.arg_lock),
      38      .mmlist = LIST_HEAD_INIT(init_mm.mmlist),
      41      INIT_MM_CONTEXT(init_mm)
      47  init_mm.start_code = (unsigned long)start_code;  in setup_initial_init_mm()
      48  init_mm.end_code = (unsigned long)end_code;  in setup_initial_init_mm()
      49  init_mm.end_data = (unsigned long)end_data;  in setup_initial_init_mm()
      [all …]
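init_mm is the statically initialized address space of the kernel itself; setup_initial_init_mm() just records the kernel image layout into it at boot. A minimal sketch of a caller, assuming the usual linker symbols from <asm/sections.h> (the helper name is hypothetical; each architecture calls this from its own setup_arch()):

    #include <linux/init.h>
    #include <linux/mm.h>        /* setup_initial_init_mm(), init_mm */
    #include <asm/sections.h>    /* _stext, _etext, _edata, _end */

    /* Hypothetical arch-setup fragment: record the kernel image layout. */
    static void __init record_kernel_layout(void)
    {
    	setup_initial_init_mm(_stext, _etext, _edata, _end);
    }
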
D | sparse-vmemmap.c |
      63  pte_t *pgtable = pte_alloc_one_kernel(&init_mm);  in split_vmemmap_huge_pmd()
      68  pmd_populate_kernel(&init_mm, &__pmd, pgtable);  in split_vmemmap_huge_pmd()
      76  set_pte_at(&init_mm, addr, pte, entry);  in split_vmemmap_huge_pmd()
      81  pmd_populate_kernel(&init_mm, pmd, pgtable);  in split_vmemmap_huge_pmd()
     245  set_pte_at(&init_mm, addr, pte, entry);  in vmemmap_remap_pte()
     262  set_pte_at(&init_mm, addr, pte, mk_pte(page, pgprot));  in vmemmap_restore_pte()
     303  mmap_write_lock(&init_mm);  in vmemmap_remap_free()
     305  mmap_write_downgrade(&init_mm);  in vmemmap_remap_free()
     323  mmap_read_unlock(&init_mm);  in vmemmap_remap_free()
     380  mmap_read_lock(&init_mm);  in vmemmap_remap_alloc()
     [all …]
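The split_vmemmap_huge_pmd() hits all follow one pattern: build a complete PTE table off to the side, then swap it in for the huge PMD under init_mm.page_table_lock. A condensed sketch of that pattern, assuming the generic page-table helpers (the real function also handles TLB flush ordering and racing splitters):

    #include <linux/mm.h>
    #include <asm/pgalloc.h>      /* pte_alloc_one_kernel(), pmd_populate_kernel() */
    #include <asm/tlbflush.h>     /* flush_tlb_kernel_range() */

    static int split_huge_kernel_pmd(pmd_t *pmd, unsigned long start)
    {
    	pmd_t __pmd;                               /* scratch PMD */
    	struct page *head = pmd_page(*pmd);        /* huge-mapped pages */
    	pte_t *pgtable = pte_alloc_one_kernel(&init_mm);
    	unsigned long addr;
    	int i;

    	if (!pgtable)
    		return -ENOMEM;

    	/* Point the scratch PMD at the new table ... */
    	pmd_populate_kernel(&init_mm, &__pmd, pgtable);

    	/* ... fill it with base-page entries covering the same range ... */
    	for (i = 0, addr = start; i < PTRS_PER_PTE; i++, addr += PAGE_SIZE) {
    		pte_t *pte = pte_offset_kernel(&__pmd, addr);

    		set_pte_at(&init_mm, addr, pte, mk_pte(head + i, PAGE_KERNEL));
    	}

    	/* ... then publish it under the kernel page-table lock. */
    	spin_lock(&init_mm.page_table_lock);
    	pmd_populate_kernel(&init_mm, pmd, pgtable);
    	spin_unlock(&init_mm.page_table_lock);

    	flush_tlb_kernel_range(start, start + PMD_SIZE);
    	return 0;
    }

The mmap_write_lock(&init_mm)/mmap_write_downgrade() pair in vmemmap_remap_free() shows that init_mm's mmap lock is also taken here, since the remap rewrites live kernel mappings.
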
D | highmem.c |
     189  pte_clear(&init_mm, PKMAP_ADDR(i), &pkmap_page_table[i]);  in flush_all_zero_pkmaps()
     250  set_pte_at(&init_mm, vaddr,  in map_new_virtual()
     537  arch_kmap_local_set_pte(&init_mm, vaddr, kmap_pte, pteval);  in __kmap_local_pfn_prot()
     597  pte_clear(&init_mm, addr, kmap_pte);  in kunmap_local_indexed()
     648  pte_clear(&init_mm, addr, kmap_pte);  in __kmap_local_sched_out()
     677  set_pte_at(&init_mm, addr, kmap_pte, pteval);  in __kmap_local_sched_in()
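highmem.c manipulates init_mm PTEs directly because kmap windows live in fixed kernel virtual slots. A rough sketch of the install/teardown pair, assuming a simplified slot model (the real code computes kmap_pte per CPU and index, and defers flushes where it can):

    #include <linux/mm.h>
    #include <asm/tlbflush.h>

    /* Sketch: map a pfn into a fixed kernel virtual slot ... */
    static void kmap_slot_map(unsigned long vaddr, pte_t *kmap_pte,
    			  unsigned long pfn, pgprot_t prot)
    {
    	set_pte_at(&init_mm, vaddr, kmap_pte, pfn_pte(pfn, prot));
    }

    /* ... and tear the slot down when the mapping is dropped. */
    static void kmap_slot_unmap(unsigned long vaddr, pte_t *kmap_pte)
    {
    	pte_clear(&init_mm, vaddr, kmap_pte);
    	flush_tlb_kernel_range(vaddr, vaddr + PAGE_SIZE);
    }
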
D | vmalloc.c |
     122  set_huge_pte_at(&init_mm, addr, pte, entry);  in vmap_pte_range()
     127  set_pte_at(&init_mm, addr, pte, pfn_pte(pfn, prot));  in vmap_pte_range()
     166  pmd = pmd_alloc_track(&init_mm, pud, addr, mask);  in vmap_pmd_range()
     216  pud = pud_alloc_track(&init_mm, p4d, addr, mask);  in vmap_pud_range()
     267  p4d = p4d_alloc_track(&init_mm, pgd, addr, mask);  in vmap_p4d_range()
     338  pte_t ptent = ptep_get_and_clear(&init_mm, addr, pte);  in vunmap_pte_range()
     486  set_pte_at(&init_mm, addr, pte, mk_pte(page, prot));  in vmap_pages_pte_range()
     500  pmd = pmd_alloc_track(&init_mm, pud, addr, mask);  in vmap_pages_pmd_range()
     518  pud = pud_alloc_track(&init_mm, p4d, addr, mask);  in vmap_pages_pud_range()
     536  p4d = p4d_alloc_track(&init_mm, pgd, addr, mask);  in vmap_pages_p4d_range()
     [all …]
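Every vmap()/vunmap() is a full walk of init_mm's page tables, which is why vmalloc.c dominates the hit count: the *_alloc_track() calls descend (and allocate) each level, and the leaf installs one PTE per page. A sketch of that leaf step, assuming base pages only (the real vmap_pages_pte_range() also tracks a pgtbl_mod_mask and supports huge mappings, per the set_huge_pte_at() hit):

    #include <linux/mm.h>

    /* Sketch: install one PTE per page across [addr, end). */
    static int map_kernel_ptes(pmd_t *pmd, unsigned long addr, unsigned long end,
    			   pgprot_t prot, struct page **pages, int *nr)
    {
    	pte_t *pte = pte_alloc_kernel(pmd, addr);

    	if (!pte)
    		return -ENOMEM;
    	do {
    		struct page *page = pages[(*nr)++];

    		if (WARN_ON(!pte_none(*pte)))
    			return -EBUSY;		/* slot already mapped */
    		if (WARN_ON(!page))
    			return -ENOMEM;
    		set_pte_at(&init_mm, addr, pte, mk_pte(page, prot));
    	} while (pte++, addr += PAGE_SIZE, addr != end);
    	return 0;
    }
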
D | memory.c |
     531  pte_t *new = pte_alloc_one_kernel(&init_mm);  in __pte_alloc_kernel()
     537  spin_lock(&init_mm.page_table_lock);  in __pte_alloc_kernel()
     539  pmd_populate_kernel(&init_mm, pmd, new);  in __pte_alloc_kernel()
     542  spin_unlock(&init_mm.page_table_lock);  in __pte_alloc_kernel()
     544  pte_free_kernel(&init_mm, new);  in __pte_alloc_kernel()
    2586  mapped_pte = pte = (mm == &init_mm) ?  in apply_to_pte_range()
    2592  mapped_pte = pte = (mm == &init_mm) ?  in apply_to_pte_range()
    2614  if (mm != &init_mm)  in apply_to_pte_range()
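The __pte_alloc_kernel() hits are a textbook lost-the-race idiom: allocate the PTE table outside the lock, then publish it only if nobody beat us to the PMD. Approximately (barriers and exact checks vary by kernel version):

    int __pte_alloc_kernel(pmd_t *pmd)
    {
    	pte_t *new = pte_alloc_one_kernel(&init_mm);

    	if (!new)
    		return -ENOMEM;

    	smp_wmb();	/* order table init before it becomes visible */

    	spin_lock(&init_mm.page_table_lock);
    	if (likely(pmd_none(*pmd))) {
    		/* We won: publish the new PTE table. */
    		pmd_populate_kernel(&init_mm, pmd, new);
    		new = NULL;
    	}
    	spin_unlock(&init_mm.page_table_lock);
    	if (new)	/* lost the race: another CPU populated the PMD */
    		pte_free_kernel(&init_mm, new);
    	return 0;
    }

The apply_to_pte_range() hits at 2586/2614 show the other recurring role of init_mm: it is the sentinel that tells generic walkers whether to lock a user page table or treat the range as kernel mappings.
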
D | swapfile.c |
    2183  prev_mm = &init_mm;  in try_to_unuse()
    2187  p = &init_mm.mmlist;  in try_to_unuse()
    2190  (p = p->next) != &init_mm.mmlist) {  in try_to_unuse()
    2283  list_for_each_safe(p, next, &init_mm.mmlist)  in drain_mmlist()
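init_mm.mmlist anchors the swap mmlist: every mm_struct that has ever held a swap entry is chained here. The try_to_unuse() hits come from a walk like the sketch below, where prev_mm pins the current node so the cursor survives dropping mmlist_lock (simplified; the real loop also checks in-use counts and pending signals):

    struct mm_struct *prev_mm = &init_mm, *mm;
    struct list_head *p = &init_mm.mmlist;

    mmget(prev_mm);
    spin_lock(&mmlist_lock);
    while ((p = p->next) != &init_mm.mmlist) {
    	mm = list_entry(p, struct mm_struct, mmlist);
    	if (!mmget_not_zero(mm))
    		continue;			/* mm is exiting: skip it */
    	spin_unlock(&mmlist_lock);

    	mmput(prev_mm);				/* may sleep: lock is dropped */
    	prev_mm = mm;
    	/* ... unuse_mm(mm, type): pull this mm's entries out of swap ... */

    	spin_lock(&mmlist_lock);
    }
    spin_unlock(&mmlist_lock);
    mmput(prev_mm);
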
D | rmap.c |
    1716  list_add(&mm->mmlist, &init_mm.mmlist);  in try_to_unmap_one()
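The rmap.c hit is the producer side of the walk above: the first time try_to_unmap_one() gives an mm a swap entry, it hooks the mm onto init_mm.mmlist. The surrounding code is a double-checked list_empty() so the common case never takes mmlist_lock; roughly:

    if (list_empty(&mm->mmlist)) {
    	spin_lock(&mmlist_lock);
    	if (list_empty(&mm->mmlist))
    		list_add(&mm->mmlist, &init_mm.mmlist);
    	spin_unlock(&mmlist_lock);
    }
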
/mm/kasan/
D | init.c |
     103  set_pte_at(&init_mm, addr, pte, zero_pte);  in zero_pte_populate()
     119  pmd_populate_kernel(&init_mm, pmd,  in zero_pmd_populate()
     128  p = pte_alloc_one_kernel(&init_mm);  in zero_pmd_populate()
     134  pmd_populate_kernel(&init_mm, pmd, p);  in zero_pmd_populate()
     153  pud_populate(&init_mm, pud,  in zero_pud_populate()
     156  pmd_populate_kernel(&init_mm, pmd,  in zero_pud_populate()
     165  p = pmd_alloc(&init_mm, pud, addr);  in zero_pud_populate()
     169  pud_populate(&init_mm, pud,  in zero_pud_populate()
     191  p4d_populate(&init_mm, p4d,  in zero_p4d_populate()
     194  pud_populate(&init_mm, pud,  in zero_p4d_populate()
     [all …]
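kasan/init.c builds the early "zero shadow": instead of backing never-poisoned regions with real shadow memory, whole swaths of init_mm's page tables are aliased to one shared set of early-shadow tables, so the region reads as all-zero ("fully addressable"). A minimal sketch of the PMD case, assuming the exported kasan_early_shadow_pte (lm_alias() is needed because the early tables live inside the kernel image):

    #include <linux/kasan.h>    /* kasan_early_shadow_pte */
    #include <linux/mm.h>       /* lm_alias() */
    #include <asm/pgalloc.h>    /* pmd_populate_kernel() */

    static void kasan_alias_zero_shadow(pmd_t *pmd)
    {
    	pmd_populate_kernel(&init_mm, pmd,
    			    lm_alias(kasan_early_shadow_pte));
    }

The pte_alloc_one_kernel()/pmd_alloc() hits are the fallback paths, where a range is not aligned enough to share the zero tables and gets private ones instead.
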
D | shadow.c |
     273  spin_lock(&init_mm.page_table_lock);  in kasan_populate_vmalloc_pte()
     275  set_pte_at(&init_mm, addr, ptep, pte);  in kasan_populate_vmalloc_pte()
     278  spin_unlock(&init_mm.page_table_lock);  in kasan_populate_vmalloc_pte()
     297  ret = apply_to_page_range(&init_mm, shadow_start,  in kasan_populate_vmalloc()
     350  spin_lock(&init_mm.page_table_lock);  in kasan_depopulate_vmalloc_pte()
     353  pte_clear(&init_mm, addr, ptep);  in kasan_depopulate_vmalloc_pte()
     356  spin_unlock(&init_mm.page_table_lock);  in kasan_depopulate_vmalloc_pte()
     464  apply_to_existing_page_range(&init_mm,  in kasan_release_vmalloc()
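kasan/shadow.c delegates the page-table walk entirely to apply_to_page_range(), which allocates init_mm page tables as needed and invokes a callback at each PTE. A sketch of the populate side, reusing the same publish-under-page_table_lock idiom seen in memory.c (the poison value is internal to mm/kasan, so it is elided here):

    /* Per-PTE callback: back one shadow page, unless a racing CPU won. */
    static int populate_shadow_pte(pte_t *ptep, unsigned long addr, void *unused)
    {
    	unsigned long page;

    	if (likely(!pte_none(*ptep)))
    		return 0;			/* already backed */

    	page = __get_free_page(GFP_KERNEL);
    	if (!page)
    		return -ENOMEM;
    	/* ... memset() the new page to the vmalloc poison value ... */

    	spin_lock(&init_mm.page_table_lock);
    	if (likely(pte_none(*ptep))) {
    		set_pte_at(&init_mm, addr, ptep,
    			   pfn_pte(PFN_DOWN(__pa(page)), PAGE_KERNEL));
    		page = 0;
    	}
    	spin_unlock(&init_mm.page_table_lock);
    	if (page)
    		free_page(page);		/* lost the race */
    	return 0;
    }

    /* Caller fragment, per the line-297 hit: */
    ret = apply_to_page_range(&init_mm, shadow_start,
    			  shadow_end - shadow_start,
    			  populate_shadow_pte, NULL);
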