/mm/ (references to anon_vma)
rmap.c
     79  static inline struct anon_vma *anon_vma_alloc(void)   in anon_vma_alloc()
     81  struct anon_vma *anon_vma;   in anon_vma_alloc() (local)
     83  anon_vma = kmem_cache_alloc(anon_vma_cachep, GFP_KERNEL);   in anon_vma_alloc()
     84  if (anon_vma) {   in anon_vma_alloc()
     85  atomic_set(&anon_vma->refcount, 1);   in anon_vma_alloc()
     86  anon_vma->degree = 1; /* Reference for first vma */   in anon_vma_alloc()
     87  anon_vma->parent = anon_vma;   in anon_vma_alloc()
     92  anon_vma->root = anon_vma;   in anon_vma_alloc()
     95  return anon_vma;   in anon_vma_alloc()
     98  static inline void anon_vma_free(struct anon_vma *anon_vma)   in anon_vma_free() (argument)
    [all …]
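The rmap.c hits above are the anon_vma constructor: a slab allocation whose refcount starts at one (the reference held by the first vma) and whose root and parent initially point back at itself. A minimal sketch of that pattern in kernel-style C, assuming a kernel of the same vintage as this listing; the _sketch name and the cache parameter are mine:

#include <linux/slab.h>
#include <linux/rmap.h>

/* Sketch of the allocation pattern shown in rmap.c: a new anon_vma starts
 * with one reference (for the first vma) and is its own root and parent
 * until it is chained below another anon_vma after fork. */
static struct anon_vma *anon_vma_alloc_sketch(struct kmem_cache *cachep)
{
	struct anon_vma *anon_vma;

	anon_vma = kmem_cache_alloc(cachep, GFP_KERNEL);
	if (!anon_vma)
		return NULL;

	atomic_set(&anon_vma->refcount, 1);	/* reference for the first vma */
	anon_vma->degree = 1;
	anon_vma->parent = anon_vma;
	anon_vma->root = anon_vma;		/* not yet linked under another root */
	return anon_vma;
}

The anon_vma_free() shown at line 98 is the counterpart that returns the object to the cache once the last reference has been dropped.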
mmap.c
    397  struct anon_vma *anon_vma = vma->anon_vma;   in validate_mm() (local)
    400  if (anon_vma) {   in validate_mm()
    401  anon_vma_lock_read(anon_vma);   in validate_mm()
    404  anon_vma_unlock_read(anon_vma);   in validate_mm()
    516  anon_vma_interval_tree_remove(avc, &avc->anon_vma->rb_root);   in anon_vma_interval_tree_pre_update_vma()
    525  anon_vma_interval_tree_insert(avc, &avc->anon_vma->rb_root);   in anon_vma_interval_tree_post_update_vma()
    727  struct anon_vma *anon_vma = NULL;   in __vma_adjust() (local)
    779  if (remove_next == 2 && !next->anon_vma)   in __vma_adjust()
    808  if (exporter && exporter->anon_vma && !importer->anon_vma) {   in __vma_adjust()
    811  importer->anon_vma = exporter->anon_vma;   in __vma_adjust()
    [all …]
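The mmap.c hits show two things: validate_mm() walking a vma's anon_vma under the read lock, and the pre/post update helpers that __vma_adjust() uses so the anon_vma interval trees never see a vma whose range is mid-change. A hedged sketch of that remove/modify/reinsert bracket; adjust_vma_range_sketch() is a made-up name, and the caller is assumed to already hold the anon_vma write lock, as __vma_adjust() does:

#include <linux/mm.h>
#include <linux/rmap.h>

/* Sketch of the pre/post-update bracket around __vma_adjust(): every
 * anon_vma_chain of the vma is pulled out of its anon_vma's interval tree
 * before the vma's range changes, and reinserted afterwards, so the tree
 * never indexes a stale [vm_start, vm_end) interval. */
static void adjust_vma_range_sketch(struct vm_area_struct *vma,
				    unsigned long new_start,
				    unsigned long new_end,
				    pgoff_t new_pgoff)
{
	struct anon_vma_chain *avc;

	list_for_each_entry(avc, &vma->anon_vma_chain, same_vma)
		anon_vma_interval_tree_remove(avc, &avc->anon_vma->rb_root);

	vma->vm_start = new_start;
	vma->vm_end = new_end;
	vma->vm_pgoff = new_pgoff;

	list_for_each_entry(avc, &vma->anon_vma_chain, same_vma)
		anon_vma_interval_tree_insert(avc, &avc->anon_vma->rb_root);
}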
ksm.c
    198  struct anon_vma *anon_vma; /* when stable */   (member)
    528  if (!(vma->vm_flags & VM_MERGEABLE) || !vma->anon_vma)   in find_mergeable_vma()
    543  put_anon_vma(rmap_item->anon_vma);   in break_cow()
    645  put_anon_vma(rmap_item->anon_vma);   in remove_node_from_stable_tree()
    795  put_anon_vma(rmap_item->anon_vma);   in remove_rmap_item_from_tree()
    983  if (!(vma->vm_flags & VM_MERGEABLE) || !vma->anon_vma)   in unmerge_and_remove_all_rmap_items()
   1296  rmap_item->anon_vma = vma->anon_vma;   in try_to_merge_with_ksm_page()
   1297  get_anon_vma(vma->anon_vma);   in try_to_merge_with_ksm_page()
   2291  if (!vma->anon_vma)   in scan_get_next_rmap_item()
   2469  if (vma->anon_vma) {   in ksm_madvise()
    [all …]
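In ksm.c, an rmap_item that has been merged into the stable tree records and pins the anon_vma of the vma it came from (lines 1296-1297), and every teardown path drops that pin with put_anon_vma(). A minimal sketch of the pairing; struct stable_rmap_item is a hypothetical stand-in for the relevant slice of KSM's rmap_item:

#include <linux/mm.h>
#include <linux/rmap.h>

/* Hypothetical stand-in for the piece of KSM's rmap_item shown above:
 * the item keeps the source anon_vma alive while the page sits in the
 * stable tree. */
struct stable_rmap_item {
	struct anon_vma *anon_vma;	/* "when stable" */
};

static void stable_item_attach(struct stable_rmap_item *item,
			       struct vm_area_struct *vma)
{
	item->anon_vma = vma->anon_vma;
	get_anon_vma(vma->anon_vma);	/* pin it: the vma itself may go away */
}

static void stable_item_detach(struct stable_rmap_item *item)
{
	put_anon_vma(item->anon_vma);	/* drop the pin taken at attach time */
	item->anon_vma = NULL;
}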
huge_memory.c
   1334  VM_BUG_ON_VMA(!vma->anon_vma, vma);   in do_huge_pmd_wp_page()
   1546  struct anon_vma *anon_vma = NULL;   in do_huge_pmd_numa_page() (local)
   1616  anon_vma = page_lock_anon_vma_read(page);   in do_huge_pmd_numa_page()
   1628  if (unlikely(!anon_vma)) {   in do_huge_pmd_numa_page()
   1689  if (anon_vma)   in do_huge_pmd_numa_page()
   1690  page_unlock_anon_vma_read(anon_vma);   in do_huge_pmd_numa_page()
   2708  struct anon_vma *anon_vma = NULL;   in split_huge_page_to_list() (local)
   2731  anon_vma = page_get_anon_vma(head);   in split_huge_page_to_list()
   2732  if (!anon_vma) {   in split_huge_page_to_list()
   2738  anon_vma_lock_write(anon_vma);   in split_huge_page_to_list()
    [all …]
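huge_memory.c shows both ways of reaching an anon_vma from a page: do_huge_pmd_numa_page() takes the read lock directly with page_lock_anon_vma_read(), while split_huge_page_to_list() first pins the anon_vma with page_get_anon_vma() and then takes the write lock itself. A sketch of the first, read-side pattern; walk_one_anon_page() is a hypothetical caller and the actual rmap walk is elided:

#include <linux/mm.h>
#include <linux/rmap.h>

/* Sketch of the page_lock_anon_vma_read() pattern from
 * do_huge_pmd_numa_page(): look the anon_vma up from the page and hold
 * its rwsem for reading while inspecting the reverse mapping. */
static void walk_one_anon_page(struct page *page)
{
	struct anon_vma *anon_vma;

	anon_vma = page_lock_anon_vma_read(page);
	if (!anon_vma)
		return;		/* no longer anonymous, or raced with a free */

	/* ... walk the page's mappings under the read lock ... */

	page_unlock_anon_vma_read(anon_vma);
}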
migrate.c
   1009  struct anon_vma *anon_vma = NULL;   in __unmap_and_move() (local)
   1070  anon_vma = page_get_anon_vma(page);   in __unmap_and_move()
   1108  VM_BUG_ON_PAGE(PageAnon(page) && !PageKsm(page) && !anon_vma,   in __unmap_and_move()
   1126  if (anon_vma)   in __unmap_and_move()
   1127  put_anon_vma(anon_vma);   in __unmap_and_move()
   1288  struct anon_vma *anon_vma = NULL;   in unmap_and_move_huge_page() (local)
   1330  anon_vma = page_get_anon_vma(hpage);   in unmap_and_move_huge_page()
   1351  if (anon_vma)   in unmap_and_move_huge_page()
   1352  put_anon_vma(anon_vma);   in unmap_and_move_huge_page()
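Migration in migrate.c holds no anon_vma lock across the whole move; __unmap_and_move() only pins the anon_vma with page_get_anon_vma() so it cannot disappear while the page is unmapped and remapped, then drops the pin with put_anon_vma(). A sketch of that bracket; migrate_anon_page_sketch() is a hypothetical wrapper and the middle steps are elided:

#include <linux/mm.h>
#include <linux/rmap.h>

/* Sketch of the reference bracket used by __unmap_and_move(): the
 * anon_vma must outlive the unmap + remap of the page, but no lock is
 * held across it, so only a refcount pin is taken. */
static void migrate_anon_page_sketch(struct page *page)
{
	struct anon_vma *anon_vma = NULL;

	if (PageAnon(page) && !PageKsm(page))
		anon_vma = page_get_anon_vma(page);	/* may still be NULL */

	/* ... unmap the page, copy it, restore the migration entries ... */

	if (anon_vma)
		put_anon_vma(anon_vma);
}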
mremap.c
     88  if (vma->anon_vma)   in take_rmap_locks()
     89  anon_vma_lock_write(vma->anon_vma);   in take_rmap_locks()
     94  if (vma->anon_vma)   in drop_rmap_locks()
     95  anon_vma_unlock_write(vma->anon_vma);   in drop_rmap_locks()
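take_rmap_locks() and drop_rmap_locks() in mremap.c take the anon_vma write lock only when the vma has an anon_vma at all. A minimal sketch of just that anon_vma half (the real helpers also handle the i_mmap lock of a file mapping); the _sketch names are mine:

#include <linux/mm.h>
#include <linux/rmap.h>

/* Sketch of the conditional write-lock pairing in mremap.c: purely
 * file-backed or never-faulted vmas have no anon_vma, so there is
 * nothing to lock for them. */
static void take_anon_rmap_lock_sketch(struct vm_area_struct *vma)
{
	if (vma->anon_vma)
		anon_vma_lock_write(vma->anon_vma);
}

static void drop_anon_rmap_lock_sketch(struct vm_area_struct *vma)
{
	if (vma->anon_vma)
		anon_vma_unlock_write(vma->anon_vma);
}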
khugepaged.c
    424  if (!vma->anon_vma || vma->vm_ops)   in hugepage_vma_check()
   1029  anon_vma_lock_write(vma->anon_vma);   in collapse_huge_page()
   1064  anon_vma_unlock_write(vma->anon_vma);   in collapse_huge_page()
   1073  anon_vma_unlock_write(vma->anon_vma);   in collapse_huge_page()
   1441  if (vma->anon_vma)   in retract_page_tables()
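khugepaged only collapses a range once the vma has an anon_vma and no vm_ops, and it holds the anon_vma write lock while collapse_huge_page() rewrites the page table. A hedged sketch of just the eligibility check; the helper name is mine, and the real hugepage_vma_check() also examines vm_flags and file-backed cases not shown here:

#include <linux/mm.h>

/* Sketch of the anonymous-vma part of hugepage_vma_check(): a vma with
 * no anon_vma has never faulted an anonymous page, and a vma with
 * vm_ops is not plain anonymous memory, so neither can be collapsed
 * into an anonymous huge page. */
static bool anon_vma_collapsible_sketch(struct vm_area_struct *vma)
{
	if (!vma->anon_vma || vma->vm_ops)
		return false;
	return true;
}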
debug.c
    128  vma->anon_vma, vma->vm_ops, vma->vm_pgoff,   in dump_vma()
mprotect.c
    396  vma->anon_vma, vma->vm_file, pgoff, vma_policy(vma),   in mprotect_fixup()
mlock.c
    536  *prev = vma_merge(mm, *prev, start, end, newflags, vma->anon_vma,   in mlock_fixup()
util.c
    637  struct anon_vma *page_anon_vma(struct page *page)   in page_anon_vma()
madvise.c
    135  *prev = vma_merge(mm, *prev, start, end, new_flags, vma->anon_vma,   in madvise_behavior()
memory-failure.c
    443  struct anon_vma *av;   in collect_procs_anon()
memory.c
    541  (void *)addr, vma->vm_flags, vma->anon_vma, mapping, index);   in print_bad_pte()
    974  !vma->anon_vma)   in copy_page_range()
mempolicy.c
    761  vma->anon_vma, vma->vm_file, pgoff,   in mbind_range()
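mlock.c (line 536), madvise.c (line 135) and mempolicy.c (line 761) all follow the same shape: when changing per-vma state they hand the vma's existing anon_vma to vma_merge(), so the updated range can be folded into a neighbouring vma instead of leaving a pointless split. A hedged sketch of that call shape, modelled on mlock_fixup(); the wrapper name is mine, the split/fixup fallback is omitted, and the exact trailing arguments of vma_merge() vary between kernel versions:

#include <linux/mm.h>
#include <linux/mempolicy.h>

/* Sketch of the vma_merge() call pattern shared by mlock_fixup(),
 * madvise_behavior() and mbind_range(): pass the vma's own anon_vma so a
 * merged vma keeps the same reverse-mapping chain it already had. */
static struct vm_area_struct *apply_newflags_sketch(struct mm_struct *mm,
						    struct vm_area_struct *vma,
						    struct vm_area_struct *prev,
						    unsigned long start,
						    unsigned long end,
						    unsigned long newflags)
{
	pgoff_t pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT);
	struct vm_area_struct *merged;

	merged = vma_merge(mm, prev, start, end, newflags, vma->anon_vma,
			   vma->vm_file, pgoff, vma_policy(vma),
			   vma->vm_userfaultfd_ctx);
	return merged ? merged : vma;	/* fall back to the unmerged vma */
}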
swapfile.c
   2075  if (vma->anon_vma) {   in unuse_mm()