Searched refs:vm_area_struct (Results 1 – 25 of 38) sorted by relevance

/mm/
mmap.c
77 struct vm_area_struct *vma, struct vm_area_struct *prev,
122 void vma_set_page_prot(struct vm_area_struct *vma) in vma_set_page_prot()
139 static void __remove_shared_vm_struct(struct vm_area_struct *vma, in __remove_shared_vm_struct()
156 void unlink_file_vma(struct vm_area_struct *vma) in unlink_file_vma()
171 static struct vm_area_struct *remove_vma(struct vm_area_struct *vma) in remove_vma()
173 struct vm_area_struct *next = vma->vm_next; in remove_vma()
192 struct vm_area_struct *next; in SYSCALL_DEFINE1()
288 static inline unsigned long vma_compute_gap(struct vm_area_struct *vma) in vma_compute_gap()
310 static unsigned long vma_compute_subtree_gap(struct vm_area_struct *vma) in vma_compute_subtree_gap()
315 struct vm_area_struct, vm_rb)->rb_subtree_gap; in vma_compute_subtree_gap()
[all …]
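
Note: the vma_compute_gap()/rb_subtree_gap hits above come from the pre-maple-tree VMA layout, where each mm_struct keeps its vm_area_structs both in an rbtree (mm->mm_rb) and on a sorted singly linked list (mm->mmap / vma->vm_next). A minimal sketch of walking that list follows; dump_vmas() is a made-up helper, not code from mmap.c, and it assumes a kernel of this vintage where the lock is still named mmap_sem.

    #include <linux/mm.h>
    #include <linux/printk.h>

    /* Sketch only: iterate every VMA of an mm under the read side of
     * mmap_sem (renamed mmap_lock in later kernels). */
    static void dump_vmas(struct mm_struct *mm)
    {
            struct vm_area_struct *vma;

            down_read(&mm->mmap_sem);
            for (vma = mm->mmap; vma; vma = vma->vm_next)
                    pr_info("vma %08lx-%08lx flags %lx\n",
                            vma->vm_start, vma->vm_end, vma->vm_flags);
            up_read(&mm->mmap_sem);
    }
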
madvise.c
65 static long madvise_behavior(struct vm_area_struct *vma, in madvise_behavior()
66 struct vm_area_struct **prev, in madvise_behavior()
187 struct vm_area_struct *vma = walk->private; in swapin_walk_pmd_entry()
222 static void force_shm_swapin_readahead(struct vm_area_struct *vma, in force_shm_swapin_readahead()
253 static long madvise_willneed(struct vm_area_struct *vma, in madvise_willneed()
254 struct vm_area_struct **prev, in madvise_willneed()
308 struct vm_area_struct *vma = walk->vma; in madvise_cold_or_pageout_pte_range()
472 struct vm_area_struct *vma, in madvise_cold_page_range()
485 static long madvise_cold(struct vm_area_struct *vma, in madvise_cold()
486 struct vm_area_struct **prev, in madvise_cold()
[all …]
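
Note: madvise_willneed() and madvise_cold() above are the kernel-side handlers behind the madvise(2) hints; madvise_cold() backs the v5.4-era MADV_COLD hint. A user-space sketch (buffer size and the chosen hint are arbitrary; MADV_COLD requires headers new enough to define it):

    #include <string.h>
    #include <sys/mman.h>

    int main(void)
    {
            size_t len = 4 * 1024 * 1024;
            char *buf = mmap(NULL, len, PROT_READ | PROT_WRITE,
                             MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

            if (buf == MAP_FAILED)
                    return 1;
            memset(buf, 1, len);            /* fault the pages in */
            madvise(buf, len, MADV_COLD);   /* hint: deactivate these pages first */
            munmap(buf, len);
            return 0;
    }
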
interval_tree.c
13 static inline unsigned long vma_start_pgoff(struct vm_area_struct *v) in vma_start_pgoff()
18 static inline unsigned long vma_last_pgoff(struct vm_area_struct *v) in vma_last_pgoff()
23 INTERVAL_TREE_DEFINE(struct vm_area_struct, shared.rb,
28 void vma_interval_tree_insert_after(struct vm_area_struct *node, in vma_interval_tree_insert_after()
29 struct vm_area_struct *prev, in vma_interval_tree_insert_after()
33 struct vm_area_struct *parent; in vma_interval_tree_insert_after()
43 struct vm_area_struct, shared.rb); in vma_interval_tree_insert_after()
48 struct vm_area_struct, shared.rb); in vma_interval_tree_insert_after()
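
Note: the INTERVAL_TREE_DEFINE() instantiation above generates the vma_interval_tree_*() helpers keyed on shared.rb, which index a file mapping's VMAs by file page offset. A sketch of the lookup pattern (visit_overlapping_vmas() is a made-up name; rmap.c uses the same foreach under i_mmap_lock_read()):

    #include <linux/fs.h>
    #include <linux/mm.h>
    #include <linux/printk.h>

    static void visit_overlapping_vmas(struct address_space *mapping,
                                       pgoff_t first, pgoff_t last)
    {
            struct vm_area_struct *vma;

            i_mmap_lock_read(mapping);
            vma_interval_tree_foreach(vma, &mapping->i_mmap, first, last)
                    pr_info("maps %08lx-%08lx\n", vma->vm_start, vma->vm_end);
            i_mmap_unlock_read(mapping);
    }
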
nommu.c
100 struct vm_area_struct *vma; in kobjsize()
124 int follow_pfn(struct vm_area_struct *vma, unsigned long address, in follow_pfn()
164 struct vm_area_struct *vma; in vmalloc_user()
382 int vm_insert_page(struct vm_area_struct *vma, unsigned long addr, in vm_insert_page()
389 int vm_map_pages(struct vm_area_struct *vma, struct page **pages, in vm_map_pages()
396 int vm_map_pages_zero(struct vm_area_struct *vma, struct page **pages, in vm_map_pages_zero()
582 static void add_vma_to_mm(struct mm_struct *mm, struct vm_area_struct *vma) in add_vma_to_mm()
584 struct vm_area_struct *pvma, *prev; in add_vma_to_mm()
609 pvma = rb_entry(parent, struct vm_area_struct, vm_rb); in add_vma_to_mm()
638 prev = rb_entry(rb_prev, struct vm_area_struct, vm_rb); in add_vma_to_mm()
[all …]
pgtable-generic.c
55 int ptep_set_access_flags(struct vm_area_struct *vma, in ptep_set_access_flags()
69 int ptep_clear_flush_young(struct vm_area_struct *vma, in ptep_clear_flush_young()
81 pte_t ptep_clear_flush(struct vm_area_struct *vma, unsigned long address, in ptep_clear_flush()
96 int pmdp_set_access_flags(struct vm_area_struct *vma, in pmdp_set_access_flags()
111 int pmdp_clear_flush_young(struct vm_area_struct *vma, in pmdp_clear_flush_young()
124 pmd_t pmdp_huge_clear_flush(struct vm_area_struct *vma, unsigned long address, in pmdp_huge_clear_flush()
137 pud_t pudp_huge_clear_flush(struct vm_area_struct *vma, unsigned long address, in pudp_huge_clear_flush()
185 pmd_t pmdp_invalidate(struct vm_area_struct *vma, unsigned long address, in pmdp_invalidate()
195 pmd_t pmdp_collapse_flush(struct vm_area_struct *vma, unsigned long address, in pmdp_collapse_flush()
internal.h
39 void free_pgtables(struct mmu_gather *tlb, struct vm_area_struct *start_vma,
42 static inline bool can_madv_lru_vma(struct vm_area_struct *vma) in can_madv_lru_vma()
48 struct vm_area_struct *vma,
292 void __vma_link_list(struct mm_struct *mm, struct vm_area_struct *vma,
293 struct vm_area_struct *prev, struct rb_node *rb_parent);
296 extern long populate_vma_page_range(struct vm_area_struct *vma,
298 extern void munlock_vma_pages_range(struct vm_area_struct *vma,
300 static inline void munlock_vma_pages_all(struct vm_area_struct *vma) in munlock_vma_pages_all()
339 extern pmd_t maybe_pmd_mkwrite(pmd_t pmd, struct vm_area_struct *vma);
345 __vma_address(struct page *page, struct vm_area_struct *vma) in __vma_address()
[all …]
rmap.c
138 static void anon_vma_chain_link(struct vm_area_struct *vma, in anon_vma_chain_link()
176 int __anon_vma_prepare(struct vm_area_struct *vma) in __anon_vma_prepare()
262 int anon_vma_clone(struct vm_area_struct *dst, struct vm_area_struct *src) in anon_vma_clone()
316 int anon_vma_fork(struct vm_area_struct *vma, struct vm_area_struct *pvma) in anon_vma_fork()
377 void unlink_anon_vmas(struct vm_area_struct *vma) in unlink_anon_vmas()
688 unsigned long page_address_in_vma(struct page *page, struct vm_area_struct *vma) in page_address_in_vma()
754 static bool page_referenced_one(struct page *page, struct vm_area_struct *vma, in page_referenced_one()
816 static bool invalid_page_referenced_vma(struct vm_area_struct *vma, void *arg) in invalid_page_referenced_vma()
884 static bool page_mkclean_one(struct page *page, struct vm_area_struct *vma, in page_mkclean_one()
958 static bool invalid_mkclean_vma(struct vm_area_struct *vma, void *arg) in invalid_mkclean_vma()
[all …]
mremap.c
59 static pmd_t *alloc_new_pmd(struct mm_struct *mm, struct vm_area_struct *vma, in alloc_new_pmd()
84 static void take_rmap_locks(struct vm_area_struct *vma) in take_rmap_locks()
92 static void drop_rmap_locks(struct vm_area_struct *vma) in drop_rmap_locks()
115 static void move_ptes(struct vm_area_struct *vma, pmd_t *old_pmd, in move_ptes()
117 struct vm_area_struct *new_vma, pmd_t *new_pmd, in move_ptes()
195 static bool move_normal_pmd(struct vm_area_struct *vma, unsigned long old_addr, in move_normal_pmd()
240 unsigned long move_page_tables(struct vm_area_struct *vma, in move_page_tables()
241 unsigned long old_addr, struct vm_area_struct *new_vma, in move_page_tables()
318 static unsigned long move_vma(struct vm_area_struct *vma, in move_vma()
325 struct vm_area_struct *new_vma; in move_vma()
[all …]
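
Note: move_vma() and move_page_tables() above do the heavy lifting when mremap(2) has to relocate a mapping. A user-space sketch of triggering that path (sizes arbitrary; mremap() and MREMAP_MAYMOVE need _GNU_SOURCE with glibc):

    #define _GNU_SOURCE
    #include <string.h>
    #include <sys/mman.h>

    int main(void)
    {
            size_t old_len = 1 << 20, new_len = 8 << 20;
            void *p = mmap(NULL, old_len, PROT_READ | PROT_WRITE,
                           MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

            if (p == MAP_FAILED)
                    return 1;
            memset(p, 0, old_len);
            /* MREMAP_MAYMOVE lets the kernel pick a new range, which can
             * land in move_vma()/move_page_tables() listed above. */
            p = mremap(p, old_len, new_len, MREMAP_MAYMOVE);
            if (p == MAP_FAILED)
                    return 1;
            munmap(p, new_len);
            return 0;
    }
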
memory.c
389 void free_pgtables(struct mmu_gather *tlb, struct vm_area_struct *vma, in free_pgtables()
393 struct vm_area_struct *next = vma->vm_next; in free_pgtables()
500 static void print_bad_pte(struct vm_area_struct *vma, unsigned long addr, in print_bad_pte()
593 struct page *vm_normal_page(struct vm_area_struct *vma, unsigned long addr, in vm_normal_page()
649 struct page *vm_normal_page_pmd(struct vm_area_struct *vma, unsigned long addr, in vm_normal_page_pmd()
698 pte_t *dst_pte, pte_t *src_pte, struct vm_area_struct *vma, in copy_one_pte()
804 pmd_t *dst_pmd, pmd_t *src_pmd, struct vm_area_struct *vma, in copy_pte_range()
867 pud_t *dst_pud, pud_t *src_pud, struct vm_area_struct *vma, in copy_pmd_range()
901 p4d_t *dst_p4d, p4d_t *src_p4d, struct vm_area_struct *vma, in copy_pud_range()
935 pgd_t *dst_pgd, pgd_t *src_pgd, struct vm_area_struct *vma, in copy_p4d_range()
[all …]
gup.c
123 static struct page *no_page_table(struct vm_area_struct *vma, in no_page_table()
139 static int follow_pfn_pte(struct vm_area_struct *vma, unsigned long address, in follow_pfn_pte()
173 static struct page *follow_page_pte(struct vm_area_struct *vma, in follow_page_pte()
308 static struct page *follow_pmd_mask(struct vm_area_struct *vma, in follow_pmd_mask()
423 static struct page *follow_pud_mask(struct vm_area_struct *vma, in follow_pud_mask()
463 static struct page *follow_p4d_mask(struct vm_area_struct *vma, in follow_p4d_mask()
508 static struct page *follow_page_mask(struct vm_area_struct *vma, in follow_page_mask()
548 struct page *follow_page(struct vm_area_struct *vma, unsigned long address, in follow_page()
561 unsigned int gup_flags, struct vm_area_struct **vma, in get_gate_page()
618 static int faultin_page(struct task_struct *tsk, struct vm_area_struct *vma, in faultin_page()
[all …]
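
Note: follow_page_mask() and faultin_page() above are internals of get_user_pages(); outside code normally calls the get_user_pages*() family rather than chasing page tables by hand. A sketch of pinning one page of the current task (pin_one_user_page() is an invented helper; the get_user_pages() signature assumed here is the one in this tree, with the caller holding mmap_sem):

    #include <linux/mm.h>
    #include <linux/sched.h>

    static struct page *pin_one_user_page(unsigned long uaddr, bool write)
    {
            struct page *page;
            long ret;

            down_read(&current->mm->mmap_sem);
            ret = get_user_pages(uaddr & PAGE_MASK, 1,
                                 write ? FOLL_WRITE : 0, &page, NULL);
            up_read(&current->mm->mmap_sem);

            return ret == 1 ? page : NULL;  /* release with put_page() */
    }
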
vmacache.c
36 void vmacache_update(unsigned long addr, struct vm_area_struct *newvma) in vmacache_update()
62 struct vm_area_struct *vmacache_find(struct mm_struct *mm, unsigned long addr) in vmacache_find()
73 struct vm_area_struct *vma = current->vmacache.vmas[idx]; in vmacache_find()
93 struct vm_area_struct *vmacache_find_exact(struct mm_struct *mm, in vmacache_find_exact()
106 struct vm_area_struct *vma = current->vmacache.vmas[idx]; in vmacache_find_exact()
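
Note: vmacache_find() / vmacache_update() above implement the small per-task cache consulted before the rbtree walk. find_vma() in mm/mmap.c follows essentially the pattern below; this is a simplified paraphrase, not the real function, and the caller is assumed to hold mmap_sem.

    #include <linux/mm.h>
    #include <linux/rbtree.h>
    #include <linux/vmacache.h>

    /* Sketch of find_vma(): cache lookup, rbtree walk, cache fill. */
    static struct vm_area_struct *find_vma_sketch(struct mm_struct *mm,
                                                  unsigned long addr)
    {
            struct rb_node *node = mm->mm_rb.rb_node;
            struct vm_area_struct *vma = vmacache_find(mm, addr);

            if (vma)
                    return vma;

            while (node) {
                    struct vm_area_struct *tmp =
                            rb_entry(node, struct vm_area_struct, vm_rb);

                    if (tmp->vm_end > addr) {
                            vma = tmp;
                            if (tmp->vm_start <= addr)
                                    break;
                            node = node->rb_left;
                    } else {
                            node = node->rb_right;
                    }
            }
            if (vma)
                    vmacache_update(addr, vma);
            return vma;     /* NULL if addr lies above every VMA */
    }
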
huge_memory.c
65 bool transparent_hugepage_enabled(struct vm_area_struct *vma) in transparent_hugepage_enabled()
492 pmd_t maybe_pmd_mkwrite(pmd_t pmd, struct vm_area_struct *vma) in maybe_pmd_mkwrite()
586 struct vm_area_struct *vma = vmf->vma; in __do_huge_pmd_anonymous_page()
672 static inline gfp_t alloc_hugepage_direct_gfpmask(struct vm_area_struct *vma) in alloc_hugepage_direct_gfpmask()
700 struct vm_area_struct *vma, unsigned long haddr, pmd_t *pmd, in set_huge_zero_page()
717 struct vm_area_struct *vma = vmf->vma; in do_huge_pmd_anonymous_page()
777 static void insert_pfn_pmd(struct vm_area_struct *vma, unsigned long addr, in insert_pfn_pmd()
827 struct vm_area_struct *vma = vmf->vma; in vmf_insert_pfn_pmd()
859 static pud_t maybe_pud_mkwrite(pud_t pud, struct vm_area_struct *vma) in maybe_pud_mkwrite()
866 static void insert_pfn_pud(struct vm_area_struct *vma, unsigned long addr, in insert_pfn_pud()
[all …]
hugetlb.c
218 static inline struct hugepage_subpool *subpool_vma(struct vm_area_struct *vma) in subpool_vma()
624 struct vm_area_struct *vma, unsigned long address) in vma_hugecache_offset()
630 pgoff_t linear_hugepage_index(struct vm_area_struct *vma, in linear_hugepage_index()
641 unsigned long vma_kernel_pagesize(struct vm_area_struct *vma) in vma_kernel_pagesize()
655 __weak unsigned long vma_mmu_pagesize(struct vm_area_struct *vma) in vma_mmu_pagesize()
688 static unsigned long get_vma_private_data(struct vm_area_struct *vma) in get_vma_private_data()
693 static void set_vma_private_data(struct vm_area_struct *vma, in set_vma_private_data()
756 static struct resv_map *vma_resv_map(struct vm_area_struct *vma) in vma_resv_map()
771 static void set_vma_resv_map(struct vm_area_struct *vma, struct resv_map *map) in set_vma_resv_map()
780 static void set_vma_resv_flags(struct vm_area_struct *vma, unsigned long flags) in set_vma_resv_flags()
[all …]
mprotect.c
38 static unsigned long change_pte_range(struct vm_area_struct *vma, pmd_t *pmd, in change_pte_range()
164 static inline unsigned long change_pmd_range(struct vm_area_struct *vma, in change_pmd_range()
227 static inline unsigned long change_pud_range(struct vm_area_struct *vma, in change_pud_range()
247 static inline unsigned long change_p4d_range(struct vm_area_struct *vma, in change_p4d_range()
267 static unsigned long change_protection_range(struct vm_area_struct *vma, in change_protection_range()
297 unsigned long change_protection(struct vm_area_struct *vma, unsigned long start, in change_protection()
339 mprotect_fixup(struct vm_area_struct *vma, struct vm_area_struct **pprev, in mprotect_fixup()
456 struct vm_area_struct *vma, *prev; in do_mprotect_pkey()
mempolicy.c
380 struct vm_area_struct *vma; in mpol_rebind_mm()
413 struct vm_area_struct *prev;
493 struct vm_area_struct *vma = walk->vma; in queue_pages_pte_range()
594 unsigned long change_prot_numa(struct vm_area_struct *vma, in change_prot_numa()
606 static unsigned long change_prot_numa(struct vm_area_struct *vma, in change_prot_numa()
616 struct vm_area_struct *vma = walk->vma; in queue_pages_test_walk()
698 static int vma_replace_policy(struct vm_area_struct *vma, in vma_replace_policy()
734 struct vm_area_struct *next; in mbind_range()
735 struct vm_area_struct *prev; in mbind_range()
736 struct vm_area_struct *vma; in mbind_range()
[all …]
pagewalk.c
185 struct vm_area_struct *vma = walk->vma; in walk_hugetlb_range()
228 struct vm_area_struct *vma = walk->vma; in walk_page_test()
255 struct vm_area_struct *vma = walk->vma; in __walk_page_range()
307 struct vm_area_struct *vma; in walk_page_range()
356 int walk_page_vma(struct vm_area_struct *vma, const struct mm_walk_ops *ops, in walk_page_vma()
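
Note: walk_page_vma() and the mm_walk_ops callbacks above form the generic page-table walker. A sketch that counts present PTEs in one VMA (count_present_ptes() and count_pte() are made-up names; the caller is assumed to hold mmap_sem):

    #include <linux/mm.h>
    #include <linux/pagewalk.h>

    /* Called once per PTE in the VMA; the counter travels via walk->private. */
    static int count_pte(pte_t *pte, unsigned long addr, unsigned long next,
                         struct mm_walk *walk)
    {
            unsigned long *count = walk->private;

            if (pte_present(*pte))
                    (*count)++;
            return 0;
    }

    static const struct mm_walk_ops count_ops = {
            .pte_entry = count_pte,
    };

    static unsigned long count_present_ptes(struct vm_area_struct *vma)
    {
            unsigned long count = 0;

            walk_page_vma(vma, &count_ops, &count);
            return count;
    }
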
mincore.c
96 struct vm_area_struct *vma, unsigned char *vec) in __mincore_unmapped_range()
126 struct vm_area_struct *vma = walk->vma; in mincore_pte_range()
180 static inline bool can_do_mincore(struct vm_area_struct *vma) in can_do_mincore()
209 struct vm_area_struct *vma; in do_mincore()
mlock.c
375 struct vm_area_struct *vma, struct zone *zone, in __munlock_pagevec_fill()
445 void munlock_vma_pages_range(struct vm_area_struct *vma, in munlock_vma_pages_range()
519 static int mlock_fixup(struct vm_area_struct *vma, struct vm_area_struct **prev, in mlock_fixup()
587 struct vm_area_struct * vma, * prev; in apply_vma_lock_flags()
642 struct vm_area_struct *vma; in count_mm_mlocked_page_nr()
765 struct vm_area_struct * vma, * prev = NULL; in apply_mlockall_flags()
userfaultfd.c
23 struct vm_area_struct *dst_vma, in mcopy_atomic_pte()
114 struct vm_area_struct *dst_vma, in mfill_zeropage_pte()
174 struct vm_area_struct *dst_vma, in __mcopy_atomic_hugetlb()
386 struct vm_area_struct *dst_vma,
395 struct vm_area_struct *dst_vma, in mfill_atomic_pte()
440 struct vm_area_struct *dst_vma; in __mcopy_atomic()
swap_state.c
310 struct page *lookup_swap_cache(swp_entry_t entry, struct vm_area_struct *vma, in lookup_swap_cache()
360 struct vm_area_struct *vma, unsigned long addr, in __read_swap_cache_async()
449 struct vm_area_struct *vma, unsigned long addr, bool do_poll) in read_swap_cache_async()
550 struct vm_area_struct *vma = vmf->vma; in swap_cluster_readahead()
627 static inline void swap_ra_clamp_pfn(struct vm_area_struct *vma, in swap_ra_clamp_pfn()
643 struct vm_area_struct *vma = vmf->vma; in swap_ra_info()
726 struct vm_area_struct *vma = vmf->vma; in swap_vma_readahead()
ksm.c
470 static int break_ksm(struct vm_area_struct *vma, unsigned long addr) in break_ksm()
519 static struct vm_area_struct *find_mergeable_vma(struct mm_struct *mm, in find_mergeable_vma()
522 struct vm_area_struct *vma; in find_mergeable_vma()
537 struct vm_area_struct *vma; in break_cow()
556 struct vm_area_struct *vma; in get_mergeable_page()
843 static int unmerge_ksm_pages(struct vm_area_struct *vma, in unmerge_ksm_pages()
968 struct vm_area_struct *vma; in unmerge_and_remove_all_rmap_items()
1032 static int write_protect_page(struct vm_area_struct *vma, struct page *page, in write_protect_page()
1119 static int replace_page(struct vm_area_struct *vma, struct page *page, in replace_page()
1201 static int try_to_merge_one_page(struct vm_area_struct *vma, in try_to_merge_one_page()
[all …]
khugepaged.c
313 int hugepage_madvise(struct vm_area_struct *vma, in hugepage_madvise()
407 static bool hugepage_vma_check(struct vm_area_struct *vma, in hugepage_vma_check()
464 int khugepaged_enter_vma_merge(struct vm_area_struct *vma, in khugepaged_enter_vma_merge()
532 static int __collapse_huge_page_isolate(struct vm_area_struct *vma, in __collapse_huge_page_isolate()
647 struct vm_area_struct *vma, in __collapse_huge_page_copy()
861 struct vm_area_struct **vmap) in hugepage_vma_revalidate()
863 struct vm_area_struct *vma; in hugepage_vma_revalidate()
891 struct vm_area_struct *vma, in __collapse_huge_page_swapin()
958 struct vm_area_struct *vma; in collapse_huge_page()
1116 struct vm_area_struct *vma, in khugepaged_scan_pmd()
[all …]
util.c
273 void __vma_link_list(struct mm_struct *mm, struct vm_area_struct *vma, in __vma_link_list()
274 struct vm_area_struct *prev, struct rb_node *rb_parent) in __vma_link_list()
276 struct vm_area_struct *next; in __vma_link_list()
286 struct vm_area_struct, vm_rb); in __vma_link_list()
296 int vma_is_stack_for_current(struct vm_area_struct *vma) in vma_is_stack_for_current()
shmem.c
144 gfp_t gfp, struct vm_area_struct *vma,
148 gfp_t gfp, struct vm_area_struct *vma,
256 bool vma_is_shmem(struct vm_area_struct *vma) in vma_is_shmem()
734 unsigned long shmem_swap_usage(struct vm_area_struct *vma) in shmem_swap_usage()
1434 static void shmem_pseudo_vma_init(struct vm_area_struct *vma, in shmem_pseudo_vma_init()
1444 static void shmem_pseudo_vma_destroy(struct vm_area_struct *vma) in shmem_pseudo_vma_destroy()
1453 struct vm_area_struct pvma; in shmem_swapin()
1469 struct vm_area_struct pvma; in shmem_alloc_hugepage()
1494 struct vm_area_struct pvma; in shmem_alloc_page()
1628 gfp_t gfp, struct vm_area_struct *vma, in shmem_swapin_page()
[all …]
migrate.c
204 static bool remove_migration_pte(struct page *page, struct vm_area_struct *vma, in remove_migration_pte()
347 void migration_entry_wait_huge(struct vm_area_struct *vma, in migration_entry_wait_huge()
1528 struct vm_area_struct *vma; in add_page_for_migration()
1701 struct vm_area_struct *vma; in do_pages_stat_array()
1948 int migrate_misplaced_page(struct page *page, struct vm_area_struct *vma, in migrate_misplaced_page()
2004 struct vm_area_struct *vma, in migrate_misplaced_transhuge_page()
2173 struct vm_area_struct *vma = walk->vma; in migrate_vma_collect_pmd()
2688 struct vm_area_struct *vma = migrate->vma; in migrate_vma_insert_page()
