Searched refs: vm_normal_page (Results 1 – 9 of 9) sorted by relevance
/mm/
  khugepaged.c
     561  page = vm_normal_page(vma, address, pteval);  in __collapse_huge_page_isolate()
    1182  page = vm_normal_page(vma, _address, pteval);  in khugepaged_scan_pmd()
    1371  page = vm_normal_page(vma, addr, *pte);  in collapse_pte_mapped_thp()
    1389  page = vm_normal_page(vma, addr, *pte);  in collapse_pte_mapped_thp()
  madvise.c
     400  page = vm_normal_page(vma, addr, ptent);  in madvise_cold_or_pageout_pte_range()
     612  page = vm_normal_page(vma, addr, ptent);  in madvise_free_pte_range()
  memory.c
     605  struct page *vm_normal_page(struct vm_area_struct *vma, unsigned long addr,  in vm_normal_page() function
     801  page = vm_normal_page(vma, addr, pte);  in copy_one_pte()
    1077  page = vm_normal_page(vma, addr, ptent);  in zap_pte_range()
    2704  vmf->page = vm_normal_page(vma, vmf->address, vmf->orig_pte);  in do_wp_page()
    3850  page = vm_normal_page(vma, vmf->address, pte);  in do_numa_page()
    4427  if (vm_normal_page(vma, address, pte))  in follow_phys()
  mprotect.c
      83  page = vm_normal_page(vma, addr, oldpte);  in change_pte_range()
  mlock.c
     399  page = vm_normal_page(vma, start, *pte);  in __munlock_pagevec_fill()
  gup.c
     233  page = vm_normal_page(vma, address, pte);  in follow_page_pte()
     616  *page = vm_normal_page(*vma, address, *pte);  in get_gate_page()
  migrate.c
    2292  page = vm_normal_page(migrate->vma, addr, pte);  in migrate_vma_collect_pmd()
  mempolicy.c
     517  page = vm_normal_page(vma, addr, *pte);  in queue_pages_pte_range()
  memcontrol.c
    5403  struct page *page = vm_normal_page(vma, addr, ptent);  in mc_handle_present_pte()
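
Apart from the definition at memory.c:605, the hits above share one calling pattern: a page-table walker has read a present pte, asks vm_normal_page() for the struct page it maps, and checks the result for NULL (a special mapping with no normal backing page, such as a raw PFN mapping or the shared zero page) before doing anything with the page. The fragment below is only a sketch of that pattern, not code from any of the listed files; walk_present_pte() is a hypothetical helper name, and the pte is assumed to have been read under the page-table lock, as the listed callers do.

#include <linux/mm.h>

/*
 * Hypothetical helper sketching the call-site pattern seen in the
 * results above; it is not taken from any of the listed files.
 */
static struct page *walk_present_pte(struct vm_area_struct *vma,
				     unsigned long addr, pte_t ptent)
{
	struct page *page;

	/* The listed callers only ask about present (mapped) ptes. */
	if (!pte_present(ptent))
		return NULL;

	/*
	 * vm_normal_page() returns NULL when the pte does not map a
	 * "normal" struct-page-backed page, e.g. VM_PFNMAP/VM_MIXEDMAP
	 * ranges or the shared zero page; the callers above check for
	 * this and handle such entries specially, usually by skipping
	 * them.
	 */
	page = vm_normal_page(vma, addr, ptent);
	if (!page)
		return NULL;

	/* A real caller would now mlock, migrate, unmap, ... the page. */
	return page;
}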