Home
last modified time | relevance | path

Searched refs: dst_mm (Results 1 – 5 of 5) sorted by relevance

/mm/
userfaultfd.c:21 static int mcopy_atomic_pte(struct mm_struct *dst_mm, in mcopy_atomic_pte() argument
69 if (mem_cgroup_try_charge(page, dst_mm, GFP_KERNEL, &memcg, false)) in mcopy_atomic_pte()
76 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in mcopy_atomic_pte()
90 inc_mm_counter(dst_mm, MM_ANONPAGES); in mcopy_atomic_pte()
95 set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte); in mcopy_atomic_pte()
112 static int mfill_zeropage_pte(struct mm_struct *dst_mm, in mfill_zeropage_pte() argument
125 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in mfill_zeropage_pte()
138 set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte); in mfill_zeropage_pte()
173 static __always_inline ssize_t __mcopy_atomic_hugetlb(struct mm_struct *dst_mm, in __mcopy_atomic_hugetlb() argument
200 up_read(&dst_mm->mmap_sem); in __mcopy_atomic_hugetlb()
[all …]
memory.c:697 copy_one_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm, in copy_one_pte() argument
714 if (unlikely(list_empty(&dst_mm->mmlist))) { in copy_one_pte()
716 if (list_empty(&dst_mm->mmlist)) in copy_one_pte()
717 list_add(&dst_mm->mmlist, in copy_one_pte()
799 set_pte_at(dst_mm, addr, dst_pte, pte); in copy_one_pte()
803 static int copy_pte_range(struct mm_struct *dst_mm, struct mm_struct *src_mm, in copy_pte_range() argument
817 dst_pte = pte_alloc_map_lock(dst_mm, dst_pmd, addr, &dst_ptl); in copy_pte_range()
842 entry.val = copy_one_pte(dst_mm, src_mm, dst_pte, src_pte, in copy_pte_range()
852 add_mm_rss_vec(dst_mm, rss); in copy_pte_range()
866 static inline int copy_pmd_range(struct mm_struct *dst_mm, struct mm_struct *src_mm, in copy_pmd_range() argument
[all …]
huge_memory.c:986 int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm, in copy_huge_pmd() argument
1000 pgtable = pte_alloc_one(dst_mm); in copy_huge_pmd()
1004 dst_ptl = pmd_lock(dst_mm, dst_pmd); in copy_huge_pmd()
1023 add_mm_counter(dst_mm, MM_ANONPAGES, HPAGE_PMD_NR); in copy_huge_pmd()
1024 mm_inc_nr_ptes(dst_mm); in copy_huge_pmd()
1025 pgtable_trans_huge_deposit(dst_mm, dst_pmd, pgtable); in copy_huge_pmd()
1026 set_pmd_at(dst_mm, addr, dst_pmd, pmd); in copy_huge_pmd()
1033 pte_free(dst_mm, pgtable); in copy_huge_pmd()
1048 zero_page = mm_get_huge_zero_page(dst_mm); in copy_huge_pmd()
1049 set_huge_zero_page(pgtable, dst_mm, vma, addr, dst_pmd, in copy_huge_pmd()
[all …]
shmem.c:2305 static int shmem_mfill_atomic_pte(struct mm_struct *dst_mm, in shmem_mfill_atomic_pte() argument
2368 ret = mem_cgroup_try_charge_delay(page, dst_mm, gfp, &memcg, false); in shmem_mfill_atomic_pte()
2393 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in shmem_mfill_atomic_pte()
2412 inc_mm_counter(dst_mm, mm_counter_file(page)); in shmem_mfill_atomic_pte()
2414 set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte); in shmem_mfill_atomic_pte()
2437 int shmem_mcopy_atomic_pte(struct mm_struct *dst_mm, in shmem_mcopy_atomic_pte() argument
2444 return shmem_mfill_atomic_pte(dst_mm, dst_pmd, dst_vma, in shmem_mcopy_atomic_pte()
2448 int shmem_mfill_zeropage_pte(struct mm_struct *dst_mm, in shmem_mfill_zeropage_pte() argument
2455 return shmem_mfill_atomic_pte(dst_mm, dst_pmd, dst_vma, in shmem_mfill_zeropage_pte()
hugetlb.c:4260 int hugetlb_mcopy_atomic_pte(struct mm_struct *dst_mm, in hugetlb_mcopy_atomic_pte() argument
4329 ptl = huge_pte_lockptr(h, dst_mm, dst_pte); in hugetlb_mcopy_atomic_pte()
4362 set_huge_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte); in hugetlb_mcopy_atomic_pte()
4366 hugetlb_count_add(pages_per_huge_page(h), dst_mm); in hugetlb_mcopy_atomic_pte()