Home
last modified time | relevance | path

Searched refs:ptl (Results 1 – 19 of 19) sorted by relevance

/mm/
page_vma_mapped.c:50 pvmw->ptl = pte_lockptr(pvmw->vma->vm_mm, pvmw->pmd); in map_pte()
51 spin_lock(pvmw->ptl); in map_pte()
160 pvmw->ptl = huge_pte_lockptr(page_hstate(page), mm, pvmw->pte); in page_vma_mapped_walk()
161 spin_lock(pvmw->ptl); in page_vma_mapped_walk()
184 pvmw->ptl = pmd_lock(mm, pvmw->pmd); in page_vma_mapped_walk()
206 spin_unlock(pvmw->ptl); in page_vma_mapped_walk()
207 pvmw->ptl = NULL; in page_vma_mapped_walk()
231 if (pvmw->ptl) { in page_vma_mapped_walk()
232 spin_unlock(pvmw->ptl); in page_vma_mapped_walk()
233 pvmw->ptl = NULL; in page_vma_mapped_walk()
[all …]
huge_memory.c:614 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in __do_huge_pmd_anonymous_page()
628 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
646 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
653 spin_unlock(vmf->ptl); in __do_huge_pmd_anonymous_page()
744 vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd); in do_huge_pmd_anonymous_page()
750 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
752 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
758 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
762 spin_unlock(vmf->ptl); in do_huge_pmd_anonymous_page()
783 spinlock_t *ptl; in insert_pfn_pmd() local
[all …]
memory.c:426 spinlock_t *ptl; in __pte_alloc() local
446 ptl = pmd_lock(mm, pmd); in __pte_alloc()
452 spin_unlock(ptl); in __pte_alloc()
1031 spinlock_t *ptl; in zap_pte_range() local
1039 start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl); in zap_pte_range()
1137 pte_unmap_unlock(start_pte, ptl); in zap_pte_range()
1412 spinlock_t **ptl) in __get_locked_pte() argument
1431 return pte_alloc_map_lock(mm, pmd, addr, ptl); in __get_locked_pte()
1447 spinlock_t *ptl; in insert_page() local
1454 pte = get_locked_pte(mm, addr, &ptl); in insert_page()
[all …]
madvise.c:197 spinlock_t *ptl; in swapin_walk_pmd_entry() local
199 orig_pte = pte_offset_map_lock(vma->vm_mm, pmd, start, &ptl); in swapin_walk_pmd_entry()
201 pte_unmap_unlock(orig_pte, ptl); in swapin_walk_pmd_entry()
310 spinlock_t *ptl; in madvise_cold_or_pageout_pte_range() local
323 ptl = pmd_trans_huge_lock(pmd, vma); in madvise_cold_or_pageout_pte_range()
324 if (!ptl) in madvise_cold_or_pageout_pte_range()
345 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
375 spin_unlock(ptl); in madvise_cold_or_pageout_pte_range()
386 orig_pte = pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in madvise_cold_or_pageout_pte_range()
414 pte_unmap_unlock(orig_pte, ptl); in madvise_cold_or_pageout_pte_range()
[all …]
migrate.c:308 spinlock_t *ptl) in __migration_entry_wait() argument
314 spin_lock(ptl); in __migration_entry_wait()
332 pte_unmap_unlock(ptep, ptl); in __migration_entry_wait()
336 pte_unmap_unlock(ptep, ptl); in __migration_entry_wait()
342 spinlock_t *ptl = pte_lockptr(mm, pmd); in migration_entry_wait() local
344 __migration_entry_wait(mm, ptep, ptl); in migration_entry_wait()
350 spinlock_t *ptl = huge_pte_lockptr(hstate_vma(vma), mm, pte); in migration_entry_wait_huge() local
351 __migration_entry_wait(mm, pte, ptl); in migration_entry_wait_huge()
357 spinlock_t *ptl; in pmd_migration_entry_wait() local
360 ptl = pmd_lock(mm, pmd); in pmd_migration_entry_wait()
[all …]
mincore.c:125 spinlock_t *ptl; in mincore_pte_range() local
131 ptl = pmd_trans_huge_lock(pmd, vma); in mincore_pte_range()
132 if (ptl) { in mincore_pte_range()
134 spin_unlock(ptl); in mincore_pte_range()
143 ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in mincore_pte_range()
173 pte_unmap_unlock(ptep - 1, ptl); in mincore_pte_range()
hugetlb.c:3527 spinlock_t *ptl; in __unmap_hugepage_range() local
3557 ptl = huge_pte_lock(h, mm, ptep); in __unmap_hugepage_range()
3559 spin_unlock(ptl); in __unmap_hugepage_range()
3569 spin_unlock(ptl); in __unmap_hugepage_range()
3579 spin_unlock(ptl); in __unmap_hugepage_range()
3591 spin_unlock(ptl); in __unmap_hugepage_range()
3610 spin_unlock(ptl); in __unmap_hugepage_range()
3729 struct page *pagecache_page, spinlock_t *ptl) in hugetlb_cow() argument
3770 spin_unlock(ptl); in hugetlb_cow()
3786 spin_lock(ptl); in hugetlb_cow()
[all …]
gup.c:179 spinlock_t *ptl; in follow_page_pte() local
186 ptep = pte_offset_map_lock(mm, pmd, address, &ptl); in follow_page_pte()
202 pte_unmap_unlock(ptep, ptl); in follow_page_pte()
209 pte_unmap_unlock(ptep, ptl); in follow_page_pte()
245 pte_unmap_unlock(ptep, ptl); in follow_page_pte()
299 pte_unmap_unlock(ptep, ptl); in follow_page_pte()
302 pte_unmap_unlock(ptep, ptl); in follow_page_pte()
314 spinlock_t *ptl; in follow_pmd_mask() local
358 ptl = pmd_lock(mm, pmd); in follow_pmd_mask()
360 spin_unlock(ptl); in follow_pmd_mask()
[all …]
userfaultfd.c:30 spinlock_t *ptl; in mcopy_atomic_pte() local
76 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in mcopy_atomic_pte()
100 pte_unmap_unlock(dst_pte, ptl); in mcopy_atomic_pte()
105 pte_unmap_unlock(dst_pte, ptl); in mcopy_atomic_pte()
118 spinlock_t *ptl; in mfill_zeropage_pte() local
125 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in mfill_zeropage_pte()
143 pte_unmap_unlock(dst_pte, ptl); in mfill_zeropage_pte()
khugepaged.c:649 spinlock_t *ptl) in __collapse_huge_page_copy() argument
664 spin_lock(ptl); in __collapse_huge_page_copy()
670 spin_unlock(ptl); in __collapse_huge_page_copy()
682 spin_lock(ptl); in __collapse_huge_page_copy()
689 spin_unlock(ptl); in __collapse_huge_page_copy()
1125 spinlock_t *ptl; in khugepaged_scan_pmd() local
1138 pte = pte_offset_map_lock(mm, pmd, address, &ptl); in khugepaged_scan_pmd()
1228 pte_unmap_unlock(pte, ptl); in khugepaged_scan_pmd()
1297 spinlock_t *ptl; in collapse_pte_mapped_thp() local
1318 start_pte = pte_offset_map_lock(mm, pmd, haddr, &ptl); in collapse_pte_mapped_thp()
[all …]
pagewalk.c:96 spinlock_t *ptl = pud_trans_huge_lock(pud, walk->vma); in walk_pud_range() local
98 if (ptl) { in walk_pud_range()
100 spin_unlock(ptl); in walk_pud_range()
mempolicy.c:441 static int queue_pages_pmd(pmd_t *pmd, spinlock_t *ptl, unsigned long addr, in queue_pages_pmd() argument
455 spin_unlock(ptl); in queue_pages_pmd()
474 spin_unlock(ptl); in queue_pages_pmd()
500 spinlock_t *ptl; in queue_pages_pte_range() local
502 ptl = pmd_trans_huge_lock(pmd, vma); in queue_pages_pte_range()
503 if (ptl) { in queue_pages_pte_range()
504 ret = queue_pages_pmd(pmd, ptl, addr, end, walk); in queue_pages_pte_range()
513 pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl); in queue_pages_pte_range()
545 pte_unmap_unlock(pte - 1, ptl); in queue_pages_pte_range()
562 spinlock_t *ptl; in queue_pages_hugetlb() local
[all …]
mprotect.c:43 spinlock_t *ptl; in change_pte_range() local
61 pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in change_pte_range()
159 pte_unmap_unlock(pte - 1, ptl); in change_pte_range()
mlock.c:379 spinlock_t *ptl; in __munlock_pagevec_fill() local
386 pte = get_locked_pte(vma->vm_mm, start, &ptl); in __munlock_pagevec_fill()
423 pte_unmap_unlock(pte, ptl); in __munlock_pagevec_fill()
memcontrol.c:5626 spinlock_t *ptl; in mem_cgroup_count_precharge_pte_range() local
5628 ptl = pmd_trans_huge_lock(pmd, vma); in mem_cgroup_count_precharge_pte_range()
5629 if (ptl) { in mem_cgroup_count_precharge_pte_range()
5637 spin_unlock(ptl); in mem_cgroup_count_precharge_pte_range()
5643 pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in mem_cgroup_count_precharge_pte_range()
5647 pte_unmap_unlock(pte - 1, ptl); in mem_cgroup_count_precharge_pte_range()
5826 spinlock_t *ptl; in mem_cgroup_move_charge_pte_range() local
5831 ptl = pmd_trans_huge_lock(pmd, vma); in mem_cgroup_move_charge_pte_range()
5832 if (ptl) { in mem_cgroup_move_charge_pte_range()
5834 spin_unlock(ptl); in mem_cgroup_move_charge_pte_range()
[all …]
hmm.c:736 spinlock_t *ptl; in hmm_vma_walk_hugetlb_entry() local
740 ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte); in hmm_vma_walk_hugetlb_entry()
762 spin_unlock(ptl); in hmm_vma_walk_hugetlb_entry()
ksm.c:1126 spinlock_t *ptl; in replace_page() local
1143 ptep = pte_offset_map_lock(mm, pmd, addr, &ptl); in replace_page()
1145 pte_unmap_unlock(ptep, ptl); in replace_page()
1184 pte_unmap_unlock(ptep, ptl); in replace_page()
shmem.c:2319 spinlock_t *ptl; in shmem_mfill_atomic_pte() local
2393 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl); in shmem_mfill_atomic_pte()
2418 pte_unmap_unlock(dst_pte, ptl); in shmem_mfill_atomic_pte()
2424 pte_unmap_unlock(dst_pte, ptl); in shmem_mfill_atomic_pte()
swapfile.c:1858 spinlock_t *ptl; in unuse_pte() local
1873 pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl); in unuse_pte()
1900 pte_unmap_unlock(pte, ptl); in unuse_pte()