/mm/
D | page_vma_mapped.c |
      51  pvmw->ptl = pte_lockptr(pvmw->vma->vm_mm, pvmw->pmd);  in map_pte()
      52  spin_lock(pvmw->ptl);  in map_pte()
     177  pvmw->ptl = huge_pte_lockptr(page_hstate(page), mm, pvmw->pte);  in page_vma_mapped_walk()
     178  spin_lock(pvmw->ptl);  in page_vma_mapped_walk()
     222  pvmw->ptl = pmd_lock(mm, pvmw->pmd);  in page_vma_mapped_walk()
     244  spin_unlock(pvmw->ptl);  in page_vma_mapped_walk()
     245  pvmw->ptl = NULL;  in page_vma_mapped_walk()
     254  spinlock_t *ptl = pmd_lock(mm, pvmw->pmd);  in page_vma_mapped_walk()  local
     256  spin_unlock(ptl);  in page_vma_mapped_walk()
     273  if (pvmw->ptl) {  in page_vma_mapped_walk()
     [all …]
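The map_pte() hits above use the two-step form of taking a split page-table lock: pte_lockptr() (or huge_pte_lockptr() for hugetlb) only computes which spinlock covers the PTE page, and the caller locks it explicitly. A minimal sketch of that idiom, assuming kernel headers; the helper name is illustrative, not a real kernel function:

    #include <linux/mm.h>

    /* Hypothetical helper: take the split PTE lock the way map_pte() does,
     * leaving it held for the caller to spin_unlock() later. */
    static void lock_pte_like_map_pte(struct mm_struct *mm, pmd_t *pmd,
                                      spinlock_t **ptlp)
    {
            *ptlp = pte_lockptr(mm, pmd);   /* which lock covers this PTE page */
            spin_lock(*ptlp);
    }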
|
D | huge_memory.c |
     628  vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd);  in __do_huge_pmd_anonymous_page()
     640  spin_unlock(vmf->ptl);  in __do_huge_pmd_anonymous_page()
     657  spin_unlock(vmf->ptl);  in __do_huge_pmd_anonymous_page()
     664  spin_unlock(vmf->ptl);  in __do_huge_pmd_anonymous_page()
     752  vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd);  in do_huge_pmd_anonymous_page()
     757  spin_unlock(vmf->ptl);  in do_huge_pmd_anonymous_page()
     760  spin_unlock(vmf->ptl);  in do_huge_pmd_anonymous_page()
     768  spin_unlock(vmf->ptl);  in do_huge_pmd_anonymous_page()
     771  spin_unlock(vmf->ptl);  in do_huge_pmd_anonymous_page()
     792  spinlock_t *ptl;  in insert_pfn_pmd()  local
     [all …]
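The THP fault-path hits above show pmd_lock(), which acquires the PMD-level lock and returns it so it can be stashed in vmf->ptl and dropped on each exit path. A hedged skeleton of that shape, modeled loosely on __do_huge_pmd_anonymous_page(); the racing check and the function name are illustrative:

    #include <linux/mm.h>

    static vm_fault_t huge_fault_sketch(struct vm_fault *vmf)
    {
            struct vm_area_struct *vma = vmf->vma;

            vmf->ptl = pmd_lock(vma->vm_mm, vmf->pmd);  /* lock and remember */
            if (!pmd_none(*vmf->pmd)) {     /* lost a race: PMD populated */
                    spin_unlock(vmf->ptl);
                    return 0;
            }
            /* ... install the huge PMD entry while vmf->ptl is held ... */
            spin_unlock(vmf->ptl);
            return 0;
    }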
|
D | memory.c |
     278  spinlock_t *ptl = pmd_lock(tlb->mm, pmd);  in free_pte_range()  local
     279  spin_unlock(ptl);  in free_pte_range()
     497  spinlock_t *ptl;  in __pte_alloc()  local
     517  ptl = pmd_lock(mm, pmd);  in __pte_alloc()
     523  spin_unlock(ptl);  in __pte_alloc()
    1382  spinlock_t *ptl;  in zap_pte_range()  local
    1390  start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl);  in zap_pte_range()
    1494  pte_unmap_unlock(start_pte, ptl);  in zap_pte_range()
    1535  spinlock_t *ptl = pmd_lock(tlb->mm, pmd);  in zap_pmd_range()  local
    1541  spin_unlock(ptl);  in zap_pmd_range()
    [all …]
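zap_pte_range() above uses the combined helpers: pte_offset_map_lock() maps the PTE page and takes its split lock in one call, and pte_unmap_unlock() undoes both together. A sketch of that range-walk pattern, assuming kernel headers; the function itself is hypothetical:

    #include <linux/mm.h>

    static void pte_range_sketch(struct mm_struct *mm, pmd_t *pmd,
                                 unsigned long addr, unsigned long end)
    {
            spinlock_t *ptl;
            pte_t *start_pte, *pte;

            start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
            for (pte = start_pte; addr < end; pte++, addr += PAGE_SIZE) {
                    if (pte_none(*pte))
                            continue;
                    /* ... act on the entry while ptl is held ... */
            }
            pte_unmap_unlock(start_pte, ptl);  /* unlock, then unmap */
    }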
|
D | madvise.c |
     209  spinlock_t *ptl;  in swapin_walk_pmd_entry()  local
     211  orig_pte = pte_offset_map_lock(vma->vm_mm, pmd, start, &ptl);  in swapin_walk_pmd_entry()
     213  pte_unmap_unlock(orig_pte, ptl);  in swapin_walk_pmd_entry()
     327  spinlock_t *ptl;  in madvise_cold_or_pageout_pte_range()  local
     344  ptl = pmd_trans_huge_lock(pmd, vma);  in madvise_cold_or_pageout_pte_range()
     345  if (!ptl)  in madvise_cold_or_pageout_pte_range()
     371  spin_unlock(ptl);  in madvise_cold_or_pageout_pte_range()
     401  spin_unlock(ptl);  in madvise_cold_or_pageout_pte_range()
     412  orig_pte = pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);  in madvise_cold_or_pageout_pte_range()
     442  pte_unmap_unlock(orig_pte, ptl);  in madvise_cold_or_pageout_pte_range()
     [all …]
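madvise_cold_or_pageout_pte_range() above shows the usual two-level pattern: pmd_trans_huge_lock() returns the held PMD lock if the entry is a transparent huge page, or NULL so the walker falls back to per-PTE locking. A sketch under those assumptions (names illustrative):

    #include <linux/mm.h>
    #include <linux/huge_mm.h>

    static int pmd_entry_sketch(pmd_t *pmd, struct vm_area_struct *vma,
                                unsigned long addr, unsigned long end)
    {
            spinlock_t *ptl;
            pte_t *orig_pte, *pte;

            ptl = pmd_trans_huge_lock(pmd, vma);
            if (ptl) {
                    /* ... handle the whole huge PMD under ptl ... */
                    spin_unlock(ptl);
                    return 0;
            }

            /* Not huge: lock and walk the PTE level instead. */
            orig_pte = pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);
            for (; addr < end; pte++, addr += PAGE_SIZE) {
                    /* ... per-PTE work ... */
            }
            pte_unmap_unlock(orig_pte, ptl);
            return 0;
    }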
|
D | mincore.c |
     102  spinlock_t *ptl;  in mincore_pte_range()  local
     108  ptl = pmd_trans_huge_lock(pmd, vma);  in mincore_pte_range()
     109  if (ptl) {  in mincore_pte_range()
     111  spin_unlock(ptl);  in mincore_pte_range()
     120  ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);  in mincore_pte_range()
     150  pte_unmap_unlock(ptep - 1, ptl);  in mincore_pte_range()
|
D | hmm.c |
     418  spinlock_t *ptl = pud_trans_huge_lock(pudp, walk->vma);  in hmm_vma_walk_pud()  local
     420  if (!ptl)  in hmm_vma_walk_pud()
     428  spin_unlock(ptl);  in hmm_vma_walk_pud()
     439  spin_unlock(ptl);  in hmm_vma_walk_pud()
     451  spin_unlock(ptl);  in hmm_vma_walk_pud()
     465  spin_unlock(ptl);  in hmm_vma_walk_pud()
     484  spinlock_t *ptl;  in hmm_vma_walk_hugetlb_entry()  local
     487  ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte);  in hmm_vma_walk_hugetlb_entry()
     497  spin_unlock(ptl);  in hmm_vma_walk_hugetlb_entry()
     505  spin_unlock(ptl);  in hmm_vma_walk_hugetlb_entry()
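hmm_vma_walk_pud() above uses the PUD-level analogue of the previous pattern: pud_trans_huge_lock() returns the held lock only when the PUD itself is a huge entry. A minimal sketch, with an illustrative function name:

    #include <linux/mm.h>
    #include <linux/huge_mm.h>

    static int pud_entry_sketch(pud_t *pudp, struct vm_area_struct *vma)
    {
            spinlock_t *ptl = pud_trans_huge_lock(pudp, vma);

            if (!ptl)
                    return 0;       /* not huge: descend to the PMDs */
            /* ... handle the huge PUD while ptl is held ... */
            spin_unlock(ptl);
            return 0;
    }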
|
D | hugetlb.c |
    4457  spinlock_t *ptl;  in __unmap_hugepage_range()  local
    4488  ptl = huge_pte_lock(h, mm, ptep);  in __unmap_hugepage_range()
    4490  spin_unlock(ptl);  in __unmap_hugepage_range()
    4498  spin_unlock(ptl);  in __unmap_hugepage_range()
    4508  spin_unlock(ptl);  in __unmap_hugepage_range()
    4520  spin_unlock(ptl);  in __unmap_hugepage_range()
    4539  spin_unlock(ptl);  in __unmap_hugepage_range()
    4660  struct page *pagecache_page, spinlock_t *ptl)  in hugetlb_cow()  argument
    4701  spin_unlock(ptl);  in hugetlb_cow()
    4737  spin_lock(ptl);  in hugetlb_cow()
    [all …]
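The hugetlb hits above go through huge_pte_lock(), which selects the appropriate lock for the page size described by the hstate (a split PMD lock, or mm->page_table_lock for larger pages), takes it, and returns it for the matching spin_unlock(). A sketch of the pairing, assuming kernel headers; the helper name is hypothetical:

    #include <linux/hugetlb.h>

    /* Hypothetical helper: inspect a hugetlb PTE under its lock. */
    static void hugetlb_pte_sketch(struct hstate *h, struct mm_struct *mm,
                                   pte_t *ptep)
    {
            spinlock_t *ptl = huge_pte_lock(h, mm, ptep);

            /* ... examine or modify *ptep while ptl is held ... */
            spin_unlock(ptl);
    }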
|
D | migrate.c |
     300  spinlock_t *ptl)  in __migration_entry_wait()  argument
     306  spin_lock(ptl);  in __migration_entry_wait()
     325  pte_unmap_unlock(ptep, ptl);  in __migration_entry_wait()
     329  pte_unmap_unlock(ptep, ptl);  in __migration_entry_wait()
     335  spinlock_t *ptl = pte_lockptr(mm, pmd);  in migration_entry_wait()  local
     337  __migration_entry_wait(mm, ptep, ptl);  in migration_entry_wait()
     343  spinlock_t *ptl = huge_pte_lockptr(hstate_vma(vma), mm, pte);  in migration_entry_wait_huge()  local
     344  __migration_entry_wait(mm, pte, ptl);  in migration_entry_wait_huge()
     350  spinlock_t *ptl;  in pmd_migration_entry_wait()  local
     353  ptl = pmd_lock(mm, pmd);  in pmd_migration_entry_wait()
     [all …]
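The migrate.c hits show a calling convention where the caller only computes the lock pointer (pte_lockptr() or huge_pte_lockptr()) and the callee takes it, revalidates the entry under the lock, and drops it again before sleeping. A hedged sketch of that shape; is_swap_pte() is the real predicate, but the function name and the elided wait are illustrative:

    #include <linux/mm.h>
    #include <linux/swapops.h>

    static void wait_on_entry_sketch(struct mm_struct *mm, pte_t *ptep,
                                     spinlock_t *ptl)
    {
            spin_lock(ptl);                 /* caller passed ptl unlocked */
            if (!is_swap_pte(*ptep)) {      /* raced: migration already done */
                    pte_unmap_unlock(ptep, ptl);
                    return;
            }
            pte_unmap_unlock(ptep, ptl);
            /* ... wait for the migration to finish, outside ptl ... */
    }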
|
D | gup.c |
     504  spinlock_t *ptl;  in follow_page_pte()  local
     528  ptep = pte_offset_map_lock(mm, pmd, address, &ptl);  in follow_page_pte()
     544  pte_unmap_unlock(ptep, ptl);  in follow_page_pte()
     551  pte_unmap_unlock(ptep, ptl);  in follow_page_pte()
     639  pte_unmap_unlock(ptep, ptl);  in follow_page_pte()
     642  pte_unmap_unlock(ptep, ptl);  in follow_page_pte()
     654  spinlock_t *ptl;  in follow_pmd_mask()  local
     698  ptl = pmd_lock(mm, pmd);  in follow_pmd_mask()
     700  spin_unlock(ptl);  in follow_pmd_mask()
     711  ptl = pmd_lock(mm, pmd);  in follow_pmd_mask()
     [all …]
|
D | userfaultfd.c |
      68  spinlock_t *ptl;  in mfill_atomic_install_pte()  local
      84  dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);  in mfill_atomic_install_pte()
     120  pte_unmap_unlock(dst_pte, ptl);  in mfill_atomic_install_pte()
     190  spinlock_t *ptl;  in mfill_zeropage_pte()  local
     197  dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);  in mfill_zeropage_pte()
     215  pte_unmap_unlock(dst_pte, ptl);  in mfill_zeropage_pte()
|
D | khugepaged.c |
     746  spinlock_t *ptl,  in __collapse_huge_page_copy()  argument
     762  spin_lock(ptl);  in __collapse_huge_page_copy()
     768  spin_unlock(ptl);  in __collapse_huge_page_copy()
     780  spin_lock(ptl);  in __collapse_huge_page_copy()
     787  spin_unlock(ptl);  in __collapse_huge_page_copy()
    1240  spinlock_t *ptl;  in khugepaged_scan_pmd()  local
    1253  pte = pte_offset_map_lock(mm, pmd, address, &ptl);  in khugepaged_scan_pmd()
    1373  pte_unmap_unlock(pte, ptl);  in khugepaged_scan_pmd()
    1447  spinlock_t *ptl;  in collapse_pte_mapped_thp()  local
    1490  start_pte = pte_offset_map_lock(mm, pmd, haddr, &ptl);  in collapse_pte_mapped_thp()
    [all …]
|
D | mempolicy.c |
     451  static int queue_pages_pmd(pmd_t *pmd, spinlock_t *ptl, unsigned long addr,  in queue_pages_pmd()  argument
     453  __releases(ptl)  in queue_pages_pmd()
     466  spin_unlock(ptl);  in queue_pages_pmd()
     484  spin_unlock(ptl);  in queue_pages_pmd()
     511  spinlock_t *ptl;  in queue_pages_pte_range()  local
     513  ptl = pmd_trans_huge_lock(pmd, vma);  in queue_pages_pte_range()
     514  if (ptl) {  in queue_pages_pte_range()
     515  ret = queue_pages_pmd(pmd, ptl, addr, end, walk);  in queue_pages_pte_range()
     524  mapped_pte = pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);  in queue_pages_pte_range()
     556  pte_unmap_unlock(mapped_pte, ptl);  in queue_pages_pte_range()
     [all …]
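queue_pages_pmd() above is annotated __releases(ptl): it is entered with the PMD lock already held (taken by queue_pages_pte_range() via pmd_trans_huge_lock()) and must drop it on every return path, a contract sparse can then check. A sketch of that contract, with illustrative names and error value:

    #include <linux/mm.h>

    /* Hypothetical consumer: called with ptl held, drops it on all paths. */
    static int locked_pmd_sketch(pmd_t *pmd, spinlock_t *ptl)
            __releases(ptl)
    {
            if (pmd_none(*pmd)) {
                    spin_unlock(ptl);
                    return -ENOENT;
            }
            /* ... consume the huge PMD under ptl ... */
            spin_unlock(ptl);
            return 0;
    }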
|
D | debug_vm_pgtable.c |
    1241  spinlock_t *ptl = NULL;  in debug_vm_pgtable()  local
    1305  args.ptep = pte_offset_map_lock(args.mm, args.pmdp, args.vaddr, &ptl);  in debug_vm_pgtable()
    1308  pte_unmap_unlock(args.ptep, ptl);  in debug_vm_pgtable()
    1310  ptl = pmd_lock(args.mm, args.pmdp);  in debug_vm_pgtable()
    1315  spin_unlock(ptl);  in debug_vm_pgtable()
    1317  ptl = pud_lock(args.mm, args.pudp);  in debug_vm_pgtable()
    1322  spin_unlock(ptl);  in debug_vm_pgtable()
|
D | pagewalk.c |
      46  spinlock_t *ptl;  in walk_pte_range()  local
      53  pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);  in walk_pte_range()
      55  pte_unmap_unlock(pte, ptl);  in walk_pte_range()
|
D | memory-failure.c |
     635  spinlock_t *ptl;  in hwpoison_pte_range()  local
     637  ptl = pmd_trans_huge_lock(pmdp, walk->vma);  in hwpoison_pte_range()
     638  if (ptl) {  in hwpoison_pte_range()
     640  spin_unlock(ptl);  in hwpoison_pte_range()
     648  addr, &ptl);  in hwpoison_pte_range()
     655  pte_unmap_unlock(mapped_pte, ptl);  in hwpoison_pte_range()
|
D | mlock.c |
     355  spinlock_t *ptl;  in __munlock_pagevec_fill()  local
     362  pte = get_locked_pte(vma->vm_mm, start, &ptl);  in __munlock_pagevec_fill()
     399  pte_unmap_unlock(pte, ptl);  in __munlock_pagevec_fill()
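__munlock_pagevec_fill() above relies on get_locked_pte(), which allocates intermediate page tables if necessary, maps the PTE for an address, and returns it with the split lock held via the output pointer. A sketch, assuming kernel headers; the wrapper function is hypothetical:

    #include <linux/mm.h>

    static void locked_pte_sketch(struct vm_area_struct *vma,
                                  unsigned long start)
    {
            spinlock_t *ptl;
            pte_t *pte = get_locked_pte(vma->vm_mm, start, &ptl);

            if (!pte)
                    return;         /* page-table allocation failed */
            /* ... scan PTEs from here while ptl is held ... */
            pte_unmap_unlock(pte, ptl);
    }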
|
D | mprotect.c |
      44  spinlock_t *ptl;  in change_pte_range()  local
      68  pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);  in change_pte_range()
     197  pte_unmap_unlock(pte - 1, ptl);  in change_pte_range()
|
D | memcontrol.c |
    5897  spinlock_t *ptl;  in mem_cgroup_count_precharge_pte_range()  local
    5899  ptl = pmd_trans_huge_lock(pmd, vma);  in mem_cgroup_count_precharge_pte_range()
    5900  if (ptl) {  in mem_cgroup_count_precharge_pte_range()
    5908  spin_unlock(ptl);  in mem_cgroup_count_precharge_pte_range()
    5914  pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);  in mem_cgroup_count_precharge_pte_range()
    5918  pte_unmap_unlock(pte - 1, ptl);  in mem_cgroup_count_precharge_pte_range()
    6094  spinlock_t *ptl;  in mem_cgroup_move_charge_pte_range()  local
    6099  ptl = pmd_trans_huge_lock(pmd, vma);  in mem_cgroup_move_charge_pte_range()
    6100  if (ptl) {  in mem_cgroup_move_charge_pte_range()
    6102  spin_unlock(ptl);  in mem_cgroup_move_charge_pte_range()
    [all …]
|
D | vmscan.c |
    3704  spinlock_t *ptl;  in walk_pte_range()  local
    3715  ptl = pte_lockptr(args->mm, pmd);  in walk_pte_range()
    3716  if (!spin_trylock(ptl))  in walk_pte_range()
    3765  spin_unlock(ptl);  in walk_pte_range()
    3776  spinlock_t *ptl;  in walk_pmd_range_locked()  local
    3798  ptl = pmd_lockptr(args->mm, pmd);  in walk_pmd_range_locked()
    3799  if (!spin_trylock(ptl))  in walk_pmd_range_locked()
    3842  spin_unlock(ptl);  in walk_pmd_range_locked()
    4427  lockdep_assert_held(pvmw->ptl);  in lru_gen_look_around()
    4430  if (spin_is_contended(pvmw->ptl))  in lru_gen_look_around()
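The MGLRU walkers above deliberately use spin_trylock() on the lock returned by pte_lockptr()/pmd_lockptr(), so the background aging walk skips a contended range rather than stalling behind its page-table lock. A minimal sketch of that best-effort idiom; the function name is illustrative:

    #include <linux/mm.h>

    static bool trylock_range_sketch(struct mm_struct *mm, pmd_t *pmd)
    {
            spinlock_t *ptl = pte_lockptr(mm, pmd);

            if (!spin_trylock(ptl))
                    return false;   /* contended: skip, revisit later */
            /* ... scan the PTE range under ptl ... */
            spin_unlock(ptl);
            return true;
    }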
|
D | ksm.c |
    1121  spinlock_t *ptl;  in replace_page()  local
    1138  ptep = pte_offset_map_lock(mm, pmd, addr, &ptl);  in replace_page()
    1140  pte_unmap_unlock(ptep, ptl);  in replace_page()
    1179  pte_unmap_unlock(ptep, ptl);  in replace_page()
|
D | filemap.c |
    3278  vmf->ptl = pmd_lock(mm, vmf->pmd);  in filemap_map_pmd()
    3284  spin_unlock(vmf->ptl);  in filemap_map_pmd()
    3415  pte_unmap_unlock(vmf->pte, vmf->ptl);  in filemap_map_pages()
|
D | swapfile.c |
    1905  spinlock_t *ptl;  in unuse_pte()  local
    1914  pte = pte_offset_map_lock(vma->vm_mm, pmd, addr, &ptl);  in unuse_pte()
    1933  pte_unmap_unlock(pte, ptl);  in unuse_pte()
|
/mm/damon/ |
D | vaddr.c |
     374  spinlock_t *ptl;  in damon_mkold_pmd_entry()  local
     377  ptl = pmd_lock(walk->mm, pmd);  in damon_mkold_pmd_entry()
     379  spin_unlock(ptl);  in damon_mkold_pmd_entry()
     385  spin_unlock(ptl);  in damon_mkold_pmd_entry()
     388  spin_unlock(ptl);  in damon_mkold_pmd_entry()
     393  pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);  in damon_mkold_pmd_entry()
     398  pte_unmap_unlock(pte, ptl);  in damon_mkold_pmd_entry()
     439  spinlock_t *ptl;  in damon_mkold_hugetlb_entry()  local
     442  ptl = huge_pte_lock(h, walk->mm, pte);  in damon_mkold_hugetlb_entry()
     450  spin_unlock(ptl);  in damon_mkold_hugetlb_entry()
     [all …]
|