Search results for refs:pte_lockptr (1 – 9 of 9), sorted by relevance
/kernel/linux/linux-5.10/mm/

  page_vma_mapped.c
    50    pvmw->ptl = pte_lockptr(pvmw->vma->vm_mm, pvmw->pmd);      in map_pte()
    279   pvmw->ptl = pte_lockptr(mm, pvmw->pmd);                    in page_vma_mapped_walk()
    285   pvmw->ptl = pte_lockptr(mm, pvmw->pmd);                    in page_vma_mapped_walk()

  mremap.c
    153   new_ptl = pte_lockptr(mm, new_pmd);                        in move_ptes()

  memory.c
    960   src_ptl = pte_lockptr(src_mm, src_pmd);                    in copy_pte_range()
    2618  spinlock_t *ptl = pte_lockptr(mm, pmd);                    in pte_unmap_same()
    4353  vmf->ptl = pte_lockptr(vma->vm_mm, vmf->pmd);              in do_numa_page()
    4557  vmf->ptl = pte_lockptr(vmf->vma->vm_mm, vmf->pmd);         in handle_pte_fault()

  khugepaged.c
    1143  pte_ptl = pte_lockptr(mm, pmd);                            in collapse_huge_page()

  migrate.c
    348   spinlock_t *ptl = pte_lockptr(mm, pmd);                    in migration_entry_wait()
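The mm/ callers above share one pattern: pte_lockptr() only looks up the spinlock that protects the PTE page under a given pmd, and the caller then takes that lock around its PTE access (directly, or via pte_offset_map_lock()). Below is a minimal sketch of that pattern; the helper name example_walk_pte() is hypothetical and the body is not copied from any of the files listed, it just combines pte_lockptr() with the standard pte_offset_map()/pte_unmap() accessors.

```c
#include <linux/mm.h>
#include <linux/spinlock.h>

/* Hypothetical caller, sketching the pattern used by the mm/ hits above. */
static void example_walk_pte(struct mm_struct *mm, pmd_t *pmd,
			     unsigned long addr)
{
	spinlock_t *ptl = pte_lockptr(mm, pmd);	/* lock covering this PTE page */
	pte_t *pte = pte_offset_map(pmd, addr);	/* map the PTE for addr */

	spin_lock(ptl);		/* serialize against other users of this PTE page */
	/* read or update *pte here */
	spin_unlock(ptl);
	pte_unmap(pte);
}
```

pte_unmap_same() and migration_entry_wait() in the listing use the lock immediately in this way; the fault paths (do_numa_page(), handle_pte_fault()) instead stash the result in vmf->ptl and take it later in the fault handling sequence.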
/kernel/linux/linux-5.10/arch/arm/mm/

  fault-armv.c
    120   ptl = pte_lockptr(vma->vm_mm, pmd);                        in adjust_pte()

/kernel/linux/linux-5.10/Documentation/vm/

  split_page_table_lock.rst
    26    - pte_lockptr()

/kernel/linux/linux-5.10/arch/powerpc/mm/

  pgtable.c
    321   assert_spin_locked(pte_lockptr(mm, pmd));                  in assert_pte_locked()

/kernel/linux/linux-5.10/include/linux/

  mm.h
    2223  static inline spinlock_t *pte_lockptr(struct mm_struct *mm, pmd_t *pmd)   in pte_lockptr() function
    2248  static inline spinlock_t *pte_lockptr(struct mm_struct *mm, pmd_t *pmd)   in pte_lockptr() function
    2281  spinlock_t *__ptl = pte_lockptr(mm, pmd); \
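The two mm.h hits at 2223 and 2248 are the two build-time variants of pte_lockptr(), and the hit at 2281 sits inside the pte_offset_map_lock() wrapper that maps a PTE and takes the returned lock. The sketch below shows the usual shape of that split; it assumes the variants are selected by the split-PTE-ptlock configuration, with ptlock_ptr() on the PTE page in the split case and mm->page_table_lock otherwise, and is not a verbatim copy of the header.

```c
#if USE_SPLIT_PTE_PTLOCKS
/* Split PTE locks: each PTE page carries its own spinlock. */
static inline spinlock_t *pte_lockptr(struct mm_struct *mm, pmd_t *pmd)
{
	return ptlock_ptr(pmd_page(*pmd));
}
#else
/* No split locks: all page tables of the mm share one lock. */
static inline spinlock_t *pte_lockptr(struct mm_struct *mm, pmd_t *pmd)
{
	return &mm->page_table_lock;
}
#endif

/* The hit at 2281 is this kind of wrapper: map the PTE, record and take its lock. */
#define pte_offset_map_lock(mm, pmd, address, ptlp)	\
({							\
	spinlock_t *__ptl = pte_lockptr(mm, pmd);	\
	pte_t *__pte = pte_offset_map(pmd, address);	\
	*(ptlp) = __ptl;				\
	spin_lock(__ptl);				\
	__pte;						\
})
```

This is why the powerpc assert_pte_locked() hit can simply call assert_spin_locked() on the value returned by pte_lockptr(): whichever variant is built, the function always names the one spinlock that must be held for the PTEs under that pmd.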