
Searched refs:pvmw (Results 1 – 8 of 8) sorted by relevance

/mm/
page_vma_mapped.c
10 static inline bool not_found(struct page_vma_mapped_walk *pvmw) in not_found() argument
12 page_vma_mapped_walk_done(pvmw); in not_found()
16 static bool map_pte(struct page_vma_mapped_walk *pvmw) in map_pte() argument
18 pvmw->pte = pte_offset_map(pvmw->pmd, pvmw->address); in map_pte()
19 if (!(pvmw->flags & PVMW_SYNC)) { in map_pte()
20 if (pvmw->flags & PVMW_MIGRATION) { in map_pte()
21 if (!is_swap_pte(*pvmw->pte)) in map_pte()
39 if (is_swap_pte(*pvmw->pte)) { in map_pte()
43 entry = pte_to_swp_entry(*pvmw->pte); in map_pte()
47 } else if (!pte_present(*pvmw->pte)) in map_pte()
[all …]
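
Context from the hits above: map_pte() treats a non-present PTE as interesting only when it is in swap format. A minimal sketch of that test, assuming the stock helpers is_swap_pte(), pte_to_swp_entry() and is_migration_entry(); the wrapper function itself is hypothetical:

    /* Hypothetical helper: does this PTE encode a migration entry? */
    static bool pte_holds_migration_entry(pte_t pte)
    {
            swp_entry_t entry;

            /* Present or empty PTEs are not in swap format at all. */
            if (!is_swap_pte(pte))
                    return false;
            entry = pte_to_swp_entry(pte);
            return is_migration_entry(entry);
    }
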
rmap.c
792 struct page_vma_mapped_walk pvmw = { in page_referenced_one() local
799 while (page_vma_mapped_walk(&pvmw)) { in page_referenced_one()
800 address = pvmw.address; in page_referenced_one()
803 page_vma_mapped_walk_done(&pvmw); in page_referenced_one()
808 if (pvmw.pte) { in page_referenced_one()
809 if (lru_gen_enabled() && pte_young(*pvmw.pte)) { in page_referenced_one()
810 lru_gen_look_around(&pvmw); in page_referenced_one()
815 pvmw.pte)) in page_referenced_one()
819 pvmw.pmd)) in page_referenced_one()
924 struct page_vma_mapped_walk pvmw = { in page_mkclean_one() local
[all …]
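
The rmap call sites above all share one walker pattern: fill in struct page_vma_mapped_walk, loop over page_vma_mapped_walk(), and dispatch on whether the mapping is a PTE or a PMD. A minimal sketch of that skeleton, with the loop body reduced to comments:

    struct page_vma_mapped_walk pvmw = {
            .page = page,
            .vma = vma,
            .address = address,
    };

    while (page_vma_mapped_walk(&pvmw)) {
            if (pvmw.pte) {
                    /* page is mapped by a PTE here; pvmw.pte is valid */
            } else {
                    /* PMD-mapped THP; pvmw.pmd is valid instead */
            }
            /* break out early only via page_vma_mapped_walk_done() */
    }
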
page_idle.c
51 struct page_vma_mapped_walk pvmw = { in page_idle_clear_pte_refs_one() local
58 while (page_vma_mapped_walk(&pvmw)) { in page_idle_clear_pte_refs_one()
59 addr = pvmw.address; in page_idle_clear_pte_refs_one()
60 if (pvmw.pte) { in page_idle_clear_pte_refs_one()
65 if (ptep_clear_young_notify(vma, addr, pvmw.pte)) in page_idle_clear_pte_refs_one()
68 if (pmdp_clear_young_notify(vma, addr, pvmw.pmd)) in page_idle_clear_pte_refs_one()
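A sketch of the page_idle_clear_pte_refs_one() loop body, with the lines elided above filled in from context; it ages the page through the MMU-notifier-aware helpers so secondary MMUs are covered too (referenced is a local bool):

    while (page_vma_mapped_walk(&pvmw)) {
            addr = pvmw.address;
            if (pvmw.pte) {
                    /* clears young in the PTE and in secondary MMUs */
                    if (ptep_clear_young_notify(vma, addr, pvmw.pte))
                            referenced = true;
            } else if (IS_ENABLED(CONFIG_TRANSPARENT_HUGEPAGE)) {
                    if (pmdp_clear_young_notify(vma, addr, pvmw.pmd))
                            referenced = true;
            }
    }
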
ksm.c
1031 struct page_vma_mapped_walk pvmw = { in write_protect_page() local
1039 pvmw.address = page_address_in_vma(page, vma); in write_protect_page()
1040 if (pvmw.address == -EFAULT) in write_protect_page()
1046 pvmw.address, in write_protect_page()
1047 pvmw.address + PAGE_SIZE); in write_protect_page()
1050 if (!page_vma_mapped_walk(&pvmw)) in write_protect_page()
1052 if (WARN_ONCE(!pvmw.pte, "Unexpected PMD mapping?")) in write_protect_page()
1055 if (pte_write(*pvmw.pte) || pte_dirty(*pvmw.pte) || in write_protect_page()
1056 (pte_protnone(*pvmw.pte) && pte_savedwrite(*pvmw.pte)) || in write_protect_page()
1061 flush_cache_page(vma, pvmw.address, page_to_pfn(page)); in write_protect_page()
[all …]
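
Once write_protect_page() has found a PTE worth downgrading, the write-protect itself follows the standard clear-flush/modify/set sequence. A simplified sketch, with the error paths and the page-count check elided (mm here is vma->vm_mm):

    flush_cache_page(vma, pvmw.address, page_to_pfn(page));
    entry = ptep_clear_flush(vma, pvmw.address, pvmw.pte);
    if (pte_dirty(entry))
            set_page_dirty(page);
    entry = pte_mkclean(pte_wrprotect(entry));
    set_pte_at(mm, pvmw.address, pvmw.pte, entry);
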
migrate.c
179 struct page_vma_mapped_walk pvmw = { in remove_migration_pte() local
190 while (page_vma_mapped_walk(&pvmw)) { in remove_migration_pte()
194 new = page - pvmw.page->index + in remove_migration_pte()
195 linear_page_index(vma, pvmw.address); in remove_migration_pte()
199 if (!pvmw.pte) { in remove_migration_pte()
201 remove_migration_pmd(&pvmw, new); in remove_migration_pte()
208 if (pte_swp_soft_dirty(*pvmw.pte)) in remove_migration_pte()
214 entry = pte_to_swp_entry(*pvmw.pte); in remove_migration_pte()
217 else if (pte_swp_uffd_wp(*pvmw.pte)) in remove_migration_pte()
228 if (pte_swp_soft_dirty(*pvmw.pte)) in remove_migration_pte()
[all …]
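
remove_migration_pte() rebuilds a present PTE for the new page, carrying over the bits that survived in the swap-format PTE. A simplified sketch of that reconstruction; the writable/read-only decision taken from the migration entry is elided:

    pte = pte_mkold(mk_pte(new, READ_ONCE(vma->vm_page_prot)));
    if (pte_swp_soft_dirty(*pvmw.pte))
            pte = pte_mksoft_dirty(pte);
    if (pte_swp_uffd_wp(*pvmw.pte))
            pte = pte_mkuffd_wp(pte);
    set_pte_at(vma->vm_mm, pvmw.address, pvmw.pte, pte);
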
huge_memory.c
3162 void set_pmd_migration_entry(struct page_vma_mapped_walk *pvmw, in set_pmd_migration_entry() argument
3165 struct vm_area_struct *vma = pvmw->vma; in set_pmd_migration_entry()
3167 unsigned long address = pvmw->address; in set_pmd_migration_entry()
3172 if (!(pvmw->pmd && !pvmw->pte)) in set_pmd_migration_entry()
3176 pmdval = pmdp_invalidate(vma, address, pvmw->pmd); in set_pmd_migration_entry()
3186 set_pmd_at(mm, address, pvmw->pmd, pmdswp); in set_pmd_migration_entry()
3191 void remove_migration_pmd(struct page_vma_mapped_walk *pvmw, struct page *new) in remove_migration_pmd() argument
3193 struct vm_area_struct *vma = pvmw->vma; in remove_migration_pmd()
3195 unsigned long address = pvmw->address; in remove_migration_pmd()
3200 if (!(pvmw->pmd && !pvmw->pte)) in remove_migration_pmd()
[all …]
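
set_pmd_migration_entry() swaps a mapped PMD for a migration entry in two steps: invalidate the live PMD, then install a swap-format PMD built from the page. A sketch under the older helper naming (newer kernels split make_migration_entry() into readable/writable variants); mm is vma->vm_mm:

    pmdval = pmdp_invalidate(vma, address, pvmw->pmd);
    if (pmd_dirty(pmdval))
            set_page_dirty(page);
    entry = make_migration_entry(page, pmd_write(pmdval));
    pmdswp = swp_entry_to_pmd(entry);
    if (pmd_soft_dirty(pmdval))
            pmdswp = pmd_swp_mksoft_dirty(pmdswp);
    set_pmd_at(mm, address, pvmw->pmd, pmdswp);
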
vmscan.c
4328 void lru_gen_look_around(struct page_vma_mapped_walk *pvmw) in lru_gen_look_around() argument
4338 struct page *page = pvmw->page; in lru_gen_look_around()
4346 lockdep_assert_held(pvmw->ptl); in lru_gen_look_around()
4349 if (spin_is_contended(pvmw->ptl)) in lru_gen_look_around()
4355 start = max(pvmw->address & PMD_MASK, pvmw->vma->vm_start); in lru_gen_look_around()
4356 end = min(pvmw->address | ~PMD_MASK, pvmw->vma->vm_end - 1) + 1; in lru_gen_look_around()
4359 if (pvmw->address - start < MIN_LRU_BATCH * PAGE_SIZE / 2) in lru_gen_look_around()
4361 else if (end - pvmw->address < MIN_LRU_BATCH * PAGE_SIZE / 2) in lru_gen_look_around()
4364 start = pvmw->address - MIN_LRU_BATCH * PAGE_SIZE / 2; in lru_gen_look_around()
4365 end = pvmw->address + MIN_LRU_BATCH * PAGE_SIZE / 2; in lru_gen_look_around()
[all …]
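
lru_gen_look_around() derives its scan window from the faulting address: clamp to the enclosing PMD range and the VMA, then cap the result at MIN_LRU_BATCH pages while keeping the address inside the window. The clamping logic, reconstructed from the hits above with the outer size check filled in from context:

    start = max(pvmw->address & PMD_MASK, pvmw->vma->vm_start);
    end = min(pvmw->address | ~PMD_MASK, pvmw->vma->vm_end - 1) + 1;

    if (end - start > MIN_LRU_BATCH * PAGE_SIZE) {
            if (pvmw->address - start < MIN_LRU_BATCH * PAGE_SIZE / 2)
                    end = start + MIN_LRU_BATCH * PAGE_SIZE;
            else if (end - pvmw->address < MIN_LRU_BATCH * PAGE_SIZE / 2)
                    start = end - MIN_LRU_BATCH * PAGE_SIZE;
            else {
                    start = pvmw->address - MIN_LRU_BATCH * PAGE_SIZE / 2;
                    end = pvmw->address + MIN_LRU_BATCH * PAGE_SIZE / 2;
            }
    }
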
/mm/damon/
paddr.c
22 struct page_vma_mapped_walk pvmw = { in __damon_pa_mkold() local
28 while (page_vma_mapped_walk(&pvmw)) { in __damon_pa_mkold()
29 addr = pvmw.address; in __damon_pa_mkold()
30 if (pvmw.pte) in __damon_pa_mkold()
31 damon_ptep_mkold(pvmw.pte, vma, addr); in __damon_pa_mkold()
33 damon_pmdp_mkold(pvmw.pmd, vma, addr); in __damon_pa_mkold()
96 struct page_vma_mapped_walk pvmw = { in __damon_pa_young() local
104 while (page_vma_mapped_walk(&pvmw)) { in __damon_pa_young()
105 addr = pvmw.address; in __damon_pa_young()
106 if (pvmw.pte) { in __damon_pa_young()
[all …]
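
__damon_pa_mkold() is the same pte/pmd dispatch once more, handed off to DAMON's own mkold helpers. The loop, with the else branch elided above reconstructed:

    while (page_vma_mapped_walk(&pvmw)) {
            addr = pvmw.address;
            if (pvmw.pte)
                    damon_ptep_mkold(pvmw.pte, vma, addr);
            else
                    damon_pmdp_mkold(pvmw.pmd, vma, addr);
    }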