Searched refs:last (Results 1 – 10 of 10) sorted by relevance
/mm/
interval_tree.c
     34: unsigned long last = vma_last_pgoff(node);  [in vma_interval_tree_insert_after(), local]
     44: if (parent->shared.rb_subtree_last < last)  [in vma_interval_tree_insert_after()]
     45: parent->shared.rb_subtree_last = last;  [in vma_interval_tree_insert_after()]
     49: if (parent->shared.rb_subtree_last < last)  [in vma_interval_tree_insert_after()]
     50: parent->shared.rb_subtree_last = last;  [in vma_interval_tree_insert_after()]
     55: node->shared.rb_subtree_last = last;  [in vma_interval_tree_insert_after()]
     93: unsigned long first, unsigned long last)  [in anon_vma_interval_tree_iter_first(), argument]
     95: return __anon_vma_interval_tree_iter_first(root, first, last);  [in anon_vma_interval_tree_iter_first()]
    100: unsigned long first, unsigned long last)  [in anon_vma_interval_tree_iter_next(), argument]
    102: return __anon_vma_interval_tree_iter_next(node, first, last);  [in anon_vma_interval_tree_iter_next()]
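The vma_interval_tree_insert_after() hits above maintain the rb-tree augmentation: each node caches the largest interval endpoint found anywhere in its subtree (rb_subtree_last), so when a node with a bigger "last" is added below, the cached maxima on the path above it have to be raised. A minimal userspace sketch of that update step, using a plain binary tree and invented names (itnode, insert_under) rather than the kernel's rbtree code:

#include <stdio.h>

/* Simplified interval node: [start, last] plus a cached subtree maximum. */
struct itnode {
    unsigned long start, last;
    unsigned long subtree_last;     /* max "last" anywhere in this subtree */
    struct itnode *left, *right, *parent;
};

/*
 * Hang "node" under "where" and raise the cached maxima above it, which is
 * the invariant vma_interval_tree_insert_after() keeps for rb_subtree_last:
 * stop as soon as an ancestor already covers the new value, because its own
 * ancestors must then cover it too.
 */
static void insert_under(struct itnode *where, struct itnode *node)
{
    struct itnode *p;

    node->parent = where;
    if (!where->left)
        where->left = node;
    else
        where->right = node;

    node->subtree_last = node->last;
    for (p = where; p; p = p->parent) {
        if (p->subtree_last >= node->last)
            break;
        p->subtree_last = node->last;
    }
}

int main(void)
{
    struct itnode root = { .start = 0, .last = 10, .subtree_last = 10 };
    struct itnode child = { .start = 4, .last = 25 };

    insert_under(&root, &child);
    printf("root subtree_last = %lu\n", root.subtree_last);    /* 25 */
    return 0;
}

In the kernel the same invariant is also preserved across rotations by the interval-tree augmentation callbacks; the sketch only shows the insert-time update visible in the matches above.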
hmm.c
     31: unsigned long last;  [member]
     69: hmm_vma_walk->last = addr;  [in hmm_vma_fault()]
    277: hmm_vma_walk->last = addr;  [in hmm_vma_handle_pte()]
    340: hmm_vma_walk->last = addr;  [in hmm_vma_walk_pmd()]
    576: .last = range->start,  [in hmm_range_fault()]
    588: ret = walk_page_range(mm, hmm_vma_walk.last, range->end,  [in hmm_range_fault()]
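These hmm.c matches show what the `last` member is for: it records the last virtual address the page-table walk has dealt with, and when a fault has to be serviced, hmm_range_fault() restarts walk_page_range() from hmm_vma_walk.last instead of from range->start. A rough userspace sketch of that resume-from-last retry pattern (walk_state, process_page and walk_range are invented for illustration, not HMM APIs):

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SZ 0x1000UL

struct walk_state {
    unsigned long last;     /* last address handled so far; the resume point */
};

/* Pretend to process one page; report a retryable "fault" on the first
 * visit to 0x3000 so the caller has something to retry. */
static bool process_page(unsigned long addr)
{
    static bool faulted;

    if (!faulted && addr == 0x3000) {
        faulted = true;
        return false;
    }
    return true;
}

/* Walk up to "end"; on a fault remember where we stopped in ws->last and
 * restart from there, the way hmm_range_fault() re-calls
 * walk_page_range(mm, hmm_vma_walk.last, range->end, ...). */
static void walk_range(struct walk_state *ws, unsigned long end)
{
    unsigned long addr;

again:
    for (addr = ws->last; addr < end; addr += PAGE_SZ) {
        if (!process_page(addr)) {
            ws->last = addr;
            printf("fault at %#lx, retrying from there\n", addr);
            goto again;
        }
        ws->last = addr + PAGE_SZ;
    }
}

int main(void)
{
    struct walk_state ws = { .last = 0x1000 };  /* .last starts at the range start */

    walk_range(&ws, 0x6000);
    printf("walk finished, last = %#lx\n", ws.last);
    return 0;
}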
nommu.c
    435: struct vm_region *region, *last;  [in validate_nommu_regions(), local]
    442: last = rb_entry(lastp, struct vm_region, vm_rb);  [in validate_nommu_regions()]
    443: BUG_ON(last->vm_end <= last->vm_start);  [in validate_nommu_regions()]
    444: BUG_ON(last->vm_top < last->vm_end);  [in validate_nommu_regions()]
    448: last = rb_entry(lastp, struct vm_region, vm_rb);  [in validate_nommu_regions()]
    452: BUG_ON(region->vm_start < last->vm_top);  [in validate_nommu_regions()]
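validate_nommu_regions() walks the region tree in address order with `last` holding the previously visited region, and BUG()s on a malformed region (vm_end <= vm_start, or vm_top < vm_end) or on overlap with its predecessor (vm_start < last->vm_top). The same invariant checks, written as a small userspace sketch over a sorted array (struct vm_region_lite is a simplified stand-in):

#include <assert.h>
#include <stddef.h>

struct vm_region_lite {
    unsigned long vm_start, vm_end, vm_top;   /* vm_top: end including slack */
};

/* Enforce the invariants validate_nommu_regions() checks: each region is
 * well formed and does not overlap the previous one in address order. */
static void validate_regions(const struct vm_region_lite *r, size_t n)
{
    const struct vm_region_lite *last = NULL;
    size_t i;

    for (i = 0; i < n; i++) {
        assert(r[i].vm_start < r[i].vm_end);        /* BUG_ON(vm_end <= vm_start) */
        assert(r[i].vm_end <= r[i].vm_top);         /* BUG_ON(vm_top < vm_end)    */
        if (last)
            assert(r[i].vm_start >= last->vm_top);  /* no overlap with predecessor */
        last = &r[i];
    }
}

int main(void)
{
    struct vm_region_lite regions[] = {
        { 0x1000, 0x3000, 0x4000 },
        { 0x4000, 0x6000, 0x6000 },
    };

    validate_regions(regions, 2);
    return 0;
}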
zswap.c
    484: struct zswap_pool *pool, *last = NULL;  [in zswap_pool_last_get(), local]
    489: last = pool;  [in zswap_pool_last_get()]
    490: WARN_ONCE(!last && zswap_has_pool,  [in zswap_pool_last_get()]
    492: if (!zswap_pool_get(last))  [in zswap_pool_last_get()]
    493: last = NULL;  [in zswap_pool_last_get()]
    497: return last;  [in zswap_pool_last_get()]
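zswap_pool_last_get() scans the pool list, keeps the final entry in `last`, warns once if the list is empty even though zswap believes a pool exists, and returns the pool only if a reference can still be taken. A simplified sketch of that grab-the-last-entry-under-a-refcount shape (plain singly linked list and a hypothetical pool_tryget(); not the kernel list or zswap API):

#include <stdbool.h>
#include <stdio.h>

struct pool {
    int refcount;
    struct pool *next;
};

/* Try to take a reference; fails if the pool is already being torn down. */
static bool pool_tryget(struct pool *p)
{
    if (!p || p->refcount <= 0)
        return false;
    p->refcount++;
    return true;
}

/* Return the last pool on the list with a reference held, or NULL,
 * mirroring the shape of zswap_pool_last_get(). */
static struct pool *pool_last_get(struct pool *head, bool has_pool)
{
    struct pool *p, *last = NULL;

    for (p = head; p; p = p->next)
        last = p;

    if (!last && has_pool)
        fprintf(stderr, "warning: no pool on the list but has_pool is set\n");

    if (!pool_tryget(last))
        last = NULL;

    return last;
}

int main(void)
{
    struct pool b = { .refcount = 1, .next = NULL };
    struct pool a = { .refcount = 1, .next = &b };

    struct pool *p = pool_last_get(&a, true);
    printf("got last pool %p (refcount %d)\n", (void *)p, p ? p->refcount : 0);
    return 0;
}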
mmap.c
   2938: struct vm_area_struct *vma, *prev, *last;  [in __do_munmap(), local]
   2991: last = find_vma(mm, end);  [in __do_munmap()]
   2992: if (last && end > last->vm_start) {  [in __do_munmap()]
   2993: int error = __split_vma(mm, last, end, 1);  [in __do_munmap()]
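In __do_munmap(), find_vma(mm, end) returns the first VMA whose vm_end lies above `end`; if that VMA also starts below `end` it straddles the end of the range being unmapped, so __split_vma() cuts it at `end` and only the lower piece is removed. A tiny sketch of the boundary test and the split arithmetic (vma_lite and split_at_end are illustrative, not the kernel's VMA code):

#include <stdio.h>

struct vma_lite {
    unsigned long vm_start, vm_end;   /* [vm_start, vm_end) */
};

/* If "end" falls strictly inside *vma, split it there: *vma keeps the
 * upper part and *lower gets the part that will be unmapped.  Returns 1
 * if a split happened, as __do_munmap() arranges before removing the range. */
static int split_at_end(struct vma_lite *vma, unsigned long end,
                        struct vma_lite *lower)
{
    if (!(end > vma->vm_start && end < vma->vm_end))
        return 0;                      /* boundary does not cut this VMA */

    lower->vm_start = vma->vm_start;
    lower->vm_end = end;
    vma->vm_start = end;
    return 1;
}

int main(void)
{
    struct vma_lite last = { 0x1000, 0x5000 }, lower;

    if (split_at_end(&last, 0x3000, &lower))
        printf("unmap [%#lx,%#lx), keep [%#lx,%#lx)\n",
               lower.vm_start, lower.vm_end, last.vm_start, last.vm_end);
    return 0;
}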
Kconfig.debug
     25: with stack traces of last allocation and freeing of the page, when
vmscan.c
   2940: static void reset_mm_stats(struct lruvec *lruvec, struct lru_gen_mm_walk *walk, bool last)  [in reset_mm_stats(), argument]
   2957: if (NR_HIST_GENS > 1 && last) {  [in reset_mm_stats()]
   2993: bool last = false;  [in iterate_mm_list(), local]
   3026: last = true;  [in iterate_mm_list()]
   3041: if (*iter || last)  [in iterate_mm_list()]
   3042: reset_mm_stats(lruvec, walk, last);  [in iterate_mm_list()]
   3054: return last;  [in iterate_mm_list()]
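In the multi-gen LRU code, iterate_mm_list() hands out one mm at a time and sets `last` when it gives out the final one for the current generation; reset_mm_stats() is then called with that flag (and, when NR_HIST_GENS > 1, the stat history rotates), and the flag is also returned to the caller. A stripped-down sketch of the iterate-and-flag-the-last-item pattern, using an array and invented names (struct walk, iterate_items) instead of the kernel's mm list:

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

#define NR_ITEMS 3

struct walk {
    int next;                       /* index of the next item to hand out */
    unsigned long stats[NR_ITEMS];  /* per-generation stats, cleared at the end */
};

/* Hand out the next item through *iter (NULL once exhausted) and return
 * true exactly once, for the final item, the way iterate_mm_list()
 * returns its "last" flag.  The memset stands in for reset_mm_stats(). */
static bool iterate_items(struct walk *w, const char *items[], const char **iter)
{
    bool last = false;

    if (w->next < NR_ITEMS) {
        *iter = items[w->next++];
        if (w->next == NR_ITEMS)
            last = true;
    } else {
        *iter = NULL;
    }

    if (last)
        memset(w->stats, 0, sizeof(w->stats));

    return last;
}

int main(void)
{
    const char *items[] = { "mm0", "mm1", "mm2" };
    struct walk w = { 0 };
    const char *iter;
    bool last;

    do {
        last = iterate_items(&w, items, &iter);
        if (iter)
            printf("walking %s%s\n", iter, last ? " (last)" : "");
    } while (iter && !last);
    return 0;
}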
mmu_notifier.c
    959: &interval_sub->interval_tree.last))  [in __mmu_interval_notifier_insert()]
memcontrol.c
   1280: struct mem_cgroup *last;  [in invalidate_reclaim_iterators(), local]
   1284: last = memcg;  [in invalidate_reclaim_iterators()]
   1293: if (last != root_mem_cgroup)  [in invalidate_reclaim_iterators()]
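invalidate_reclaim_iterators() climbs the memcg parent chain from a dying cgroup, invalidating the per-node reclaim iterators at each level while remembering the last level visited; if that walk never reached root_mem_cgroup, the root's iterators are invalidated separately. A toy version of that parent-chain walk (cgroup_lite and iter_valid are made up; the real code touches per-node iterator arrays):

#include <stdio.h>

struct cgroup_lite {
    const char *name;
    struct cgroup_lite *parent;
    int iter_valid;                 /* stand-in for the reclaim iterator state */
};

/* Walk from "dead" up the parent chain, clearing iterators and remembering
 * the last level visited; if the chain never reached the root (e.g. the
 * cgroup was not fully linked), clear the root too, as the
 * "if (last != root_mem_cgroup)" check in the real function does. */
static void invalidate_up(struct cgroup_lite *dead, struct cgroup_lite *root)
{
    struct cgroup_lite *memcg, *last = NULL;

    for (memcg = dead; memcg; memcg = memcg->parent) {
        memcg->iter_valid = 0;
        last = memcg;
    }

    if (last != root)
        root->iter_valid = 0;
}

int main(void)
{
    struct cgroup_lite root = { "root", NULL, 1 };
    struct cgroup_lite mid  = { "mid",  NULL, 1 };  /* not linked under root */
    struct cgroup_lite leaf = { "leaf", &mid, 1 };

    invalidate_up(&leaf, &root);
    printf("root iter_valid = %d\n", root.iter_valid);  /* 0: root handled separately */
    return 0;
}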
/mm/damon/
vaddr.c
    323: struct damon_region *first = NULL, *last;  [in damon_va_apply_three_regions(), local]
    333: last = r;  [in damon_va_apply_three_regions()]
    350: last->ar.end = ALIGN(br->end, DAMON_MIN_REGION);  [in damon_va_apply_three_regions()]
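damon_va_apply_three_regions() finds the first and last existing DAMON regions intersecting each of the three target address ranges and then stretches last->ar.end to the target boundary, rounded up with ALIGN() to a DAMON_MIN_REGION multiple. A small sketch of that final alignment step (ALIGN_UP here is the usual round-up-to-a-power-of-two idiom, standing in for the kernel's ALIGN(); the struct is simplified):

#include <stdio.h>

#define DAMON_MIN_REGION 4096UL
/* Round x up to the next multiple of a (a must be a power of two). */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

struct damon_region_lite {
    unsigned long start, end;
};

int main(void)
{
    struct damon_region_lite last = { 0x10000, 0x12345 };
    unsigned long target_end = 0x1f123;

    /* Stretch the last intersecting region out to the (aligned) target end,
     * like "last->ar.end = ALIGN(br->end, DAMON_MIN_REGION)" in vaddr.c. */
    last.end = ALIGN_UP(target_end, DAMON_MIN_REGION);
    printf("last region is now [%#lx, %#lx)\n", last.start, last.end);  /* end = 0x20000 */
    return 0;
}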