Lines matching refs: lru (cross-reference hits for the identifier lru in the kernel's page-reclaim code, mm/vmscan.c; each entry gives the source line number, the matching line, and the enclosing function)
142 if ((_page)->lru.prev != _base) { \
145 prev = lru_to_page(&(_page->lru)); \
156 if ((_page)->lru.prev != _base) { \
159 prev = lru_to_page(&(_page->lru)); \
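
The hits at lines 142-159 sit inside the prefetch helpers that peek at the page linked just before the current one on an LRU list, so it can be prefetched while the list is walked backwards. The mechanism behind lru_to_page() is container_of(): given a pointer to the embedded list_head, recover the page that contains it. Below is a minimal, self-contained sketch of that idea; struct page here is a toy stand-in, not the kernel's real structure.

        #include <stddef.h>
        #include <stdio.h>

        /* Minimal doubly linked list node, shaped like the kernel's struct list_head. */
        struct list_head {
                struct list_head *prev, *next;
        };

        /* Recover the containing structure from a pointer to its embedded member. */
        #define container_of(ptr, type, member) \
                ((type *)((char *)(ptr) - offsetof(type, member)))

        /* Toy stand-in for struct page: only the LRU linkage and an id. */
        struct page {
                int id;
                struct list_head lru;
        };

        /* Same idea as lru_to_page(): the page whose lru node sits before *head. */
        #define prev_page(head) container_of((head)->prev, struct page, lru)

        int main(void)
        {
                struct page a = { .id = 1 }, b = { .id = 2 };
                struct list_head base = { &base, &base };

                /* Link base <-> a <-> b <-> base by hand. */
                base.next = &a.lru;  a.lru.prev = &base;
                a.lru.next = &b.lru; b.lru.prev = &a.lru;
                b.lru.next = &base;  base.prev = &b.lru;

                /*
                 * Walking backwards from b: check that the previous node is not
                 * the list head before dereferencing it (the guard at line 142),
                 * then fetch the previous page.
                 */
                if (b.lru.prev != &base)
                        printf("page before id=%d is id=%d\n",
                               b.id, prev_page(&b.lru)->id);
                return 0;
        }

The guard against the list head matters because the head is not embedded in a page, so container_of() applied to it would point at unrelated memory.
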
355 unsigned long lruvec_lru_size(struct lruvec *lruvec, enum lru_list lru, int zone_idx) in lruvec_lru_size() argument
362 lru_size += mem_cgroup_get_zone_lru_size(lruvec, lru, zid); in lruvec_lru_size()
364 lru_size = node_page_state(lruvec_pgdat(lruvec), NR_LRU_BASE + lru); in lruvec_lru_size()
374 size = mem_cgroup_get_zone_lru_size(lruvec, lru, zid); in lruvec_lru_size()
377 NR_ZONE_LRU_BASE + lru); in lruvec_lru_size()
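
lruvec_lru_size() (line 355) reports how many pages an LRU list holds in the zones a given reclaim pass is allowed to touch: it first totals the list (per-memcg counters at line 362, or the node-wide vmstat counter at line 364), then subtracts the population of every zone above zone_idx (lines 374-377). A rough standalone model of that shape, with a plain array standing in for the real counters:

        #include <stdio.h>

        #define NR_ZONES 4      /* stand-in for MAX_NR_ZONES */
        #define min(a, b) ((a) < (b) ? (a) : (b))

        /*
         * Per-zone page counts for one LRU list; in the kernel these come from
         * per-memcg or per-zone vmstat counters, here they are just numbers.
         */
        static unsigned long zone_lru_pages[NR_ZONES] = { 100, 2000, 30000, 0 };

        /*
         * Model of lruvec_lru_size(): start from the whole list, then subtract
         * the zones above zone_idx that this reclaim pass may not touch.
         */
        static unsigned long lru_size_up_to(int zone_idx)
        {
                unsigned long lru_size = 0;
                int zid;

                for (zid = 0; zid < NR_ZONES; zid++)
                        lru_size += zone_lru_pages[zid];

                for (zid = zone_idx + 1; zid < NR_ZONES; zid++)
                        lru_size -= min(zone_lru_pages[zid], lru_size);

                return lru_size;
        }

        int main(void)
        {
                printf("eligible up to zone 1: %lu\n", lru_size_up_to(1));  /* 2100 */
                printf("whole node:            %lu\n", lru_size_up_to(NR_ZONES - 1));
                return 0;
        }
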
1152 list_del(&page->lru); in shrink_page_list()
1271 list_add_tail(&page->lru, page_list); in shrink_page_list()
1500 list_add(&page->lru, &free_pages); in shrink_page_list()
1527 list_add(&page->lru, &ret_pages); in shrink_page_list()
1556 list_for_each_entry_safe(page, next, page_list, lru) { in reclaim_clean_pages_from_list()
1560 list_move(&page->lru, &clean_pages); in reclaim_clean_pages_from_list()
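
The shrink_page_list() and reclaim_clean_pages_from_list() hits (1152-1560) are all the same idiom: unlink a page from one list and park it on another (page_list, free_pages, ret_pages, clean_pages) via list_del()/list_add()/list_move(). Because entries are re-linked while the loop is still running, the iteration at line 1556 uses the _safe variant, which remembers the next entry before the current one can move. A small self-contained imitation of the pattern, using a hand-rolled list in place of <linux/list.h> and a made-up dirty flag in place of the real page checks:

        #include <stdio.h>
        #include <stddef.h>

        struct list_head { struct list_head *prev, *next; };

        #define LIST_INIT(name) { &(name), &(name) }
        #define container_of(ptr, type, member) \
                ((type *)((char *)(ptr) - offsetof(type, member)))

        static void list_del(struct list_head *n)
        {
                n->prev->next = n->next;
                n->next->prev = n->prev;
        }

        static void list_add_tail(struct list_head *n, struct list_head *head)
        {
                n->prev = head->prev;
                n->next = head;
                head->prev->next = n;
                head->prev = n;
        }

        /* list_move(): unlink from wherever it is and relink onto another list. */
        static void list_move_tail(struct list_head *n, struct list_head *head)
        {
                list_del(n);
                list_add_tail(n, head);
        }

        struct page { int id; int dirty; struct list_head lru; };

        int main(void)
        {
                struct list_head page_list   = LIST_INIT(page_list);
                struct list_head clean_pages = LIST_INIT(clean_pages);
                struct page pages[4] = { {0, 1}, {1, 0}, {2, 0}, {3, 1} };
                struct list_head *pos, *next;
                int i;

                for (i = 0; i < 4; i++)
                        list_add_tail(&pages[i].lru, &page_list);

                /*
                 * "Safe" iteration: grab ->next before possibly moving the
                 * current entry, the same reason the kernel uses
                 * list_for_each_entry_safe() at line 1556 ahead of the
                 * list_move() at line 1560.
                 */
                for (pos = page_list.next; pos != &page_list; pos = next) {
                        struct page *page = container_of(pos, struct page, lru);

                        next = pos->next;
                        if (!page->dirty)
                                list_move_tail(&page->lru, &clean_pages);
                }

                for (pos = clean_pages.next; pos != &clean_pages; pos = pos->next)
                        printf("clean page %d\n",
                               container_of(pos, struct page, lru)->id);
                return 0;
        }

A plain forward iteration would follow ->next of a node that has already been relinked onto clean_pages and silently hop lists mid-walk.
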
1654 enum lru_list lru, unsigned long *nr_zone_taken) in update_lru_sizes() argument
1662 __update_lru_size(lruvec, lru, zid, -nr_zone_taken[zid]); in update_lru_sizes()
1664 mem_cgroup_update_lru_size(lruvec, lru, zid, -nr_zone_taken[zid]); in update_lru_sizes()
1693 enum lru_list lru) in isolate_lru_pages() argument
1695 struct list_head *src = &lruvec->lists[lru]; in isolate_lru_pages()
1718 list_move(&page->lru, &pages_skipped); in isolate_lru_pages()
1738 list_move(&page->lru, dst); in isolate_lru_pages()
1743 list_move(&page->lru, src); in isolate_lru_pages()
1772 total_scan, skipped, nr_taken, mode, lru); in isolate_lru_pages()
1773 update_lru_sizes(lruvec, lru, nr_zone_taken); in isolate_lru_pages()
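
isolate_lru_pages() (1693-1773) pulls a bounded batch off one lruvec list: eligible pages are moved to the caller's dst list (line 1738), pages from zones above the reclaim ceiling are diverted to pages_skipped (line 1718) and later spliced back (line 1743), and the number taken from each zone is recorded for update_lru_sizes() (line 1773). The sketch below keeps only that bookkeeping, using plain arrays instead of the kernel's linked lists; names and limits are invented for illustration:

        #include <stdio.h>

        #define NR_ZONES 4

        struct page { int id; int zone; };

        /*
         * Scan up to nr_to_scan pages, "taking" those whose zone is within
         * reclaim_idx and counting how many were taken from each zone, the
         * same accounting isolate_lru_pages() hands to update_lru_sizes().
         */
        static unsigned long isolate_batch(const struct page *src, int nr_pages,
                                           int nr_to_scan, int reclaim_idx,
                                           unsigned long nr_zone_taken[NR_ZONES])
        {
                unsigned long nr_taken = 0, nr_skipped = 0;
                int i;

                for (i = 0; i < nr_pages && i < nr_to_scan; i++) {
                        if (src[i].zone > reclaim_idx) {
                                nr_skipped++;           /* would go to pages_skipped */
                                continue;
                        }
                        nr_zone_taken[src[i].zone]++;   /* would list_move() to dst */
                        nr_taken++;
                }

                printf("taken %lu, skipped %lu\n", nr_taken, nr_skipped);
                return nr_taken;
        }

        int main(void)
        {
                struct page lru[] = { {0, 0}, {1, 2}, {2, 1}, {3, 3}, {4, 1} };
                unsigned long nr_zone_taken[NR_ZONES] = { 0 };
                int zid;

                isolate_batch(lru, 5, 32, /*reclaim_idx=*/1, nr_zone_taken);
                for (zid = 0; zid < NR_ZONES; zid++)
                        printf("zone %d: %lu taken\n", zid, nr_zone_taken[zid]);
                return 0;
        }
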
1817 int lru = page_lru(page); in isolate_lru_page() local
1820 del_page_from_lru_list(page, lruvec, lru); in isolate_lru_page()
1892 enum lru_list lru; in move_pages_to_lru() local
1898 list_del(&page->lru); in move_pages_to_lru()
1907 lru = page_lru(page); in move_pages_to_lru()
1910 update_lru_size(lruvec, lru, page_zonenum(page), nr_pages); in move_pages_to_lru()
1911 list_move(&page->lru, &lruvec->lists[lru]); in move_pages_to_lru()
1916 del_page_from_lru_list(page, lruvec, lru); in move_pages_to_lru()
1923 list_add(&page->lru, &pages_to_free); in move_pages_to_lru()
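
move_pages_to_lru() (1892-1923) is the putback side: each previously isolated page either returns to the matching lruvec list, with the list size credited first (lines 1910-1911), or, if the last reference went away in the meantime, is pulled off the LRU and collected for freeing (lines 1916-1923). A compressed model of that per-page decision, with a plain integer refcount standing in for put_page_testzero():

        #include <stdio.h>

        struct page { int id; int refcount; int nr_pages; };

        enum where { TO_LRU, TO_FREE };

        /*
         * Decide what happens to one isolated page on putback: drop the
         * isolation reference; if nothing else holds the page, free it,
         * otherwise put it back on its LRU list and credit the list size.
         */
        static enum where putback_one(struct page *page, unsigned long *lru_size)
        {
                if (--page->refcount == 0)
                        return TO_FREE;         /* would land on pages_to_free */

                *lru_size += page->nr_pages;    /* update_lru_size() */
                return TO_LRU;                  /* list_move() back onto the lruvec */
        }

        int main(void)
        {
                struct page pages[] = { {0, 2, 1}, {1, 1, 1}, {2, 3, 512} };
                unsigned long lru_size = 0;
                int i;

                for (i = 0; i < 3; i++)
                        printf("page %d -> %s\n", pages[i].id,
                               putback_one(&pages[i], &lru_size) == TO_LRU ?
                               "lru" : "free");
                printf("lru size now %lu pages\n", lru_size);
                return 0;
        }
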
1956 struct scan_control *sc, enum lru_list lru) in shrink_inactive_list() argument
1963 int file = is_file_lru(lru); in shrink_inactive_list()
1987 &nr_scanned, sc, lru); in shrink_inactive_list()
2053 enum lru_list lru) in shrink_active_list() argument
2065 int file = is_file_lru(lru); in shrink_active_list()
2073 &nr_scanned, sc, lru); in shrink_active_list()
2086 list_del(&page->lru); in shrink_active_list()
2115 list_add(&page->lru, &l_active); in shrink_active_list()
2122 list_add(&page->lru, &l_inactive); in shrink_active_list()
2178 list_move(&page->lru, &node_page_list); in reclaim_pages()
2188 list_del(&page->lru); in reclaim_pages()
2202 list_del(&page->lru); in reclaim_pages()
2284 static unsigned long shrink_list(enum lru_list lru, unsigned long nr_to_scan, in shrink_list() argument
2287 if (is_active_lru(lru)) { in shrink_list()
2288 if (inactive_list_is_low(lruvec, is_file_lru(lru), sc, true)) in shrink_list()
2289 shrink_active_list(nr_to_scan, lruvec, sc, lru); in shrink_list()
2293 return shrink_inactive_list(nr_to_scan, lruvec, sc, lru); in shrink_list()
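
shrink_list() (2284-2293) is only a dispatcher: an active list is shrunk, that is, aged into its inactive list, only when the corresponding inactive list is judged too small; everything else goes to shrink_inactive_list(). The lru indices it tests follow the kernel's enum layout, in which LRU_ACTIVE and LRU_FILE are offsets added to LRU_BASE, the same arithmetic visible at lines 2655-2677 further down. A self-contained sketch of that layout and the dispatch; inactive_is_low() and the two shrinkers are stubs, not the real functions:

        #include <stdio.h>
        #include <stdbool.h>

        /* Mirrors the kernel's enum lru_list layout: base + FILE/ACTIVE offsets. */
        enum { LRU_BASE = 0, LRU_ACTIVE = 1, LRU_FILE = 2 };
        enum lru_list {
                LRU_INACTIVE_ANON = LRU_BASE,
                LRU_ACTIVE_ANON   = LRU_BASE + LRU_ACTIVE,
                LRU_INACTIVE_FILE = LRU_BASE + LRU_FILE,
                LRU_ACTIVE_FILE   = LRU_BASE + LRU_FILE + LRU_ACTIVE,
        };

        static bool is_active_lru(enum lru_list lru)
        {
                return lru == LRU_ACTIVE_ANON || lru == LRU_ACTIVE_FILE;
        }

        static bool is_file_lru(enum lru_list lru)
        {
                return lru == LRU_INACTIVE_FILE || lru == LRU_ACTIVE_FILE;
        }

        /* Stand-ins for the real shrinkers and for inactive_list_is_low(). */
        static bool inactive_is_low(bool file) { return file; }
        static unsigned long shrink_inactive(enum lru_list lru) { return 32; }
        static void shrink_active(enum lru_list lru) { }

        /* Dispatch in the shape of shrink_list() at line 2284. */
        static unsigned long do_shrink_list(enum lru_list lru)
        {
                if (is_active_lru(lru)) {
                        if (inactive_is_low(is_file_lru(lru)))
                                shrink_active(lru); /* ages pages, reclaims nothing */
                        return 0;
                }
                return shrink_inactive(lru);
        }

        int main(void)
        {
                printf("active file: %lu reclaimed\n",
                       do_shrink_list(LRU_ACTIVE_FILE));
                printf("inactive anon: %lu reclaimed\n",
                       do_shrink_list(LRU_INACTIVE_ANON));
                return 0;
        }
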
2325 enum lru_list lru; in get_scan_count() local
2466 for_each_evictable_lru(lru) { in get_scan_count()
2467 int file = is_file_lru(lru); in get_scan_count()
2472 lruvec_size = lruvec_lru_size(lruvec, lru, sc->reclaim_idx); in get_scan_count()
2573 nr[lru] = scan; in get_scan_count()
2587 enum lru_list lru; in shrink_node_memcg() local
2618 for_each_evictable_lru(lru) { in shrink_node_memcg()
2619 if (nr[lru]) { in shrink_node_memcg()
2620 nr_to_scan = min(nr[lru], SWAP_CLUSTER_MAX); in shrink_node_memcg()
2621 nr[lru] -= nr_to_scan; in shrink_node_memcg()
2623 nr_reclaimed += shrink_list(lru, nr_to_scan, in shrink_node_memcg()
2655 lru = LRU_BASE; in shrink_node_memcg()
2660 lru = LRU_FILE; in shrink_node_memcg()
2665 nr[lru] = 0; in shrink_node_memcg()
2666 nr[lru + LRU_ACTIVE] = 0; in shrink_node_memcg()
2672 lru = (lru == LRU_FILE) ? LRU_BASE : LRU_FILE; in shrink_node_memcg()
2673 nr_scanned = targets[lru] - nr[lru]; in shrink_node_memcg()
2674 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_node_memcg()
2675 nr[lru] -= min(nr[lru], nr_scanned); in shrink_node_memcg()
2677 lru += LRU_ACTIVE; in shrink_node_memcg()
2678 nr_scanned = targets[lru] - nr[lru]; in shrink_node_memcg()
2679 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_node_memcg()
2680 nr[lru] -= min(nr[lru], nr_scanned); in shrink_node_memcg()
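
Lines 2655-2680 are the proportional-scan adjustment in shrink_node_memcg(): once enough has been reclaimed, the LRU type (anon or file) with less outstanding work stops being scanned entirely (nr[] zeroed at 2665-2667), and the other type's remaining targets are rebuilt from the original targets, scaled by (100 - percentage) and reduced by what was already scanned, for both its inactive and active list (2672-2680). Here is that arithmetic lifted out into a standalone helper; the array indices follow the LRU_BASE/LRU_ACTIVE/LRU_FILE layout above and the numbers in main() are arbitrary:

        #include <stdio.h>

        enum { LRU_BASE = 0, LRU_ACTIVE = 1, LRU_FILE = 2, NR_EVICTABLE = 4 };

        #define min(a, b) ((a) < (b) ? (a) : (b))

        /*
         * Rescale the remaining scan target for one LRU list:
         *   already scanned  = original target - remaining
         *   new total target = original target * (100 - percentage) / 100
         *   new remaining    = new total target - already scanned (not below 0)
         * This is the calculation at lines 2673-2675 and 2678-2680.
         */
        static void rescale(unsigned long nr[], const unsigned long targets[],
                            int lru, unsigned long percentage)
        {
                unsigned long nr_scanned = targets[lru] - nr[lru];

                nr[lru] = targets[lru] * (100 - percentage) / 100;
                nr[lru] -= min(nr[lru], nr_scanned);
        }

        int main(void)
        {
                /* Original scan targets and remaining counts, indexed like nr[]. */
                unsigned long targets[NR_EVICTABLE] = { 400, 100, 800, 200 };
                unsigned long nr[NR_EVICTABLE]      = { 300,  80, 500, 150 };
                unsigned long percentage = 40; /* share attributed to the smaller type */
                int lru = LRU_BASE;            /* suppose anon is the smaller side */

                /* Stop scanning the smaller type entirely (lines 2665-2667)... */
                nr[lru] = 0;
                nr[lru + LRU_ACTIVE] = 0;

                /* ...and scale back the other type's remaining targets. */
                lru = (lru == LRU_FILE) ? LRU_BASE : LRU_FILE;
                rescale(nr, targets, lru, percentage);
                rescale(nr, targets, lru + LRU_ACTIVE, percentage);

                printf("inactive file remaining: %lu\n", nr[LRU_FILE]);
                printf("active file remaining:   %lu\n", nr[LRU_FILE + LRU_ACTIVE]);
                return 0;
        }
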
4398 enum lru_list lru = page_lru_base_type(page); in check_move_unevictable_pages() local
4403 add_page_to_lru_list(page, lruvec, lru); in check_move_unevictable_pages()