Lines matching refs: lru  (cross-references to 'lru'; the functions listed below are from mm/vmscan.c)
139 if ((_page)->lru.prev != _base) { \
142 prev = lru_to_page(&(_page->lru)); \
153 if ((_page)->lru.prev != _base) { \
156 prev = lru_to_page(&(_page->lru)); \
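The two pairs above (139/142 and 153/156) look like the bodies of the prefetch_prev_lru_page()/prefetchw_prev_lru_page() helper macros: while a reclaim loop works its way along an LRU list, the page linked just before the current one is prefetched so the next iteration is less likely to stall on a cache miss. Below is a minimal user-space sketch of the same "prefetch the previous list entry" pattern; the struct layouts and names are stand-ins rather than the kernel's, and __builtin_prefetch() stands in for prefetch()/prefetchw().

#include <stdio.h>
#include <stddef.h>

/* Toy stand-ins for the kernel's struct list_head / struct page. */
struct list_head { struct list_head *next, *prev; };
struct page { unsigned long flags; struct list_head lru; };

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))
#define lru_to_page(head) container_of((head)->prev, struct page, lru)

/* Walk the list tail-to-head; prefetch the page linked *before* the
 * current one so its cache line is (hopefully) resident by the time
 * the next iteration touches it. */
static void scan_backwards(struct list_head *base)
{
        struct list_head *pos;

        for (pos = base->prev; pos != base; pos = pos->prev) {
                struct page *page = container_of(pos, struct page, lru);

                if (page->lru.prev != base) {
                        struct page *prev = lru_to_page(&page->lru);
                        __builtin_prefetch(&prev->flags);
                }
                printf("visiting page with flags %lu\n", page->flags);
        }
}

int main(void)
{
        struct list_head head = { &head, &head };
        struct page pages[4];

        for (int i = 0; i < 4; i++) {
                pages[i].flags = i;
                /* push to the tail of the list */
                pages[i].lru.prev = head.prev;
                pages[i].lru.next = &head;
                head.prev->next = &pages[i].lru;
                head.prev = &pages[i].lru;
        }
        scan_backwards(&head);
        return 0;
}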
352 unsigned long lruvec_lru_size(struct lruvec *lruvec, enum lru_list lru, int zone_idx) in lruvec_lru_size() argument
359 lru_size += mem_cgroup_get_zone_lru_size(lruvec, lru, zid); in lruvec_lru_size()
361 lru_size = node_page_state(lruvec_pgdat(lruvec), NR_LRU_BASE + lru); in lruvec_lru_size()
371 size = mem_cgroup_get_zone_lru_size(lruvec, lru, zid); in lruvec_lru_size()
374 NR_ZONE_LRU_BASE + lru); in lruvec_lru_size()
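lruvec_lru_size() (352-374) reports how many pages one LRU list holds, counting only the zones that the current reclaim is allowed to touch: with memcg enabled it sums per-zone memcg counters, otherwise it reads the node-wide vmstat counter, and either way the contribution of zones above zone_idx is excluded. A simplified, self-contained model of that "count only the eligible zones" idea (the zone counts and constants are invented):

#include <stdio.h>

#define MAX_NR_ZONES 4 /* stand-in for the kernel constant */

/* Toy per-zone page counts for one LRU list of one lruvec. */
static unsigned long zone_lru_size[MAX_NR_ZONES] = { 100, 2000, 30000, 0 };

/* Sum the LRU size over all zones reclaim may use, i.e. zones
 * 0..zone_idx inclusive - the same clamping idea as
 * lruvec_lru_size(lruvec, lru, sc->reclaim_idx). */
static unsigned long lru_size_upto(int zone_idx)
{
        unsigned long size = 0;

        for (int zid = 0; zid <= zone_idx && zid < MAX_NR_ZONES; zid++)
                size += zone_lru_size[zid];
        return size;
}

int main(void)
{
        printf("eligible up to zone 1: %lu\n", lru_size_upto(1)); /* 2100 */
        printf("eligible up to zone 3: %lu\n", lru_size_upto(3)); /* 32100 */
        return 0;
}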
1145 list_del(&page->lru); in shrink_page_list()
1264 list_add_tail(&page->lru, page_list); in shrink_page_list()
1493 list_add(&page->lru, &free_pages); in shrink_page_list()
1520 list_add(&page->lru, &ret_pages); in shrink_page_list()
1549 list_for_each_entry_safe(page, next, page_list, lru) { in reclaim_clean_pages_from_list()
1553 list_move(&page->lru, &clean_pages); in reclaim_clean_pages_from_list()
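reclaim_clean_pages_from_list() (1549/1553) walks the caller's page list with list_for_each_entry_safe() and list_move()s the clean pages onto a private clean_pages list; the _safe variant is what makes it legal to unlink the current entry mid-walk. The sketch below reproduces that filter-into-a-second-list pattern with a hand-rolled list_head; every name in it is illustrative, not the kernel API.

#include <stdio.h>
#include <stdbool.h>
#include <stddef.h>

/* Minimal stand-ins for the kernel's list primitives. */
struct list_head { struct list_head *next, *prev; };

#define LIST_HEAD_INIT(name) { &(name), &(name) }
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

static void list_del(struct list_head *e)
{
        e->prev->next = e->next;
        e->next->prev = e->prev;
}

static void list_add_tail(struct list_head *e, struct list_head *head)
{
        e->prev = head->prev;
        e->next = head;
        head->prev->next = e;
        head->prev = e;
}

static void list_move(struct list_head *e, struct list_head *head)
{
        list_del(e);
        list_add_tail(e, head); /* the kernel's list_move() adds at the head; tail is fine for the demo */
}

/* Toy page: just a "dirty" bit and the lru link. */
struct page { bool dirty; struct list_head lru; };

/* Walk page_list, moving every clean page onto clean_pages.  The
 * "next" cursor is remembered up front, so unlinking the current
 * entry does not break the traversal - the same reason
 * reclaim_clean_pages_from_list() uses list_for_each_entry_safe(). */
static int collect_clean(struct list_head *page_list, struct list_head *clean_pages)
{
        struct list_head *pos, *next;
        int moved = 0;

        for (pos = page_list->next, next = pos->next;
             pos != page_list;
             pos = next, next = pos->next) {
                struct page *page = container_of(pos, struct page, lru);

                if (!page->dirty) {
                        list_move(&page->lru, clean_pages);
                        moved++;
                }
        }
        return moved;
}

int main(void)
{
        struct list_head page_list = LIST_HEAD_INIT(page_list);
        struct list_head clean_pages = LIST_HEAD_INIT(clean_pages);
        struct page pages[5];

        for (int i = 0; i < 5; i++) {
                pages[i].dirty = (i % 2 == 0);
                list_add_tail(&pages[i].lru, &page_list);
        }
        printf("moved %d clean pages\n", collect_clean(&page_list, &clean_pages));
        return 0;
}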
1647 enum lru_list lru, unsigned long *nr_zone_taken) in update_lru_sizes() argument
1655 __update_lru_size(lruvec, lru, zid, -nr_zone_taken[zid]); in update_lru_sizes()
1657 mem_cgroup_update_lru_size(lruvec, lru, zid, -nr_zone_taken[zid]); in update_lru_sizes()
1686 enum lru_list lru) in isolate_lru_pages() argument
1688 struct list_head *src = &lruvec->lists[lru]; in isolate_lru_pages()
1711 list_move(&page->lru, &pages_skipped); in isolate_lru_pages()
1731 list_move(&page->lru, dst); in isolate_lru_pages()
1736 list_move(&page->lru, src); in isolate_lru_pages()
1765 total_scan, skipped, nr_taken, mode, lru); in isolate_lru_pages()
1766 update_lru_sizes(lruvec, lru, nr_zone_taken); in isolate_lru_pages()
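isolate_lru_pages() (1686-1765) pulls pages off the tail of the source list src: pages from zones above the reclaim index are parked on pages_skipped, isolatable pages are moved to dst, and failures are rotated back onto src; along the way it tallies how many pages came from each zone, and update_lru_sizes() (1647-1657) then subtracts those per-zone counts from the lruvec (and, with memcg enabled, from the memcg counters too). A toy model of that batched per-zone accounting (the zone layout and numbers are invented):

#include <stdio.h>

#define MAX_NR_ZONES 3 /* stand-in */

/* Per-zone size of one LRU list before isolation. */
static long zone_lru_size[MAX_NR_ZONES] = { 50, 400, 1200 };

int main(void)
{
        /* Zones of the pages pulled off the list tail in one batch. */
        int isolated_zone[] = { 2, 2, 1, 2, 1, 0 };
        long nr_zone_taken[MAX_NR_ZONES] = { 0 };

        /* First pass: tally per zone, like isolate_lru_pages() doing
         * nr_zone_taken[page_zonenum(page)] += nr_pages. */
        for (unsigned i = 0; i < sizeof(isolated_zone) / sizeof(isolated_zone[0]); i++)
                nr_zone_taken[isolated_zone[i]]++;

        /* Second pass: one subtraction per zone, like update_lru_sizes()
         * calling __update_lru_size(lruvec, lru, zid, -nr_zone_taken[zid]). */
        for (int zid = 0; zid < MAX_NR_ZONES; zid++) {
                if (!nr_zone_taken[zid])
                        continue;
                zone_lru_size[zid] -= nr_zone_taken[zid];
                printf("zone %d: took %ld, %ld left\n",
                       zid, nr_zone_taken[zid], zone_lru_size[zid]);
        }
        return 0;
}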
1810 int lru = page_lru(page); in isolate_lru_page() local
1813 del_page_from_lru_list(page, lruvec, lru); in isolate_lru_page()
1885 enum lru_list lru; in move_pages_to_lru() local
1891 list_del(&page->lru); in move_pages_to_lru()
1900 lru = page_lru(page); in move_pages_to_lru()
1903 update_lru_size(lruvec, lru, page_zonenum(page), nr_pages); in move_pages_to_lru()
1904 list_move(&page->lru, &lruvec->lists[lru]); in move_pages_to_lru()
1909 del_page_from_lru_list(page, lruvec, lru); in move_pages_to_lru()
1916 list_add(&page->lru, &pages_to_free); in move_pages_to_lru()
1949 struct scan_control *sc, enum lru_list lru) in shrink_inactive_list() argument
1956 int file = is_file_lru(lru); in shrink_inactive_list()
1980 &nr_scanned, sc, lru); in shrink_inactive_list()
2046 enum lru_list lru) in shrink_active_list() argument
2058 int file = is_file_lru(lru); in shrink_active_list()
2066 &nr_scanned, sc, lru); in shrink_active_list()
2079 list_del(&page->lru); in shrink_active_list()
2107 list_add(&page->lru, &l_active); in shrink_active_list()
2114 list_add(&page->lru, &l_inactive); in shrink_active_list()
2170 list_move(&page->lru, &node_page_list); in reclaim_pages()
2180 list_del(&page->lru); in reclaim_pages()
2194 list_del(&page->lru); in reclaim_pages()
2276 static unsigned long shrink_list(enum lru_list lru, unsigned long nr_to_scan, in shrink_list() argument
2279 if (is_active_lru(lru)) { in shrink_list()
2280 if (inactive_list_is_low(lruvec, is_file_lru(lru), sc, true)) in shrink_list()
2281 shrink_active_list(nr_to_scan, lruvec, sc, lru); in shrink_list()
2285 return shrink_inactive_list(nr_to_scan, lruvec, sc, lru); in shrink_list()
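shrink_list() (2276-2285) is the dispatcher: an active LRU is only aged, and only when inactive_list_is_low() says its inactive counterpart has shrunk too far, while actual reclaim (and the returned count) comes from shrink_inactive_list(). A user-space model of that dispatch shape, with all of the policy helpers stubbed out and every name invented:

#include <stdio.h>
#include <stdbool.h>

/* Toy versions of the kernel's LRU list indices. */
enum lru_list {
        LRU_INACTIVE_ANON,
        LRU_ACTIVE_ANON,
        LRU_INACTIVE_FILE,
        LRU_ACTIVE_FILE,
        NR_LRU_LISTS,
};

static bool is_active_lru(enum lru_list lru)
{
        return lru == LRU_ACTIVE_ANON || lru == LRU_ACTIVE_FILE;
}

/* Pretend "the inactive list is low" for file pages only. */
static bool inactive_list_is_low(bool file) { return file; }

static void age_active_list(unsigned long n)
{
        printf("aging %lu active pages\n", n);
}

static unsigned long reclaim_inactive(unsigned long n)
{
        printf("reclaiming from %lu inactive pages\n", n);
        return n / 2;
}

/* Same dispatch shape as shrink_list(): active lists are only aged
 * (and only when their inactive counterpart has become too small) and
 * never report reclaimed pages; real reclaim happens on the inactive
 * lists. */
static unsigned long shrink_one_list(enum lru_list lru, unsigned long nr_to_scan)
{
        if (is_active_lru(lru)) {
                bool file = (lru == LRU_ACTIVE_FILE);

                if (inactive_list_is_low(file))
                        age_active_list(nr_to_scan);
                return 0;
        }
        return reclaim_inactive(nr_to_scan);
}

int main(void)
{
        unsigned long total = 0;

        for (enum lru_list lru = 0; lru < NR_LRU_LISTS; lru++)
                total += shrink_one_list(lru, 32);
        printf("total reclaimed: %lu\n", total);
        return 0;
}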
2317 enum lru_list lru; in get_scan_count() local
2458 for_each_evictable_lru(lru) { in get_scan_count()
2459 int file = is_file_lru(lru); in get_scan_count()
2464 lruvec_size = lruvec_lru_size(lruvec, lru, sc->reclaim_idx); in get_scan_count()
2553 nr[lru] = scan; in get_scan_count()
2567 enum lru_list lru; in shrink_node_memcg() local
2598 for_each_evictable_lru(lru) { in shrink_node_memcg()
2599 if (nr[lru]) { in shrink_node_memcg()
2600 nr_to_scan = min(nr[lru], SWAP_CLUSTER_MAX); in shrink_node_memcg()
2601 nr[lru] -= nr_to_scan; in shrink_node_memcg()
2603 nr_reclaimed += shrink_list(lru, nr_to_scan, in shrink_node_memcg()
2635 lru = LRU_BASE; in shrink_node_memcg()
2640 lru = LRU_FILE; in shrink_node_memcg()
2645 nr[lru] = 0; in shrink_node_memcg()
2646 nr[lru + LRU_ACTIVE] = 0; in shrink_node_memcg()
2652 lru = (lru == LRU_FILE) ? LRU_BASE : LRU_FILE; in shrink_node_memcg()
2653 nr_scanned = targets[lru] - nr[lru]; in shrink_node_memcg()
2654 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_node_memcg()
2655 nr[lru] -= min(nr[lru], nr_scanned); in shrink_node_memcg()
2657 lru += LRU_ACTIVE; in shrink_node_memcg()
2658 nr_scanned = targets[lru] - nr[lru]; in shrink_node_memcg()
2659 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_node_memcg()
2660 nr[lru] -= min(nr[lru], nr_scanned); in shrink_node_memcg()
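Lines 2635-2660 are the proportional-reclaim fix-up in shrink_node_memcg(): once enough pages have been reclaimed, scanning of the smaller LRU type stops (its nr[] entries are zeroed) and the remaining targets of the other type are rescaled to the fraction of its original target still outstanding, minus whatever has already been scanned. The arithmetic of lines 2653-2655 (and the identical 2658-2660 for the active list) with concrete, invented numbers:

#include <stdio.h>

static unsigned long min_ul(unsigned long a, unsigned long b)
{
        return a < b ? a : b;
}

int main(void)
{
        /* Original scan target for one LRU list and how much of it is
         * still unscanned when nr_to_reclaim has been met. */
        unsigned long target = 1000;    /* targets[lru] */
        unsigned long remaining = 400;  /* nr[lru] */

        /* Percentage of the *other* (smaller) LRU type already scanned;
         * in shrink_node_memcg() this is nr_anon (or nr_file) * 100
         * divided by that type's combined original target. */
        unsigned long percentage = 70;

        /* Same three steps as lines 2653-2655 / 2658-2660: */
        unsigned long nr_scanned = target - remaining;   /* already scanned here */
        remaining = target * (100 - percentage) / 100;   /* shrink the target proportionally */
        remaining -= min_ul(remaining, nr_scanned);      /* credit the work already done */

        printf("rescaled remaining scan count: %lu\n", remaining);
        /* 1000 * 30% = 300 target, minus 600 already scanned -> 0 */
        return 0;
}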
4360 enum lru_list lru = page_lru_base_type(page); in check_move_unevictable_pages() local
4365 add_page_to_lru_list(page, lruvec, lru); in check_move_unevictable_pages()