Lines Matching refs:nr
Each line below is one reference to the identifier nr, prefixed with its line number in the source file (every function named here lives in mm/vmscan.c); the trailing annotation gives the enclosing function and, where applicable, the symbol kind (member, local, argument).
130 } nr; member
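Line 130 closes an anonymous struct member of struct scan_control. A plausible reconstruction of that member, inferred from the fields accumulated at lines 2029-2036 below (exact field types and order vary by kernel version):

	struct {
		unsigned int dirty;		/* isolated pages found dirty */
		unsigned int unqueued_dirty;	/* dirty, not yet queued for writeback */
		unsigned int congested;		/* dirty pages on a congested device */
		unsigned int writeback;		/* pages already under writeback */
		unsigned int immediate;		/* PageReclaim pages still in writeback */
		unsigned int file_taken;	/* file pages isolated from the LRU */
		unsigned int taken;		/* all pages isolated from the LRU */
	} nr;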
335 unsigned long nr; in zone_reclaimable_pages() local
337 nr = zone_page_state_snapshot(zone, NR_ZONE_INACTIVE_FILE) + in zone_reclaimable_pages()
340 nr += zone_page_state_snapshot(zone, NR_ZONE_INACTIVE_ANON) + in zone_reclaimable_pages()
343 return nr; in zone_reclaimable_pages()
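Lines 335-343 are zone_reclaimable_pages(), where nr is a plain accumulator. The whole function is short enough to reconstruct (a sketch matching the lines above; anon pages only count as reclaimable when swap is available):

	unsigned long zone_reclaimable_pages(struct zone *zone)
	{
		unsigned long nr;

		/* file pages are reclaimable unconditionally */
		nr = zone_page_state_snapshot(zone, NR_ZONE_INACTIVE_FILE) +
			zone_page_state_snapshot(zone, NR_ZONE_ACTIVE_FILE);
		/* anon pages are reclaimable only if there is swap to put them in */
		if (get_nr_swap_pages() > 0)
			nr += zone_page_state_snapshot(zone, NR_ZONE_INACTIVE_ANON) +
				zone_page_state_snapshot(zone, NR_ZONE_ACTIVE_ANON);

		return nr;
	}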
469 long nr; in do_shrink_slab() local
488 nr = atomic_long_xchg(&shrinker->nr_deferred[nid], 0); in do_shrink_slab()
490 total_scan = nr; in do_shrink_slab()
509 next_deferred = nr; in do_shrink_slab()
536 trace_mm_shrink_slab_start(shrinker, shrinkctl, nr, in do_shrink_slab()
588 trace_mm_shrink_slab_end(shrinker, nid, freed, nr, new_nr, total_scan); in do_shrink_slab()
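In do_shrink_slab() (lines 469-588), nr is the backlog of deferred scan work, claimed with an atomic exchange so that concurrent shrinker invocations cannot each pick up the same backlog. A sketch of the surrounding logic as it reads in this era of the kernel (overflow reporting elided):

	/*
	 * Claim the deferred count and zero it so that other concurrent
	 * shrinker invocations don't also do this scanning work.
	 */
	nr = atomic_long_xchg(&shrinker->nr_deferred[nid], 0);

	total_scan = nr;
	delta = freeable >> priority;	/* fraction of freeable objects to scan */
	delta *= 4;
	do_div(delta, shrinker->seeks);
	total_scan += delta;
	if (total_scan < 0) {		/* overflow: scan everything, re-defer nr */
		total_scan = freeable;
		next_deferred = nr;
	} else
		next_deferred = total_scan;

Both tracepoints (lines 536 and 588) record the original nr, so the deferred backlog stays visible from the start of the call to its end.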
2029 sc->nr.dirty += stat.nr_dirty; in shrink_inactive_list()
2030 sc->nr.congested += stat.nr_congested; in shrink_inactive_list()
2031 sc->nr.unqueued_dirty += stat.nr_unqueued_dirty; in shrink_inactive_list()
2032 sc->nr.writeback += stat.nr_writeback; in shrink_inactive_list()
2033 sc->nr.immediate += stat.nr_immediate; in shrink_inactive_list()
2034 sc->nr.taken += nr_taken; in shrink_inactive_list()
2036 sc->nr.file_taken += nr_taken; in shrink_inactive_list()
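Lines 2029-2036 are the tail of shrink_inactive_list(), folding the per-call reclaim statistics into sc->nr so the caller can act on them (see shrink_node() below). Line 2035 is absent from this listing because it contains no nr token; from the source it is the file check guarding file_taken:

	sc->nr.taken += nr_taken;
	if (file)				/* line 2035: no "nr" reference */
		sc->nr.file_taken += nr_taken;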
2305 struct scan_control *sc, unsigned long *nr, in get_scan_count() argument
2553 nr[lru] = scan; in get_scan_count()
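get_scan_count() (lines 2305 and 2553) takes nr as an output array and writes one scan target per evictable LRU list. Schematically (a loose sketch; the real function derives scan from list size, reclaim priority, and the chosen scan_balance policy):

	enum lru_list lru;

	for_each_evictable_lru(lru) {
		unsigned long size, scan;

		size = lruvec_lru_size(lruvec, lru, sc->reclaim_idx);
		scan = size >> sc->priority;
		/* ... adjusted per scan_balance: SCAN_EQUAL, SCAN_FRACT,
		 * SCAN_FILE or SCAN_ANON ... */
		nr[lru] = scan;
	}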
2564 unsigned long nr[NR_LRU_LISTS]; in shrink_node_memcg() local
2573 get_scan_count(lruvec, memcg, sc, nr, lru_pages); in shrink_node_memcg()
2576 memcpy(targets, nr, sizeof(nr)); in shrink_node_memcg()
2593 while (nr[LRU_INACTIVE_ANON] || nr[LRU_ACTIVE_FILE] || in shrink_node_memcg()
2594 nr[LRU_INACTIVE_FILE]) { in shrink_node_memcg()
2599 if (nr[lru]) { in shrink_node_memcg()
2600 nr_to_scan = min(nr[lru], SWAP_CLUSTER_MAX); in shrink_node_memcg()
2601 nr[lru] -= nr_to_scan; in shrink_node_memcg()
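Lines 2593-2601 form shrink_node_memcg()'s main loop: keep going while any list with a target still has pages to scan, taking at most SWAP_CLUSTER_MAX per list per pass. Filled out (a reconstruction; shrink_list()'s exact signature varies slightly across versions, and LRU_ACTIVE_ANON is deliberately missing from the condition because active anon is only aged, never reclaimed directly):

	while (nr[LRU_INACTIVE_ANON] || nr[LRU_ACTIVE_FILE] ||
					nr[LRU_INACTIVE_FILE]) {
		unsigned long nr_anon, nr_file, percentage;
		unsigned long nr_scanned;

		for_each_evictable_lru(lru) {
			if (nr[lru]) {
				nr_to_scan = min(nr[lru], SWAP_CLUSTER_MAX);
				nr[lru] -= nr_to_scan;

				nr_reclaimed += shrink_list(lru, nr_to_scan,
							    lruvec, memcg, sc);
			}
		}
		/* ... proportionality fix-up, reconstructed below ... */
	}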
2620 nr_file = nr[LRU_INACTIVE_FILE] + nr[LRU_ACTIVE_FILE]; in shrink_node_memcg()
2621 nr_anon = nr[LRU_INACTIVE_ANON] + nr[LRU_ACTIVE_ANON]; in shrink_node_memcg()
2645 nr[lru] = 0; in shrink_node_memcg()
2646 nr[lru + LRU_ACTIVE] = 0; in shrink_node_memcg()
2653 nr_scanned = targets[lru] - nr[lru]; in shrink_node_memcg()
2654 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_node_memcg()
2655 nr[lru] -= min(nr[lru], nr_scanned); in shrink_node_memcg()
2658 nr_scanned = targets[lru] - nr[lru]; in shrink_node_memcg()
2659 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_node_memcg()
2660 nr[lru] -= min(nr[lru], nr_scanned); in shrink_node_memcg()
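Lines 2620-2660 are the proportionality fix-up that runs once the reclaim target has been met: scanning stops for the smaller of the file/anon pair, and the other pair's targets are rescaled so both types finish the pass at the same fraction of their original targets. Reconstructed from this era of the source (note that the apparent duplication at 2653-2655 and 2658-2660 is genuine: the same rescaling is applied to the inactive list, then to the active one):

	nr_file = nr[LRU_INACTIVE_FILE] + nr[LRU_ACTIVE_FILE];
	nr_anon = nr[LRU_INACTIVE_ANON] + nr[LRU_ACTIVE_ANON];

	/*
	 * percentage = fraction of the smaller type's original target
	 * that is still unscanned.
	 */
	if (nr_file > nr_anon) {
		unsigned long scan_target = targets[LRU_INACTIVE_ANON] +
					targets[LRU_ACTIVE_ANON] + 1;
		lru = LRU_BASE;
		percentage = nr_anon * 100 / scan_target;
	} else {
		unsigned long scan_target = targets[LRU_INACTIVE_FILE] +
					targets[LRU_ACTIVE_FILE] + 1;
		lru = LRU_FILE;
		percentage = nr_file * 100 / scan_target;
	}

	/* stop scanning the smaller of the two types */
	nr[lru] = 0;				/* line 2645 */
	nr[lru + LRU_ACTIVE] = 0;		/* line 2646 */

	/*
	 * Rescale the other type to the same completion fraction,
	 * inactive list first (2653-2655), then active (2658-2660).
	 */
	lru = (lru == LRU_FILE) ? LRU_BASE : LRU_FILE;
	nr_scanned = targets[lru] - nr[lru];
	nr[lru] = targets[lru] * (100 - percentage) / 100;
	nr[lru] -= min(nr[lru], nr_scanned);

	lru += LRU_ACTIVE;
	nr_scanned = targets[lru] - nr[lru];
	nr[lru] = targets[lru] * (100 - percentage) / 100;
	nr[lru] -= min(nr[lru], nr_scanned);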
2764 memset(&sc->nr, 0, sizeof(sc->nr)); in shrink_node()
2852 if (sc->nr.writeback && sc->nr.writeback == sc->nr.taken) in shrink_node()
2860 if (sc->nr.dirty && sc->nr.dirty == sc->nr.congested) in shrink_node()
2864 if (sc->nr.unqueued_dirty == sc->nr.file_taken) in shrink_node()
2873 if (sc->nr.immediate) in shrink_node()
2882 sc->nr.dirty && sc->nr.dirty == sc->nr.congested) in shrink_node()
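shrink_node() zeroes sc->nr once per pass (line 2764) and reads the totals back to decide whether reclaim should throttle. The tests at 2852-2873 set per-node flags or stall outright; roughly (a sketch from this era, with the guard conditions around each test elided):

	/* every isolated page was in writeback: LRU cycling faster than IO */
	if (sc->nr.writeback && sc->nr.writeback == sc->nr.taken)
		set_bit(PGDAT_WRITEBACK, &pgdat->flags);

	/* every dirty page sat on a congested device: mark the node */
	if (sc->nr.dirty && sc->nr.dirty == sc->nr.congested)
		set_bit(PGDAT_CONGESTED, &pgdat->flags);

	/* dirty pages not queued for writeback: let kswapd write them */
	if (sc->nr.unqueued_dirty == sc->nr.file_taken)
		set_bit(PGDAT_DIRTY, &pgdat->flags);

	/* pages flagged for immediate reclaim still in writeback: stall */
	if (sc->nr.immediate)
		congestion_wait(BLK_RW_ASYNC, HZ/10);

Line 2882 repeats the dirty == congested comparison for cgroup reclaim, recording the congestion on the memcg rather than on the node.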
4343 for (i = 0; i < pvec->nr; i++) { in check_move_unevictable_pages()
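The final hit is unrelated to the reclaim bookkeeping above: in check_move_unevictable_pages(), pvec->nr is simply the fill count of a struct pagevec. A minimal sketch of the loop shape, assuming the pagevec-taking signature this line implies:

	void check_move_unevictable_pages(struct pagevec *pvec)
	{
		int i;

		for (i = 0; i < pvec->nr; i++) {
			struct page *page = pvec->pages[i];

			/* recheck evictability, move page to the right LRU */
		}
	}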