Lines Matching refs:nr
159 } nr; member
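The entry at line 159 is the closing brace of an anonymous aggregate named nr inside struct scan_control; it holds the running reclaim counters that later entries in this listing read through sc->nr. A minimal sketch of that member, assuming a layout built only from the fields this listing actually references (lines 2063-2070 and 2913-2940); field order and widths are assumptions, and the rest of scan_control is elided:

/*
 * Sketch of the aggregate that line 159 closes.  Only the fields read
 * elsewhere in this listing are shown; order and types are assumed,
 * and the surrounding scan_control members are elided.
 */
struct scan_control {
	/* ... other reclaim parameters elided ... */

	struct {
		unsigned int dirty;		/* dirty pages seen this cycle        */
		unsigned int unqueued_dirty;	/* dirty but not yet queued for I/O   */
		unsigned int congested;		/* pages backed by a congested device */
		unsigned int writeback;		/* pages currently under writeback    */
		unsigned int immediate;		/* writeback pages met at the LRU tail */
		unsigned int file_taken;	/* file pages isolated this cycle     */
		unsigned int taken;		/* all pages isolated this cycle      */
	} nr;
};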
325 unsigned long nr; in zone_reclaimable_pages() local
327 nr = zone_page_state_snapshot(zone, NR_ZONE_INACTIVE_FILE) + in zone_reclaimable_pages()
330 nr += zone_page_state_snapshot(zone, NR_ZONE_INACTIVE_ANON) + in zone_reclaimable_pages()
333 return nr; in zone_reclaimable_pages()
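In zone_reclaimable_pages() (lines 325-333) the local nr accumulates the zone's file LRU pages and, when anonymous pages can be reclaimed, its anon LRU pages as well, using zone_page_state_snapshot() to read the per-zone counters. The sketch below is a simplified userspace model of that sum only; the zone_stats[] array, the enum names, and the can_reclaim_anon flag are stand-ins for the kernel's counters and swap check, not kernel API.

#include <stdio.h>

/*
 * Userspace model of the sum at lines 325-333: reclaimable pages are
 * the file LRU pages, plus the anon LRU pages when swap is usable.
 * zone_stats[] and can_reclaim_anon are illustrative stand-ins for
 * zone_page_state_snapshot() and the kernel's swap check.
 */
enum { ZONE_INACTIVE_FILE, ZONE_ACTIVE_FILE,
       ZONE_INACTIVE_ANON, ZONE_ACTIVE_ANON, NR_ZONE_STATS };

static unsigned long zone_reclaimable_pages_model(
		const unsigned long zone_stats[NR_ZONE_STATS],
		int can_reclaim_anon)
{
	unsigned long nr;

	nr = zone_stats[ZONE_INACTIVE_FILE] + zone_stats[ZONE_ACTIVE_FILE];
	if (can_reclaim_anon)
		nr += zone_stats[ZONE_INACTIVE_ANON] + zone_stats[ZONE_ACTIVE_ANON];

	return nr;
}

int main(void)
{
	unsigned long stats[NR_ZONE_STATS] = { 100, 200, 50, 25 };

	printf("with swap:    %lu\n", zone_reclaimable_pages_model(stats, 1));
	printf("without swap: %lu\n", zone_reclaimable_pages_model(stats, 0));
	return 0;
}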
448 long nr; in do_shrink_slab() local
467 nr = atomic_long_xchg(&shrinker->nr_deferred[nid], 0); in do_shrink_slab()
469 total_scan = nr; in do_shrink_slab()
488 next_deferred = nr; in do_shrink_slab()
515 trace_mm_shrink_slab_start(shrinker, shrinkctl, nr, in do_shrink_slab()
567 trace_mm_shrink_slab_end(shrinker, nid, freed, nr, new_nr, total_scan); in do_shrink_slab()
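In do_shrink_slab() (lines 448-567) nr is the deferred backlog for this node: the atomic exchange at line 467 claims the whole backlog in one step so only this reclaimer accounts for it, it is folded into total_scan, and unscanned leftovers become next_deferred (the add-back itself is on lines not shown here). The following is a userspace model of that handoff using C11 atomics; claim_deferred() and return_deferred() are illustrative names, not kernel functions.

#include <stdatomic.h>
#include <stdio.h>

/* Per-node backlog of objects earlier passes could not scan. */
static atomic_long nr_deferred = 0;

/* Claim the whole backlog for this pass, like atomic_long_xchg(..., 0). */
static long claim_deferred(void)
{
	return atomic_exchange(&nr_deferred, 0);
}

/* Hand unscanned work back so a later pass picks it up. */
static void return_deferred(long leftover)
{
	if (leftover > 0)
		atomic_fetch_add(&nr_deferred, leftover);
}

int main(void)
{
	long nr, total_scan, scanned;

	atomic_store(&nr_deferred, 96);

	nr = claim_deferred();		/* backlog from previous passes */
	total_scan = nr + 32;		/* plus this pass's own target  */

	scanned = 100;			/* pretend this much was scanned */
	return_deferred(total_scan - scanned);

	printf("claimed %ld, deferred again %ld\n",
	       nr, atomic_load(&nr_deferred));
	return 0;
}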
2063 sc->nr.dirty += stat.nr_dirty; in shrink_inactive_list()
2064 sc->nr.congested += stat.nr_congested; in shrink_inactive_list()
2065 sc->nr.unqueued_dirty += stat.nr_unqueued_dirty; in shrink_inactive_list()
2066 sc->nr.writeback += stat.nr_writeback; in shrink_inactive_list()
2067 sc->nr.immediate += stat.nr_immediate; in shrink_inactive_list()
2068 sc->nr.taken += nr_taken; in shrink_inactive_list()
2070 sc->nr.file_taken += nr_taken; in shrink_inactive_list()
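Lines 2063-2070 show shrink_inactive_list() folding one batch's statistics into the running sc->nr totals that shrink_node() inspects later (lines 2913-2940); file_taken is only accumulated for file-LRU batches, under a guard that sits just outside the listed lines. A compact model of that accumulation; the struct layouts and fold_batch() name are illustrative.

#include <stdbool.h>
#include <stdio.h>

/* Illustrative counterparts of the kernel's reclaim_stat and sc->nr. */
struct batch_stat {
	unsigned int nr_dirty, nr_congested, nr_unqueued_dirty;
	unsigned int nr_writeback, nr_immediate;
};

struct reclaim_totals {
	unsigned int dirty, congested, unqueued_dirty;
	unsigned int writeback, immediate, taken, file_taken;
};

static void fold_batch(struct reclaim_totals *nr,
		       const struct batch_stat *stat,
		       unsigned int nr_taken, bool file)
{
	nr->dirty          += stat->nr_dirty;
	nr->congested      += stat->nr_congested;
	nr->unqueued_dirty += stat->nr_unqueued_dirty;
	nr->writeback      += stat->nr_writeback;
	nr->immediate      += stat->nr_immediate;
	nr->taken          += nr_taken;
	if (file)			/* file pages counted separately */
		nr->file_taken += nr_taken;
}

int main(void)
{
	struct reclaim_totals nr = { 0 };
	struct batch_stat stat = { .nr_dirty = 4, .nr_writeback = 2 };

	fold_batch(&nr, &stat, 32, true);
	printf("taken %u, file_taken %u, dirty %u\n",
	       nr.taken, nr.file_taken, nr.dirty);
	return 0;
}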
2329 unsigned long *nr) in get_scan_count() argument
2527 nr[lru] = scan; in get_scan_count()
2533 unsigned long nr[NR_LRU_LISTS]; in shrink_lruvec() local
2542 get_scan_count(lruvec, sc, nr); in shrink_lruvec()
2545 memcpy(targets, nr, sizeof(nr)); in shrink_lruvec()
2562 while (nr[LRU_INACTIVE_ANON] || nr[LRU_ACTIVE_FILE] || in shrink_lruvec()
2563 nr[LRU_INACTIVE_FILE]) { in shrink_lruvec()
2568 if (nr[lru]) { in shrink_lruvec()
2569 nr_to_scan = min(nr[lru], SWAP_CLUSTER_MAX); in shrink_lruvec()
2570 nr[lru] -= nr_to_scan; in shrink_lruvec()
2589 nr_file = nr[LRU_INACTIVE_FILE] + nr[LRU_ACTIVE_FILE]; in shrink_lruvec()
2590 nr_anon = nr[LRU_INACTIVE_ANON] + nr[LRU_ACTIVE_ANON]; in shrink_lruvec()
2614 nr[lru] = 0; in shrink_lruvec()
2615 nr[lru + LRU_ACTIVE] = 0; in shrink_lruvec()
2622 nr_scanned = targets[lru] - nr[lru]; in shrink_lruvec()
2623 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_lruvec()
2624 nr[lru] -= min(nr[lru], nr_scanned); in shrink_lruvec()
2627 nr_scanned = targets[lru] - nr[lru]; in shrink_lruvec()
2628 nr[lru] = targets[lru] * (100 - percentage) / 100; in shrink_lruvec()
2629 nr[lru] -= min(nr[lru], nr_scanned); in shrink_lruvec()
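In shrink_lruvec() (lines 2533-2629) nr[] carries the per-LRU scan targets filled in by get_scan_count() (lines 2329-2527); the loop works through them in SWAP_CLUSTER_MAX batches, and once enough has been reclaimed the less pressured LRU type is zeroed (lines 2614-2615) while the other type's remaining target is rescaled to its share of the original plan, crediting what was already scanned (lines 2622-2629). A small model of just that rebalancing arithmetic; the helper name and example numbers are illustrative.

#include <stdio.h>

/*
 * Model of the trimming at lines 2622-2629:
 *   scanned   = target - remaining
 *   new_nr    = target * (100 - percentage) / 100
 *   new_nr   -= min(new_nr, scanned)
 */
static unsigned long trim_remaining(unsigned long target,
				    unsigned long remaining,
				    unsigned long percentage)
{
	unsigned long nr_scanned = target - remaining;
	unsigned long nr = target * (100 - percentage) / 100;

	return nr - (nr < nr_scanned ? nr : nr_scanned);
}

int main(void)
{
	unsigned long target = 1000;	/* original plan for this LRU          */
	unsigned long remaining = 400;	/* still unscanned when reclaim is met */
	unsigned long percentage = 30;	/* share attributed to the other type  */

	printf("new remaining target: %lu\n",
	       trim_remaining(target, remaining, percentage));
	return 0;
}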
2787 memset(&sc->nr, 0, sizeof(sc->nr)); in shrink_node()
2913 if (sc->nr.writeback && sc->nr.writeback == sc->nr.taken) in shrink_node()
2917 if (sc->nr.unqueued_dirty == sc->nr.file_taken) in shrink_node()
2926 if (sc->nr.immediate) in shrink_node()
2940 sc->nr.dirty && sc->nr.dirty == sc->nr.congested) in shrink_node()
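After the memset at line 2787 resets the totals for a new pass, shrink_node() uses them (lines 2913-2940) to decide whether reclaim is being held up by writeback or dirty pages. The comparisons below mirror the listing; the printed messages are placeholders for what the kernel actually does on the adjacent, unlisted lines (marking the node, waking the flusher threads, throttling the reclaimer).

#include <stdio.h>

/* Same illustrative totals struct as in the earlier sketch. */
struct reclaim_totals {
	unsigned long dirty, unqueued_dirty, congested;
	unsigned long writeback, immediate, taken, file_taken;
};

static void evaluate_stalls(const struct reclaim_totals *nr)
{
	/* Everything isolated was already under writeback. */
	if (nr->writeback && nr->writeback == nr->taken)
		puts("all isolated pages under writeback -> back off");

	/* Every isolated file page was dirty but not yet queued for I/O. */
	if (nr->unqueued_dirty == nr->file_taken)
		puts("flusher is behind -> kick background writeback");

	/* Pages at the LRU tail were still under writeback when reached. */
	if (nr->immediate)
		puts("writeback pages recycling at the LRU tail -> throttle");

	/* Every dirty page sat on a congested device. */
	if (nr->dirty && nr->dirty == nr->congested)
		puts("device congested -> wait for I/O to drain");
}

int main(void)
{
	struct reclaim_totals nr = { .dirty = 8, .congested = 8,
				     .file_taken = 3, .unqueued_dirty = 3 };

	evaluate_stalls(&nr);
	return 0;
}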
4437 for (i = 0; i < pvec->nr; i++) { in check_move_unevictable_pages()
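At line 4437, nr is simply the fill count of a pagevec, the kernel's small fixed-size batch of page pointers, and check_move_unevictable_pages() walks the filled slots. A userspace analogue of that pattern; the struct, its size, and its name are illustrative, not the kernel's pagevec definition.

#include <stdio.h>

#define BATCH_SIZE 15			/* illustrative batch capacity */

/* A small batch: nr says how many of the fixed slots are in use. */
struct item_batch {
	unsigned int nr;
	int *items[BATCH_SIZE];
};

int main(void)
{
	int a = 1, b = 2, c = 3;
	struct item_batch batch = { .nr = 3, .items = { &a, &b, &c } };

	for (unsigned int i = 0; i < batch.nr; i++)
		printf("item %u = %d\n", i, *batch.items[i]);
	return 0;
}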