Lines matching refs:idx

(Identifier-reference search results for "idx". The leading numbers are source line numbers and the trailing "in <function>()" notes name the enclosing definition; judging by the function names, the indexed file is the Linux kernel's mm/memcontrol.c, roughly the v5.10 era.)
774 void __mod_memcg_state(struct mem_cgroup *memcg, int idx, int val) in __mod_memcg_state() argument
781 if (memcg_stat_item_in_bytes(idx)) in __mod_memcg_state()
784 x = val + __this_cpu_read(memcg->vmstats_percpu->stat[idx]); in __mod_memcg_state()
792 __this_cpu_add(memcg->vmstats_local->stat[idx], x); in __mod_memcg_state()
794 atomic_long_add(x, &mi->vmstats[idx]); in __mod_memcg_state()
797 __this_cpu_write(memcg->vmstats_percpu->stat[idx], x); in __mod_memcg_state()
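The matches at 774-797 are the core of __mod_memcg_state(), which batches per-cpu stat updates and only propagates them up the cgroup hierarchy once the pending delta crosses a threshold. A sketch of the full function, reconstructed on the assumption that this listing is mm/memcontrol.c circa v5.10; MEMCG_CHARGE_BATCH, parent_mem_cgroup() and the control flow between the matched lines come from that assumption, not from the listing itself:

    void __mod_memcg_state(struct mem_cgroup *memcg, int idx, int val)
    {
        long x, threshold = MEMCG_CHARGE_BATCH;

        if (mem_cgroup_disabled())
            return;

        /* Byte-sized items (e.g. slab) need a byte-scaled threshold. */
        if (memcg_stat_item_in_bytes(idx))
            threshold <<= PAGE_SHIFT;

        x = val + __this_cpu_read(memcg->vmstats_percpu->stat[idx]);
        if (unlikely(abs(x) > threshold)) {
            struct mem_cgroup *mi;

            /* Batch the local counter in sync with the hierarchical ones. */
            __this_cpu_add(memcg->vmstats_local->stat[idx], x);
            for (mi = memcg; mi; mi = parent_mem_cgroup(mi))
                atomic_long_add(x, &mi->vmstats[idx]);
            x = 0;
        }
        __this_cpu_write(memcg->vmstats_percpu->stat[idx], x);
    }

The batching keeps the hot path to a single per-cpu read-modify-write and bounds the error of any hierarchical counter to roughly the threshold times the number of cpus.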
811 void __mod_memcg_lruvec_state(struct lruvec *lruvec, enum node_stat_item idx, in __mod_memcg_lruvec_state() argument
822 __mod_memcg_state(memcg, idx, val); in __mod_memcg_lruvec_state()
825 __this_cpu_add(pn->lruvec_stat_local->count[idx], val); in __mod_memcg_lruvec_state()
827 if (vmstat_item_in_bytes(idx)) in __mod_memcg_lruvec_state()
830 x = val + __this_cpu_read(pn->lruvec_stat_cpu->count[idx]); in __mod_memcg_lruvec_state()
836 atomic_long_add(x, &pi->lruvec_stat[idx]); in __mod_memcg_lruvec_state()
839 __this_cpu_write(pn->lruvec_stat_cpu->count[idx], x); in __mod_memcg_lruvec_state()
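Lines 811-839 apply the same batch-and-flush scheme at per-node (lruvec) granularity: the memcg-wide counter is updated first, then the pending per-node delta is flushed up the chain of mem_cgroup_per_node ancestors. Sketch under the same mm/memcontrol.c ~v5.10 assumption (container_of() on the lruvec, mem_cgroup_nodeinfo() and parent_nodeinfo() are assumed from that source):

    void __mod_memcg_lruvec_state(struct lruvec *lruvec, enum node_stat_item idx,
                                  int val)
    {
        struct mem_cgroup_per_node *pn;
        struct mem_cgroup *memcg;
        long x, threshold = MEMCG_CHARGE_BATCH;

        pn = container_of(lruvec, struct mem_cgroup_per_node, lruvec);
        memcg = pn->memcg;

        /* Update the memcg-wide counter. */
        __mod_memcg_state(memcg, idx, val);

        /* Update the lruvec's own (non-hierarchical) counter. */
        __this_cpu_add(pn->lruvec_stat_local->count[idx], val);

        if (vmstat_item_in_bytes(idx))
            threshold <<= PAGE_SHIFT;

        x = val + __this_cpu_read(pn->lruvec_stat_cpu->count[idx]);
        if (unlikely(abs(x) > threshold)) {
            pg_data_t *pgdat = lruvec_pgdat(lruvec);
            struct mem_cgroup_per_node *pi;

            /* Flush the batched delta up the per-node hierarchy. */
            for (pi = mem_cgroup_nodeinfo(memcg, pgdat->node_id); pi;
                 pi = parent_nodeinfo(pi, pgdat->node_id))
                atomic_long_add(x, &pi->lruvec_stat[idx]);
            x = 0;
        }
        __this_cpu_write(pn->lruvec_stat_cpu->count[idx], x);
    }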
852 void __mod_lruvec_state(struct lruvec *lruvec, enum node_stat_item idx, in __mod_lruvec_state() argument
856 __mod_node_page_state(lruvec_pgdat(lruvec), idx, val); in __mod_lruvec_state()
860 __mod_memcg_lruvec_state(lruvec, idx, val); in __mod_lruvec_state()
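Lines 852-860 show that __mod_lruvec_state() is a thin wrapper: the node-wide vmstat counter is always updated, while the memcg/lruvec side is skipped when memcg is disabled. Sketch (the mem_cgroup_disabled() guard is assumed; only the two calls appear in the listing):

    void __mod_lruvec_state(struct lruvec *lruvec, enum node_stat_item idx,
                            int val)
    {
        /* Update the node-wide counter unconditionally. */
        __mod_node_page_state(lruvec_pgdat(lruvec), idx, val);

        /* Update memcg and lruvec counters only when memcg is active. */
        if (!mem_cgroup_disabled())
            __mod_memcg_lruvec_state(lruvec, idx, val);
    }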
864 void __mod_lruvec_slab_state(void *p, enum node_stat_item idx, int val) in __mod_lruvec_slab_state() argument
880 __mod_node_page_state(pgdat, idx, val); in __mod_lruvec_slab_state()
883 __mod_lruvec_state(lruvec, idx, val); in __mod_lruvec_slab_state()
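__mod_lruvec_slab_state() (864-883) takes a raw object pointer instead of an lruvec: it resolves the pointer to the memcg that was charged for it and accounts against the matching per-node lruvec, falling back to node-only accounting when the object is untracked. Sketch, with the RCU locking and the mem_cgroup_from_obj() lookup assumed from the ~v5.10 source:

    void __mod_lruvec_slab_state(void *p, enum node_stat_item idx, int val)
    {
        pg_data_t *pgdat = page_pgdat(virt_to_page(p));
        struct mem_cgroup *memcg;
        struct lruvec *lruvec;

        rcu_read_lock();
        memcg = mem_cgroup_from_obj(p);

        /* Untracked objects have no memcg and no lruvec: charge the node. */
        if (!memcg) {
            __mod_node_page_state(pgdat, idx, val);
        } else {
            lruvec = mem_cgroup_lruvec(memcg, pgdat);
            __mod_lruvec_state(lruvec, idx, val);
        }
        rcu_read_unlock();
    }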
888 void mod_memcg_obj_state(void *p, int idx, int val) in mod_memcg_obj_state() argument
895 mod_memcg_state(memcg, idx, val); in mod_memcg_obj_state()
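mod_memcg_obj_state() (888-895) is the object-pointer variant for memcg-only stats: the same RCU-protected lookup, but with no per-node accounting. Sketch under the same assumption:

    void mod_memcg_obj_state(void *p, int idx, int val)
    {
        struct mem_cgroup *memcg;

        rcu_read_lock();
        memcg = mem_cgroup_from_obj(p);
        if (memcg)
            mod_memcg_state(memcg, idx, val);
        rcu_read_unlock();
    }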
905 void __count_memcg_events(struct mem_cgroup *memcg, enum vm_event_item idx, in __count_memcg_events() argument
913 x = count + __this_cpu_read(memcg->vmstats_percpu->events[idx]); in __count_memcg_events()
921 __this_cpu_add(memcg->vmstats_local->events[idx], x); in __count_memcg_events()
923 atomic_long_add(x, &mi->vmevents[idx]); in __count_memcg_events()
926 __this_cpu_write(memcg->vmstats_percpu->events[idx], x); in __count_memcg_events()
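__count_memcg_events() (905-926) applies the identical batch-and-flush pattern to VM event counters (vmevents) instead of stats; event counts only ever grow, so the threshold check needs no abs(). Sketch under the same source assumption:

    void __count_memcg_events(struct mem_cgroup *memcg, enum vm_event_item idx,
                              unsigned long count)
    {
        unsigned long x;

        if (mem_cgroup_disabled())
            return;

        x = count + __this_cpu_read(memcg->vmstats_percpu->events[idx]);
        if (unlikely(x > MEMCG_CHARGE_BATCH)) {
            struct mem_cgroup *mi;

            /* Batch the local counter in sync with the hierarchical ones. */
            __this_cpu_add(memcg->vmstats_local->events[idx], x);
            for (mi = memcg; mi; mi = parent_mem_cgroup(mi))
                atomic_long_add(x, &mi->vmevents[idx]);
            x = 0;
        }
        __this_cpu_write(memcg->vmstats_percpu->events[idx], x);
    }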
1527 unsigned int idx; member
1577 if (memory_stats[i].idx == NR_ANON_THPS) in memory_stats_init()
1581 VM_BUG_ON(memory_stats[i].idx >= MEMCG_NR_STAT); in memory_stats_init()
1611 size = memcg_page_state(memcg, memory_stats[i].idx); in memory_stat_format()
1615 if (unlikely(memory_stats[i].idx == NR_SLAB_UNRECLAIMABLE_B)) { in memory_stat_format()
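The idx member at 1527 belongs to the memory_stat descriptor table that drives the memory.stat file: each entry pairs a printed name with a counter index and a ratio for converting counts to bytes. memory_stats_init() (1577-1581) patches the THP ratio at boot and sanity-checks every entry; memory_stat_format() (1611-1615) multiplies each counter by its ratio, special-casing NR_SLAB_UNRECLAIMABLE_B to emit the pre-existing combined "slab" line. A sketch of the descriptor and both loops, with the struct layout and the seq_buf plumbing assumed from the ~v5.10 source:

    struct memory_stat {
        const char *name;
        unsigned int ratio;    /* multiplier converting the counter to bytes */
        unsigned int idx;      /* index into the memcg stat arrays */
    };

    static int __init memory_stats_init(void)
    {
        int i;

        for (i = 0; i < ARRAY_SIZE(memory_stats); i++) {
    #ifdef CONFIG_TRANSPARENT_HUGEPAGE
            /* NR_ANON_THPS is counted in THPs, not in pages. */
            if (memory_stats[i].idx == NR_ANON_THPS)
                memory_stats[i].ratio = HPAGE_PMD_SIZE;
    #endif
            VM_BUG_ON(!memory_stats[i].ratio);
            VM_BUG_ON(memory_stats[i].idx >= MEMCG_NR_STAT);
        }
        return 0;
    }

    /* The formatting loop inside memory_stat_format(): */
    for (i = 0; i < ARRAY_SIZE(memory_stats); i++) {
        u64 size;

        size = memcg_page_state(memcg, memory_stats[i].idx);
        size *= memory_stats[i].ratio;
        seq_buf_printf(&s, "%s %llu\n", memory_stats[i].name, size);

        /* Preserve the combined "slab" entry that predates the
         * reclaimable/unreclaimable split. */
        if (unlikely(memory_stats[i].idx == NR_SLAB_UNRECLAIMABLE_B)) {
            size = memcg_page_state(memcg, NR_SLAB_RECLAIMABLE_B) +
                   memcg_page_state(memcg, NR_SLAB_UNRECLAIMABLE_B);
            seq_buf_printf(&s, "slab %llu\n", size);
        }
    }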
4613 static unsigned long memcg_exact_page_state(struct mem_cgroup *memcg, int idx) in memcg_exact_page_state() argument
4615 long x = atomic_long_read(&memcg->vmstats[idx]); in memcg_exact_page_state()
4619 x += per_cpu_ptr(memcg->vmstats_percpu, cpu)->stat[idx]; in memcg_exact_page_state()
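memcg_exact_page_state() (4613-4619) is the read side of the batching scheme: the atomic hierarchical counter may lag by whatever is still sitting in per-cpu deltas, so an exact reading folds those back in and clamps the result at zero. Sketch (for_each_online_cpu() and the clamp are assumed from the source):

    static unsigned long memcg_exact_page_state(struct mem_cgroup *memcg, int idx)
    {
        long x = atomic_long_read(&memcg->vmstats[idx]);
        int cpu;

        /* Fold in per-cpu deltas that have not been flushed yet. */
        for_each_online_cpu(cpu)
            x += per_cpu_ptr(memcg->vmstats_percpu, cpu)->stat[idx];
        if (x < 0)
            x = 0;
        return x;
    }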
6509 if (memory_stats[i].idx >= NR_VM_NODE_STAT_ITEMS) in memory_numa_stat_show()
6518 size = lruvec_page_state(lruvec, memory_stats[i].idx); in memory_numa_stat_show()
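memory_numa_stat_show() (6509-6518) reuses the same descriptor table for the memory.numa_stat file, skipping entries whose idx is not a per-node item and printing one "N<nid>=<bytes>" column per memory node. Sketch under the same ~v5.10 assumption (mem_cgroup_from_seq(), for_each_node_state() and the seq_file plumbing come from that source):

    static int memory_numa_stat_show(struct seq_file *m, void *v)
    {
        struct mem_cgroup *memcg = mem_cgroup_from_seq(m);
        int i;

        for (i = 0; i < ARRAY_SIZE(memory_stats); i++) {
            int nid;

            /* Only node_stat_item counters exist per node. */
            if (memory_stats[i].idx >= NR_VM_NODE_STAT_ITEMS)
                continue;

            seq_printf(m, "%s", memory_stats[i].name);
            for_each_node_state(nid, N_MEMORY) {
                u64 size;
                struct lruvec *lruvec;

                lruvec = mem_cgroup_lruvec(memcg, NODE_DATA(nid));
                size = lruvec_page_state(lruvec, memory_stats[i].idx);
                size *= memory_stats[i].ratio;
                seq_printf(m, " N%d=%llu", nid, size);
            }
            seq_printf(m, "\n");
        }
        return 0;
    }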