/mm/

debug_page_ref.c
       8  void __page_ref_set(struct page *page, int v)   in __page_ref_set() [argument]
      10  trace_page_ref_set(page, v);   in __page_ref_set()
      15  void __page_ref_mod(struct page *page, int v)   in __page_ref_mod() [argument]
      17  trace_page_ref_mod(page, v);   in __page_ref_mod()
      22  void __page_ref_mod_and_test(struct page *page, int v, int ret)   in __page_ref_mod_and_test() [argument]
      24  trace_page_ref_mod_and_test(page, v, ret);   in __page_ref_mod_and_test()
      29  void __page_ref_mod_and_return(struct page *page, int v, int ret)   in __page_ref_mod_and_return() [argument]
      31  trace_page_ref_mod_and_return(page, v, ret);   in __page_ref_mod_and_return()
      36  void __page_ref_mod_unless(struct page *page, int v, int u)   in __page_ref_mod_unless() [argument]
      38  trace_page_ref_mod_unless(page, v, u);   in __page_ref_mod_unless()
  [all …]

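These matches are the out-of-line hooks behind the page-reference tracepoints: each function simply forwards the operation to the corresponding trace_page_ref_* event. A minimal sketch of the pattern follows; the include and EXPORT lines are the usual boilerplate for such a file and are assumptions, not part of the matches above.

    #include <linux/mm_types.h>
    #include <linux/module.h>

    #define CREATE_TRACE_POINTS
    #include <trace/events/page_ref.h>

    /* Out-of-line tracepoint hook: the page_ref_*() helpers in the header
     * call this only when the tracepoint is active, so the tracing
     * machinery stays out of the refcount fast path. */
    void __page_ref_mod(struct page *page, int v)
    {
            trace_page_ref_mod(page, v);
    }
    EXPORT_SYMBOL(__page_ref_mod);
    EXPORT_TRACEPOINT_SYMBOL(page_ref_mod);
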
vmstat.c
     383  s8 v, t;   in __inc_zone_state() [local]
     385  v = __this_cpu_inc_return(*p);   in __inc_zone_state()
     387  if (unlikely(v > t)) {   in __inc_zone_state()
     390  zone_page_state_add(v + overstep, zone, item);   in __inc_zone_state()
     399  s8 v, t;   in __inc_node_state() [local]
     401  v = __this_cpu_inc_return(*p);   in __inc_node_state()
     403  if (unlikely(v > t)) {   in __inc_node_state()
     406  node_page_state_add(v + overstep, pgdat, item);   in __inc_node_state()
     427  s8 v, t;   in __dec_zone_state() [local]
     429  v = __this_cpu_dec_return(*p);   in __dec_zone_state()
  [all …]

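The vmstat matches show the differential-counter pattern: each CPU accumulates updates in a small s8 delta and folds them into the shared zone/node counter only when the delta exceeds the per-CPU threshold, adding an overstep so the next fold is not immediate. A single-threaded userspace sketch of the same idea, with illustrative names and a half-threshold overstep:

    #include <stdio.h>

    /* Sketch of the vmstat differential-counter idea: updates accumulate in
     * a small per-CPU delta (an s8 in the kernel) and are folded into the
     * shared counter only when the delta crosses a threshold. */
    struct counter {
            long global;            /* shared counter, expensive to touch */
            signed char delta;      /* per-CPU delta in the kernel */
            signed char threshold;  /* per-CPU stat_threshold in the kernel */
    };

    static void counter_inc(struct counter *c)
    {
            signed char v = ++c->delta;

            if (v > c->threshold) {
                    signed char overstep = c->threshold >> 1;

                    c->global += v + overstep;
                    c->delta = -overstep;
            }
    }

    int main(void)
    {
            struct counter c = { .global = 0, .delta = 0, .threshold = 8 };

            for (int i = 0; i < 100; i++)
                    counter_inc(&c);
            /* global + delta always equals the number of increments */
            printf("global=%ld delta=%d total=%ld\n",
                   c.global, c.delta, c.global + c.delta);
            return 0;
    }
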
mempolicy.c
     171  pol->v.nodes = *nodes;   in mpol_new_interleave()
     182  pol->v.preferred_node = first_node(*nodes);   in mpol_new_preferred()
     190  pol->v.nodes = *nodes;   in mpol_new_bind()
     307  nodes_remap(tmp, pol->v.nodes,pol->w.cpuset_mems_allowed,   in mpol_rebind_nodemask()
     315  pol->v.nodes = tmp;   in mpol_rebind_nodemask()
     327  pol->v.preferred_node = node;   in mpol_rebind_preferred()
     333  pol->v.preferred_node = first_node(tmp);   in mpol_rebind_preferred()
     335  pol->v.preferred_node = node_remap(pol->v.preferred_node,   in mpol_rebind_preferred()
     843  *nodes = p->v.nodes;   in get_policy_nodemask()
     847  node_set(p->v.preferred_node, *nodes);   in get_policy_nodemask()
  [all …]

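Here `v` is the union inside struct mempolicy that holds either a node mask or a preferred node; the rebind paths remap it when the cpuset's allowed nodes change, preserving a node's position relative to the mask rather than its absolute number. A rough userspace sketch of that remapping idea, with plain bitmaps standing in for nodemask_t and invented names:

    #include <stdio.h>

    /* Sketch of the node_remap() idea: the preferred node keeps its
     * position *within the mask* when the allowed set changes. */
    static int set_bits_below(unsigned int mask, int node)
    {
            int w = 0;

            for (int i = 0; i < node; i++)
                    if (mask & (1u << i))
                            w++;
            return w;
    }

    static int remap_node(int node, unsigned int oldmask, unsigned int newmask)
    {
            int pos = set_bits_below(oldmask, node);  /* index of node in oldmask */

            for (int i = 0; i < 32; i++)
                    if (newmask & (1u << i))
                            if (pos-- == 0)
                                    return i;
            return node;    /* fallback when newmask has fewer nodes set */
    }

    int main(void)
    {
            /* preferred node 2 is the second node of {1,2}; rebinding to
             * {4,5} keeps the relative position, so it becomes node 5 */
            printf("%d\n", remap_node(2, 0x06, 0x30));
            return 0;
    }
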
interval_tree.c
      13  static inline unsigned long vma_start_pgoff(struct vm_area_struct *v)   in vma_start_pgoff() [argument]
      15  return v->vm_pgoff;   in vma_start_pgoff()
      18  static inline unsigned long vma_last_pgoff(struct vm_area_struct *v)   in vma_last_pgoff() [argument]
      20  return v->vm_pgoff + vma_pages(v) - 1;   in vma_last_pgoff()

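vma_start_pgoff() and vma_last_pgoff() tell the VMA interval tree which closed range of file page offsets a mapping covers; the "- 1" makes the end inclusive. A tiny userspace sketch of why that matters for an overlap test, with an invented struct standing in for vm_area_struct:

    #include <stdio.h>

    /* Sketch: a mapping is indexed by the closed interval
     * [start_pgoff, last_pgoff]; a 3-page mapping at pgoff 10 covers 10..12. */
    struct mapping {
            unsigned long pgoff;    /* first file page offset */
            unsigned long pages;    /* number of pages mapped */
    };

    static unsigned long first_pgoff(const struct mapping *m) { return m->pgoff; }
    static unsigned long last_pgoff(const struct mapping *m)
    {
            return m->pgoff + m->pages - 1;
    }

    /* closed-interval overlap test, as the interval tree performs it */
    static int overlaps(const struct mapping *m, unsigned long lo, unsigned long hi)
    {
            return first_pgoff(m) <= hi && last_pgoff(m) >= lo;
    }

    int main(void)
    {
            struct mapping m = { .pgoff = 10, .pages = 3 };

            printf("%d %d\n", overlaps(&m, 12, 20), overlaps(&m, 13, 20)); /* 1 0 */
            return 0;
    }
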
vmalloc.c
    3437  static void show_numa_info(struct seq_file *m, struct vm_struct *v)   in show_numa_info() [argument]
    3445  if (v->flags & VM_UNINITIALIZED)   in show_numa_info()
    3452  for (nr = 0; nr < v->nr_pages; nr++)   in show_numa_info()
    3453  counters[page_to_nid(v->pages[nr])]++;   in show_numa_info()
    3480  struct vm_struct *v;   in s_show() [local]
    3496  v = va->vm;   in s_show()
    3499  v->addr, v->addr + v->size, v->size);   in s_show()
    3501  if (v->caller)   in s_show()
    3502  seq_printf(m, " %pS", v->caller);   in s_show()
    3504  if (v->nr_pages)   in s_show()
  [all …]

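show_numa_info() buckets an area's backing pages by NUMA node so the /proc/vmallocinfo line printed by s_show() can carry per-node counts. A rough sketch of just that counting step, using the helpers named in the matches; it is not the verbatim kernel function, and locking plus the CONFIG_NUMA guard are omitted:

    #include <linux/mm.h>
    #include <linux/nodemask.h>
    #include <linux/seq_file.h>
    #include <linux/string.h>
    #include <linux/vmalloc.h>

    /* Bucket the area's pages by node and print only the nodes in use. */
    static void numa_info_sketch(struct seq_file *m, struct vm_struct *v,
                                 unsigned int *counters)
    {
            unsigned int nr, node;

            if (v->flags & VM_UNINITIALIZED)
                    return;         /* pages[] is not populated yet */

            memset(counters, 0, nr_node_ids * sizeof(*counters));
            for (nr = 0; nr < v->nr_pages; nr++)
                    counters[page_to_nid(v->pages[nr])]++;

            for_each_node(node)
                    if (counters[node])
                            seq_printf(m, " N%u=%u", node, counters[node]);
    }
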
swap_state.c
      48  #define SWAP_RA_HITS(v) ((v) & SWAP_RA_HITS_MASK)   [argument]
      49  #define SWAP_RA_WIN(v) (((v) & SWAP_RA_WIN_MASK) >> SWAP_RA_WIN_SHIFT)   [argument]
      50  #define SWAP_RA_ADDR(v) ((v) & PAGE_MASK)   [argument]

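These macros unpack the swap readahead bookkeeping value: one unsigned long carrying a page-aligned address plus a readahead window and a hit counter in the low bits. A small userspace sketch of the same packing scheme; the shift and mask choices are illustrative stand-ins for the SWAP_RA_* constants swap_state.c derives from PAGE_SHIFT:

    #include <stdio.h>

    /* Sketch: pack a page-aligned address, a window and a hit count into
     * one word, then pull them back apart the way the SWAP_RA_* macros do. */
    #define DEMO_PAGE_SHIFT   12
    #define DEMO_PAGE_MASK    (~((1UL << DEMO_PAGE_SHIFT) - 1))

    #define DEMO_WIN_SHIFT    (DEMO_PAGE_SHIFT / 2)
    #define DEMO_HITS_MASK    ((1UL << DEMO_WIN_SHIFT) - 1)
    #define DEMO_WIN_MASK     (~DEMO_PAGE_MASK & ~DEMO_HITS_MASK)

    #define DEMO_HITS(v)      ((v) & DEMO_HITS_MASK)
    #define DEMO_WIN(v)       (((v) & DEMO_WIN_MASK) >> DEMO_WIN_SHIFT)
    #define DEMO_ADDR(v)      ((v) & DEMO_PAGE_MASK)
    #define DEMO_VAL(addr, win, hits) \
            (((addr) & DEMO_PAGE_MASK) | ((unsigned long)(win) << DEMO_WIN_SHIFT) | (hits))

    int main(void)
    {
            unsigned long v = DEMO_VAL(0x7f001000UL, 4, 2);

            printf("addr=%#lx win=%lu hits=%lu\n",
                   DEMO_ADDR(v), DEMO_WIN(v), DEMO_HITS(v));
            return 0;
    }
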
kmemleak.c
    1640  static void *kmemleak_seq_next(struct seq_file *seq, void *v, loff_t *pos)   in kmemleak_seq_next() [argument]
    1642  struct kmemleak_object *prev_obj = v;   in kmemleak_seq_next()
    1662  static void kmemleak_seq_stop(struct seq_file *seq, void *v)   in kmemleak_seq_stop() [argument]
    1664  if (!IS_ERR(v)) {   in kmemleak_seq_stop()
    1671  if (v)   in kmemleak_seq_stop()
    1672  put_object(v);   in kmemleak_seq_stop()
    1679  static int kmemleak_seq_show(struct seq_file *seq, void *v)   in kmemleak_seq_show() [argument]
    1681  struct kmemleak_object *object = v;   in kmemleak_seq_show()

memcontrol.c
    3762  static int memcg_numa_stat_show(struct seq_file *m, void *v)   in memcg_numa_stat_show() [argument]
    3849  static int memcg_stat_show(struct seq_file *m, void *v)   in memcg_stat_show() [argument]
    4276  static int mem_cgroup_oom_control_read(struct seq_file *sf, void *v)   in mem_cgroup_oom_control_read() [argument]
    6025  static int memory_min_show(struct seq_file *m, void *v)   in memory_min_show() [argument]
    6048  static int memory_low_show(struct seq_file *m, void *v)   in memory_low_show() [argument]
    6071  static int memory_high_show(struct seq_file *m, void *v)   in memory_high_show() [argument]
    6100  static int memory_max_show(struct seq_file *m, void *v)   in memory_max_show() [argument]
    6165  static int memory_events_show(struct seq_file *m, void *v)   in memory_events_show() [argument]
    6173  static int memory_events_local_show(struct seq_file *m, void *v)   in memory_events_local_show() [argument]
    6181  static int memory_stat_show(struct seq_file *m, void *v)   in memory_stat_show() [argument]
  [all …]

percpu-stats.c
     135  static int percpu_stats_show(struct seq_file *m, void *v)   in percpu_stats_show() [argument]

percpu.c
    2173  int group, v;   in pcpu_dump_alloc_info() [local]
    2176  v = ai->nr_groups;   in pcpu_dump_alloc_info()
    2177  while (v /= 10)   in pcpu_dump_alloc_info()
    2180  v = num_possible_cpus();   in pcpu_dump_alloc_info()
    2181  while (v /= 10)   in pcpu_dump_alloc_info()

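The `while (v /= 10)` loops in pcpu_dump_alloc_info() just compute how many decimal digits nr_groups and the possible-CPU count need, so the dump can print aligned columns. The same computation in isolation:

    #include <stdio.h>

    /* Count how many decimal digits a value needs for aligned printing. */
    static int decimal_width(int v)
    {
            int width = 1;

            while (v /= 10)
                    width++;
            return width;
    }

    int main(void)
    {
            printf("%d %d %d\n",
                   decimal_width(7), decimal_width(42), decimal_width(1000)); /* 1 2 4 */
            return 0;
    }
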
swapfile.c
    2730  static void *swap_next(struct seq_file *swap, void *v, loff_t *pos)   in swap_next() [argument]
    2732  struct swap_info_struct *si = v;   in swap_next()
    2735  if (v == SEQ_START_TOKEN)   in swap_next()
    2750  static void swap_stop(struct seq_file *swap, void *v)   in swap_stop() [argument]
    2755  static int swap_show(struct seq_file *swap, void *v)   in swap_show() [argument]
    2757  struct swap_info_struct *si = v;   in swap_show()

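swap_next(), swap_stop() and swap_show() are part of the seq_file iterator behind /proc/swaps; SEQ_START_TOKEN is the placeholder ->start() may return once so that ->show() can print a header row before the first record. A self-contained sketch of that pattern over a fixed table; the module, proc file and all names are invented, and this is not the /proc/swaps code:

    #include <linux/kernel.h>
    #include <linux/module.h>
    #include <linux/proc_fs.h>
    #include <linux/seq_file.h>

    static const char *items[] = { "alpha", "beta", "gamma" };

    static void *demo_start(struct seq_file *m, loff_t *pos)
    {
            if (*pos == 0)
                    return SEQ_START_TOKEN;         /* ->show() prints the header */
            return *pos <= ARRAY_SIZE(items) ? (void *)&items[*pos - 1] : NULL;
    }

    static void *demo_next(struct seq_file *m, void *v, loff_t *pos)
    {
            ++*pos;
            return *pos <= ARRAY_SIZE(items) ? (void *)&items[*pos - 1] : NULL;
    }

    static void demo_stop(struct seq_file *m, void *v)
    {
            /* nothing to release in this sketch */
    }

    static int demo_show(struct seq_file *m, void *v)
    {
            if (v == SEQ_START_TOKEN)
                    seq_puts(m, "Name\n");
            else
                    seq_printf(m, "%s\n", *(const char **)v);
            return 0;
    }

    static const struct seq_operations demo_seq_ops = {
            .start  = demo_start,
            .next   = demo_next,
            .stop   = demo_stop,
            .show   = demo_show,
    };

    static int __init demo_init(void)
    {
            return proc_create_seq("seqfile_demo", 0444, NULL, &demo_seq_ops) ? 0 : -ENOMEM;
    }

    static void __exit demo_exit(void)
    {
            remove_proc_entry("seqfile_demo", NULL);
    }

    module_init(demo_init);
    module_exit(demo_exit);
    MODULE_LICENSE("GPL");
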
backing-dev.c
      48  static int bdi_debug_stats_show(struct seq_file *m, void *v)   in bdi_debug_stats_show() [argument]

slub.c
    4776  int v;   in setup_slub_memcg_sysfs() [local]
    4778  if (get_option(&str, &v) > 0)   in setup_slub_memcg_sysfs()
    4779  memcg_sysfs_enabled = v;   in setup_slub_memcg_sysfs()

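setup_slub_memcg_sysfs() parses an integer from the kernel command line with get_option() and uses it as a boolean switch. A minimal sketch of that __setup() pattern with invented names:

    #include <linux/init.h>
    #include <linux/kernel.h>

    /* Parse "demo_feature=<int>" from the boot command line and stash the
     * result in a flag; names here are made up for illustration. */
    static bool demo_feature_enabled;

    static int __init setup_demo_feature(char *str)
    {
            int v;

            if (get_option(&str, &v) > 0)
                    demo_feature_enabled = v;
            return 1;       /* non-zero: the option was handled */
    }
    __setup("demo_feature=", setup_demo_feature);
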
zsmalloc.c
     590  static int zs_stats_size_show(struct seq_file *s, void *v)   in zs_stats_size_show() [argument]