/mm/ |
D | memory-failure.c |
    81  static int hwpoison_filter_dev(struct page *p)  in hwpoison_filter_dev()
    111  static int hwpoison_filter_flags(struct page *p)  in hwpoison_filter_flags()
    136  static int hwpoison_filter_task(struct page *p)  in hwpoison_filter_task()
    147  static int hwpoison_filter_task(struct page *p) { return 0; }  in hwpoison_filter_task()
    150  int hwpoison_filter(struct page *p)  in hwpoison_filter()
    167  int hwpoison_filter(struct page *p)  in hwpoison_filter()
    241  void shake_page(struct page *p, int access)  in shake_page()
    308  static void add_to_kill(struct task_struct *tsk, struct page *p,  in add_to_kill()
    568  static int delete_from_lru_cache(struct page *p)  in delete_from_lru_cache()
    593  static int truncate_error_page(struct page *p, unsigned long pfn,  in truncate_error_page()
    [all …]
|
D | oom_kill.c |
    134  struct task_struct *find_lock_task_mm(struct task_struct *p)  in find_lock_task_mm()
    163  static bool oom_unkillable_task(struct task_struct *p)  in oom_unkillable_task()
    200  long oom_badness(struct task_struct *p, unsigned long totalpages)  in oom_badness()
    370  struct task_struct *p;  in select_bad_process() local
    380  static int dump_task(struct task_struct *p, void *arg)  in dump_task()
    431  struct task_struct *p;  in dump_tasks() local
    452  static void dump_header(struct oom_control *oc, struct task_struct *p)  in dump_header()
    490  bool process_shares_mm(struct task_struct *p, struct mm_struct *mm)  in process_shares_mm()
    812  struct task_struct *p;  in task_will_free_mem() local
    858  struct task_struct *p;  in __oom_kill_process() local
|
D | swapfile.c |
    548  static void inc_cluster_info_page(struct swap_info_struct *p,  in inc_cluster_info_page()
    568  static void dec_cluster_info_page(struct swap_info_struct *p,  in dec_cluster_info_page()
    671  static void __del_from_avail_list(struct swap_info_struct *p)  in __del_from_avail_list()
    680  static void del_from_avail_list(struct swap_info_struct *p)  in del_from_avail_list()
    704  static void add_to_avail_list(struct swap_info_struct *p)  in add_to_avail_list()
    1121  struct swap_info_struct *p;  in __swap_info_get() local
    1150  struct swap_info_struct *p;  in _swap_info_get() local
    1168  struct swap_info_struct *p;  in swap_info_get() local
    1179  struct swap_info_struct *p;  in swap_info_get_cont() local
    1192  static unsigned char __swap_entry_free_locked(struct swap_info_struct *p,  in __swap_entry_free_locked()
    [all …]
|
D | sparse-vmemmap.c |
    148  void *p = vmemmap_alloc_block_buf(PAGE_SIZE, node);  in vmemmap_pte_populate() local
    159  void *p = vmemmap_alloc_block(size, node);  in vmemmap_alloc_block_zero() local
    172  void *p = vmemmap_alloc_block_zero(PAGE_SIZE, node);  in vmemmap_pmd_populate() local
    184  void *p = vmemmap_alloc_block_zero(PAGE_SIZE, node);  in vmemmap_pud_populate() local
    196  void *p = vmemmap_alloc_block_zero(PAGE_SIZE, node);  in vmemmap_p4d_populate() local
    208  void *p = vmemmap_alloc_block_zero(PAGE_SIZE, node);  in vmemmap_pgd_populate() local
|
D | slab_common.c |
    103  void __kmem_cache_free_bulk(struct kmem_cache *s, size_t nr, void **p)  in __kmem_cache_free_bulk()
    116  void **p)  in __kmem_cache_alloc_bulk()
    1443  void *slab_next(struct seq_file *m, void *p, loff_t *pos)  in slab_next()
    1448  void slab_stop(struct seq_file *m, void *p)  in slab_stop()
    1495  static int slab_show(struct seq_file *m, void *p)  in slab_show()
    1548  void *memcg_slab_next(struct seq_file *m, void *p, loff_t *pos)  in memcg_slab_next()
    1555  void memcg_slab_stop(struct seq_file *m, void *p)  in memcg_slab_stop()
    1560  int memcg_slab_show(struct seq_file *m, void *p)  in memcg_slab_show()
    1673  static __always_inline void *__do_krealloc(const void *p, size_t new_size,  in __do_krealloc()
    1706  void *__krealloc(const void *p, size_t new_size, gfp_t flags)  in __krealloc()
    [all …]
|
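The __do_krealloc()/__krealloc() hits in slab_common.c above belong to the resize path for kmalloc'ed buffers. A minimal usage sketch of the public krealloc() interface follows; the helper grow_int_array() is hypothetical and only illustrates the calling convention, it is not taken from this file.

    #include <linux/slab.h>

    /* Hypothetical helper: grow a kmalloc'ed int array to new_n elements.
     * krealloc() preserves the old contents and returns NULL on failure
     * while leaving the original allocation untouched, so *arr is only
     * updated on success. */
    static int grow_int_array(int **arr, size_t new_n)
    {
            int *tmp;

            tmp = krealloc(*arr, new_n * sizeof(*tmp), GFP_KERNEL);
            if (!tmp)
                    return -ENOMEM;

            *arr = tmp;
            return 0;
    }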
D | vmstat.c |
    320  s8 __percpu *p = pcp->vm_stat_diff + item;  in __mod_zone_page_state() local
    340  s8 __percpu *p = pcp->vm_node_stat_diff + item;  in __mod_node_page_state() local
    382  s8 __percpu *p = pcp->vm_stat_diff + item;  in __inc_zone_state() local
    398  s8 __percpu *p = pcp->vm_node_stat_diff + item;  in __inc_node_state() local
    426  s8 __percpu *p = pcp->vm_stat_diff + item;  in __dec_zone_state() local
    442  s8 __percpu *p = pcp->vm_node_stat_diff + item;  in __dec_node_state() local
    484  s8 __percpu *p = pcp->vm_stat_diff + item;  in mod_zone_state() local
    541  s8 __percpu *p = pcp->vm_node_stat_diff + item;  in mod_node_state() local
    758  struct per_cpu_pageset __percpu *p = zone->pageset;  in refresh_cpu_vm_stats() local
    820  struct per_cpu_nodestat __percpu *p = pgdat->per_cpu_nodestats;  in refresh_cpu_vm_stats() local
    [all …]
|
D | cma_debug.c |
    20  struct page *p;  member
    26  unsigned long *p = data;  in cma_debugfs_get() local
    134  struct page *p;  in cma_alloc_mem() local
|
D | util.c |
    125  void *p;  in kmemdup() local
    170  void *p;  in memdup_user() local
    196  void *p;  in vmemdup_user() local
    220  char *p;  in strndup_user() local
    252  char *p;  in memdup_user_nul() local
|
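The util.c hits above are the kernel's duplication helpers (kmemdup(), memdup_user(), vmemdup_user(), strndup_user(), memdup_user_nul()). A short sketch of the common memdup_user() pattern follows; demo_cfg and demo_set_cfg() are hypothetical names used only for illustration.

    #include <linux/types.h>
    #include <linux/slab.h>
    #include <linux/uaccess.h>
    #include <linux/err.h>

    struct demo_cfg {
            u32 flags;
            u32 timeout_ms;
    };

    /* Hypothetical ioctl-style setter: memdup_user() allocates a kernel
     * buffer and copies the userspace data into it in one step. On failure
     * it returns an ERR_PTR() (never NULL); the copy is released with
     * kfree() when no longer needed. */
    static int demo_set_cfg(const void __user *uarg, struct demo_cfg **out)
    {
            struct demo_cfg *cfg;

            cfg = memdup_user(uarg, sizeof(*cfg));
            if (IS_ERR(cfg))
                    return PTR_ERR(cfg);

            *out = cfg;
            return 0;
    }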
D | slub.c |
    129  void *fixup_red_left(struct kmem_cache *s, void *p)  in fixup_red_left()
    224  static inline int sysfs_slab_alias(struct kmem_cache *s, const char *p)  in sysfs_slab_alias()
    292  void *p;  in get_freepointer_safe() local
    320  static inline unsigned int slab_index(void *p, struct kmem_cache *s, void *addr)  in slab_index()
    453  void *p;  in get_map() local
    468  static inline void *restore_red_left(struct kmem_cache *s, void *p)  in restore_red_left()
    560  struct track *p;  in get_track() local
    570  struct track *p = get_track(s, object, alloc);  in set_track() local
    679  static void print_trailer(struct kmem_cache *s, struct page *page, u8 *p)  in print_trailer()
    741  u8 *p = object;  in init_object() local
    [all …]
|
D | vmalloc.c |
    55  struct vfree_deferred *p = container_of(w, struct vfree_deferred, wq);  in free_work() local
    1823  struct vm_struct *tmp, **p;  in vm_area_add_early() local
    1916  struct vfree_deferred *p;  in vmalloc_init() local
    2279  struct vfree_deferred *p = raw_cpu_ptr(&vfree_deferred);  in __vfree_deferred() local
    2750  struct page *p;  in aligned_vread() local
    2789  struct page *p;  in aligned_vwrite() local
    3081  pte_t ***p = data;  in f() local
    3441  static void *s_next(struct seq_file *m, void *p, loff_t *pos)  in s_next()
    3446  static void s_stop(struct seq_file *m, void *p)  in s_stop()
    3492  static int s_show(struct seq_file *m, void *p)  in s_show()
|
D | mempolicy.c |
    130  struct mempolicy *get_task_policy(struct task_struct *p)  in get_task_policy()
    287  void __mpol_put(struct mempolicy *p)  in __mpol_put()
    828  static void get_policy_nodemask(struct mempolicy *p, nodemask_t *nodes)  in get_policy_nodemask()
    852  struct page *p;  in lookup_node() local
    2290  struct sp_node *p = rb_entry(n, struct sp_node, nd);  in sp_lookup() local
    2320  struct rb_node **p = &sp->root.rb_node;  in sp_insert() local
    2642  void mpol_free_shared_policy(struct shared_policy *p)  in mpol_free_shared_policy()
    2928  char *p = buffer;  in mpol_to_str() local
|
D | hwpoison-inject.c | 17 struct page *p; in hwpoison_inject() local
|
D | backing-dev.c |
    887  struct rb_node **p = &bdi_tree.rb_node;  in bdi_lookup_rb_node() local
    920  struct rb_node **p;  in bdi_get_by_id() local
    936  struct rb_node *parent, **p;  in bdi_register_va() local
|
D | slab.h |
    298  struct kmem_cache *p)  in slab_equal_or_root()
    426  struct kmem_cache *p)  in slab_equal_or_root()
    578  size_t size, void **p)  in slab_post_alloc_hook()
|
D | memtest.c | 37 u64 *p, *start, *end; in memtest() local
|
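The single memtest.c hit above belongs to the boot-time memory tester, which writes known patterns over free memory and reserves any ranges that read back differently. Below is a heavily simplified, hypothetical sketch of that write-then-verify loop; check_pattern() is an illustrative standalone routine, not the kernel code, and omits the memblock reservation of bad regions.

    #include <linux/types.h>

    /* Hypothetical, simplified pattern check over a [start, end) range of
     * u64 cells: fill the range with a pattern, read it back, and count
     * the cells that do not match. */
    static unsigned long check_pattern(u64 *start, u64 *end, u64 pattern)
    {
            u64 *p;
            unsigned long bad = 0;

            for (p = start; p < end; p++)
                    *p = pattern;

            for (p = start; p < end; p++)
                    if (*p != pattern)
                            bad++;

            return bad;
    }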
D | slob.c |
    676  void kmem_cache_free_bulk(struct kmem_cache *s, size_t size, void **p)  in kmem_cache_free_bulk()
    683  void **p)  in kmem_cache_alloc_bulk()
|
D | page_alloc.c |
    448  #define kasan_free_nondeferred_pages(p, o) kasan_free_pages(p, o)  argument
    699  struct page *p = page + i;  in prep_compound_page() local
    1451  struct page *p = page;  in __free_pages_core() local
    1990  struct page *p = page;  in init_cma_reserved_pageblock() local
    2145  struct page *p = page + i;  in check_new_pages() local
    5158  struct page *p;  in alloc_pages_exact_nid() local
    5383  char *p = tmp;  in show_migration_types() local
    6288  static void pageset_set_batch(struct per_cpu_pageset *p, unsigned long batch)  in pageset_set_batch()
    6293  static void pageset_init(struct per_cpu_pageset *p)  in pageset_init()
    6305  static void setup_pageset(struct per_cpu_pageset *p, unsigned long batch)  in setup_pageset()
    [all …]
|
D | percpu-stats.c | 57 int *alloc_sizes, *p; in chunk_map_stats() local
|
D | nommu.c |
    459  struct rb_node *p, *lastp;  in validate_nommu_regions() local
    492  struct rb_node **p, *parent;  in add_nommu_region() local
    590  struct rb_node **p, *parent, *rb_prev;  in add_vma_to_mm() local
|
D | early_ioremap.c | 257 char *p; in copy_from_early_mem() local
|
D | memcontrol.c |
    527  struct rb_node **p = &mctz->rb_root.rb_node;  in __mem_cgroup_insert_exceeded() local
    769  void __mod_lruvec_slab_state(void *p, enum node_stat_item idx, int val)  in __mod_lruvec_slab_state()
    794  void mod_memcg_obj_state(void *p, int idx, int val)  in mod_memcg_obj_state()
    938  struct mem_cgroup *mem_cgroup_from_task(struct task_struct *p)  in mem_cgroup_from_task()
    1517  void mem_cgroup_print_oom_context(struct mem_cgroup *memcg, struct task_struct *p)  in mem_cgroup_print_oom_context()
    2812  struct mem_cgroup *mem_cgroup_from_obj(void *p)  in mem_cgroup_from_obj()
    5837  struct task_struct *leader, *p;  in mem_cgroup_can_attach() local
|
/mm/kasan/ |
D | init.c |
    129  pte_t *p;  in zero_pmd_populate() local
    166  pmd_t *p;  in zero_pud_populate() local
    207  pud_t *p;  in zero_p4d_populate() local
    280  p4d_t *p;  in kasan_populate_early_shadow() local
|
D | tags_report.c | 69 void *p = reset_tag(addr); in find_first_bad_addr() local
|
D | generic_report.c | 39 void *p = addr; in find_first_bad_addr() local
|
D | common.c |
    90  bool __kasan_check_read(const volatile void *p, unsigned int size)  in __kasan_check_read()
    96  bool __kasan_check_write(const volatile void *p, unsigned int size)  in __kasan_check_write()
|
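The kasan/common.c hits above are the out-of-line checks behind the kasan_check_read()/kasan_check_write() annotations. A hedged usage sketch follows, reusing the signatures listed above; checked_copy() is a hypothetical helper, not part of the KASAN code itself.

    #include <linux/kasan-checks.h>
    #include <linux/string.h>

    /* Hypothetical helper: explicitly ask KASAN to validate both buffers
     * before a raw memcpy(). The checks compile away when KASAN is
     * disabled; with KASAN enabled, an invalid access to either range is
     * reported before the copy runs. */
    static void checked_copy(void *dst, const void *src, unsigned int len)
    {
            kasan_check_read(src, len);
            kasan_check_write(dst, len);
            memcpy(dst, src, len);
    }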