/mm/ |
D | sparse-vmemmap.c |
        38  static void * __init_refok __earlyonly_bootmem_alloc(int node,  in __earlyonly_bootmem_alloc()
        50  void * __meminit vmemmap_alloc_block(unsigned long size, int node)  in vmemmap_alloc_block()
        73  void * __meminit vmemmap_alloc_block_buf(unsigned long size, int node)  in vmemmap_alloc_block_buf()
        90  void __meminit vmemmap_verify(pte_t *pte, int node,  in vmemmap_verify()
       101  pte_t * __meminit vmemmap_pte_populate(pmd_t *pmd, unsigned long addr, int node)  in vmemmap_pte_populate()
       115  pmd_t * __meminit vmemmap_pmd_populate(pud_t *pud, unsigned long addr, int node)  in vmemmap_pmd_populate()
       127  pud_t * __meminit vmemmap_pud_populate(pgd_t *pgd, unsigned long addr, int node)  in vmemmap_pud_populate()
       139  pgd_t * __meminit vmemmap_pgd_populate(unsigned long addr, int node)  in vmemmap_pgd_populate()
       152  unsigned long end, int node)  in vmemmap_populate_basepages()
|
D | interval_tree.c |
        29  void vma_interval_tree_insert_after(struct vm_area_struct *node,  in vma_interval_tree_insert_after()
        86  void anon_vma_interval_tree_remove(struct anon_vma_chain *node,  in anon_vma_interval_tree_remove()
       100  anon_vma_interval_tree_iter_next(struct anon_vma_chain *node,  in anon_vma_interval_tree_iter_next()
       107  void anon_vma_interval_tree_verify(struct anon_vma_chain *node)  in anon_vma_interval_tree_verify()
|
D | slab.c |
       612  int node;  in init_reap_node()  local
       623  int node = __this_cpu_read(slab_reap_node);  in next_reap_node()  local
       678  static struct array_cache *alloc_arraycache(int node, int entries,  in alloc_arraycache()
       833  static inline struct alien_cache **alloc_alien_cache(int node,  in alloc_alien_cache()
       865  static struct alien_cache *__alloc_alien_cache(int node, int entries,  in __alloc_alien_cache()
       877  static struct alien_cache **alloc_alien_cache(int node, int limit, gfp_t gfp)  in alloc_alien_cache()
       915  struct array_cache *ac, int node,  in __drain_alien_cache()
       941  int node = __this_cpu_read(slab_reap_node);  in reap_alien()  local
       983  int node, int page_node)  in __cache_free_alien()
      1016  int node = numa_mem_id();  in cache_free_alien()  local
      [all …]
|
D | slob.c |
       190  static void *slob_new_pages(gfp_t gfp, int order, int node)  in slob_new_pages()
       268  static void *slob_alloc(size_t size, gfp_t gfp, int align, int node)  in slob_alloc()
       427  __do_kmalloc_node(size_t size, gfp_t gfp, int node, unsigned long caller)  in __do_kmalloc_node()
       478  int node, unsigned long caller)  in __kmalloc_node_track_caller()
       535  void *slob_alloc_node(struct kmem_cache *c, gfp_t flags, int node)  in slob_alloc_node()
       570  void *__kmalloc_node(size_t size, gfp_t gfp, int node)  in __kmalloc_node()
       576  void *kmem_cache_alloc_node(struct kmem_cache *cachep, gfp_t gfp, int node)  in kmem_cache_alloc_node()
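The last two slob.c entries are the allocator's exported node-aware entry points; slab.c and slub.c define the same interface. A minimal caller sketch, assuming a cache created elsewhere (the foo type, cache, and helper names are illustrative, not part of mm/):

    #include <linux/slab.h>
    #include <linux/gfp.h>

    struct foo {                    /* illustrative object type */
            int val;
    };

    /* Hypothetical cache, assumed created with kmem_cache_create() at init. */
    static struct kmem_cache *foo_cache;

    static struct foo *foo_alloc_on(int nid)
    {
            /* Object memory comes from a slab backed by pages on node
             * nid when that node can satisfy the request. */
            return kmem_cache_alloc_node(foo_cache, GFP_KERNEL, nid);
    }

    static void *foo_buf_on(size_t len, int nid)
    {
            /* Untyped variant of the same interface (see __kmalloc_node() above). */
            return kmalloc_node(len, GFP_KERNEL, nid);
    }

Objects are released with kmem_cache_free()/kfree() as usual; the node argument is a placement preference, not a hard constraint.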
|
D | slub.c |
       996  static inline unsigned long slabs_node(struct kmem_cache *s, int node)  in slabs_node()
      1008  static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects)  in inc_slabs_node()
      1023  static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects)  in dec_slabs_node()
      1248  static inline unsigned long slabs_node(struct kmem_cache *s, int node)  in slabs_node()
      1252  static inline void inc_slabs_node(struct kmem_cache *s, int node,  in inc_slabs_node()
      1254  static inline void dec_slabs_node(struct kmem_cache *s, int node,  in dec_slabs_node()
      1317  gfp_t flags, int node, struct kmem_cache_order_objects oo)  in alloc_slab_page()
      1338  static struct page *allocate_slab(struct kmem_cache *s, gfp_t flags, int node)  in allocate_slab()
      1409  static struct page *new_slab(struct kmem_cache *s, gfp_t flags, int node)  in new_slab()
      1725  static void *get_partial(struct kmem_cache *s, gfp_t flags, int node,  in get_partial()
      [all …]
|
D | vmalloc.c |
       347  int node, gfp_t gfp_mask)  in alloc_vmap_area()
       801  int node, err;  in new_vmap_block()  local
      1096  void *vm_map_ram(struct page **pages, unsigned int count, int node, pgprot_t prot)  in vm_map_ram()
      1311  unsigned long end, int node, gfp_t gfp_mask, const void *caller)  in __get_vm_area_node()
      1560  pgprot_t prot, int node)  in __vmalloc_area_node()
      1634  pgprot_t prot, int node, const void *caller)  in __vmalloc_node_range()
      1691  int node, const void *caller)  in __vmalloc_node()
      1705  int node, gfp_t flags)  in __vmalloc_node_flags()
      1779  void *vmalloc_node(unsigned long size, int node)  in vmalloc_node()
      1798  void *vzalloc_node(unsigned long size, int node)  in vzalloc_node()
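vmalloc_node() and vzalloc_node() at the end of this block are the exported node-preferring variants of vmalloc()/vzalloc(). A minimal usage sketch (the buffer size and node choice are illustrative):

    #include <linux/vmalloc.h>
    #include <linux/topology.h>

    static void *make_table(size_t bytes)
    {
            /* Zeroed, virtually contiguous buffer, preferring pages on
             * the node the caller is currently running on. */
            void *tbl = vzalloc_node(bytes, numa_node_id());

            if (!tbl)
                    return NULL;

            return tbl;     /* released later with vfree(tbl) */
    }

The node argument steers page placement through __vmalloc_area_node() above; allocation still succeeds if the preferred node is out of pages.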
|
D | migrate.c |
      1184  int node;  member
      1339  int node;  in do_pages_move()  local
      1609  bool migrate_ratelimited(int node)  in migrate_ratelimited()
      1711  int node)  in migrate_misplaced_page()
      1770  struct page *page, int node)  in migrate_misplaced_transhuge_page()
|
D | hugetlb.c |
       670  #define for_each_node_mask_to_alloc(hs, nr_nodes, node, mask) \  argument
       676  #define for_each_node_mask_to_free(hs, nr_nodes, node, mask) \  argument
       799  int nr_nodes, node;  in alloc_fresh_gigantic_page()  local
      1031  int nr_nodes, node;  in alloc_fresh_huge_page()  local
      1059  int nr_nodes, node;  in free_pool_huge_page()  local
      1465  int nr_nodes, node;  in alloc_bootmem_huge_page()  local
      1621  int nr_nodes, node;  in adjust_pool_surplus()  local
      2041  static void hugetlb_unregister_node(struct node *node)  in hugetlb_unregister_node()
      2085  static void hugetlb_register_node(struct node *node)  in hugetlb_register_node()
      2122  struct node *node = node_devices[nid];  in hugetlb_register_all_nodes()  local
      [all …]
|
D | mempolicy.c |
       129  int node;  in get_task_policy()  local
       371  int node = first_node(pol->w.user_nodemask);  in mpol_rebind_preferred()  local
       984  static struct page *new_node_page(struct page *page, unsigned long node, int **x)  in new_node_page()
      1739  int node = numa_mem_id();  in mempolicy_slab_node()  local
      2005  unsigned long addr, int node)  in alloc_pages_vma()
      2342  static void sp_node_init(struct sp_node *node, unsigned long start,  in sp_node_init()
|
D | quicklist.c | 29 int node = numa_node_id(); in max_pages() local
|
D | kmemcheck.c | 8 void kmemcheck_alloc_shadow(struct page *page, int order, gfp_t flags, int node) in kmemcheck_alloc_shadow()
|
D | memory_hotplug.c |
       248  int node = pgdat->node_id;  in register_page_bootmem_info_node()  local
       950  static void node_states_set_node(int node, struct memory_notify *arg)  in node_states_set_node()
      1657  static void node_states_clear_node(int node, struct memory_notify *arg)  in node_states_clear_node()
      1676  int ret, drain, retry_max, node;  in __offline_pages()  local
|
D | workingset.c | 309 struct radix_tree_node *node; in shadow_lru_isolate() local
|
D | page_isolation.c | 302 int node = next_online_node(page_to_nid(page)); in alloc_migrate_target() local
|
D | vmpressure.c | 136 struct list_head node; member
|
D | compaction.c |
      1540  int compaction_register_node(struct node *node)  in compaction_register_node()
      1545  void compaction_unregister_node(struct node *node)  in compaction_unregister_node()
|
D | page_alloc.c |
      3544  static int find_next_best_node(int node, nodemask_t *used_node_mask)  in find_next_best_node()
      3596  static void build_zonelists_in_node_order(pg_data_t *pgdat, int node)  in build_zonelists_in_node_order()
      3633  int pos, j, node;  in build_zonelists_in_zone_order()  local
      3690  int j, node, load;  in build_zonelists()  local
      3760  int local_memory_node(int node)  in local_memory_node()
      3781  int node, local_node;  in build_zonelists()  local
      4483  bool __meminit early_pfn_in_nid(unsigned long pfn, int node)  in early_pfn_in_nid()
      5039  unsigned int node;  in setup_nr_node_ids()  local
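The build_zonelists*() definitions above construct the per-node fallback order that the page allocator walks whenever a caller asks for pages on a particular node. A minimal sketch of such a caller, not taken from page_alloc.c:

    #include <linux/gfp.h>
    #include <linux/topology.h>

    static struct page *grab_local_page(void)
    {
            int nid = numa_mem_id();        /* nearest node with memory */

            /* If node nid is short on free pages, the allocator falls
             * back along the zonelist built by build_zonelists(). */
            return alloc_pages_node(nid, GFP_KERNEL | __GFP_ZERO, 0);
    }

The page is returned with __free_page() when no longer needed.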
|
D | nommu.c |
       354  void *vmalloc_node(unsigned long size, int node)  in vmalloc_node()
       372  void *vzalloc_node(unsigned long size, int node)  in vzalloc_node()
       445  void *vm_map_ram(struct page **pages, unsigned int count, int node, pgprot_t prot)  in vm_map_ram()
|
D | huge_memory.c |
      2333  int node)  in khugepaged_alloc_page()
      2402  int node)  in khugepaged_alloc_page()
      2427  int node)  in collapse_huge_page()
      2565  int node = NUMA_NO_NODE;  in khugepaged_scan_pmd()  local
|
D | ksm.c |
       136  struct rb_node node; /* when node of stable tree */  member
       173  struct rb_node node; /* when node of unstable tree */  member
      1997  struct rb_node *node;  in ksm_check_stable_tree()  local
|
D | memcontrol.c |
      1863  int node;  in mem_cgroup_select_victim_node()  local
      3807  int node, int zid, enum lru_list lru)  in mem_cgroup_force_empty_list()
      3858  int node, zid;  in mem_cgroup_reparent_charges()  local
      5267  static int alloc_mem_cgroup_per_zone_info(struct mem_cgroup *memcg, int node)  in alloc_mem_cgroup_per_zone_info()
      5297  static void free_mem_cgroup_per_zone_info(struct mem_cgroup *memcg, int node)  in free_mem_cgroup_per_zone_info()
      5338  int node;  in __mem_cgroup_free()  local
      5377  int tmp, node, zone;  in mem_cgroup_soft_limit_tree_init()  local
      5401  int node;  in mem_cgroup_css_alloc()  local
|
D | truncate.c | 29 struct radix_tree_node *node; in clear_exceptional_entry() local
|
D | vmstat.c |
       693  loff_t node = *pos;  in frag_start()  local
      1381  static void vmstat_cpu_dead(int node)  in vmstat_cpu_dead()
|
D | slab.h | 345 static inline struct kmem_cache_node *get_node(struct kmem_cache *s, int node) in get_node()
|
D | filemap.c |
       115  struct radix_tree_node *node;  in page_cache_tree_delete()  local
       506  struct radix_tree_node *node;  in page_cache_tree_insert()  local
|