Searched refs: node (Results 1 – 25 of 33), sorted by relevance

/mm/
interval_tree.c
29 void vma_interval_tree_insert_after(struct vm_area_struct *node, in vma_interval_tree_insert_after() argument
35 unsigned long last = vma_last_pgoff(node); in vma_interval_tree_insert_after()
37 VM_BUG_ON_VMA(vma_start_pgoff(node) != vma_start_pgoff(prev), node); in vma_interval_tree_insert_after()
56 node->shared.rb_subtree_last = last; in vma_interval_tree_insert_after()
57 rb_link_node(&node->shared.rb, &parent->shared.rb, link); in vma_interval_tree_insert_after()
58 rb_insert_augmented(&node->shared.rb, root, in vma_interval_tree_insert_after()
76 void anon_vma_interval_tree_insert(struct anon_vma_chain *node, in INTERVAL_TREE_DEFINE()
80 node->cached_vma_start = avc_start_pgoff(node); in INTERVAL_TREE_DEFINE()
81 node->cached_vma_last = avc_last_pgoff(node); in INTERVAL_TREE_DEFINE()
83 __anon_vma_interval_tree_insert(node, root); in INTERVAL_TREE_DEFINE()
[all …]
sparse-vmemmap.c
39 static void * __ref __earlyonly_bootmem_alloc(int node, in __earlyonly_bootmem_alloc() argument
45 BOOTMEM_ALLOC_ACCESSIBLE, node); in __earlyonly_bootmem_alloc()
51 void * __meminit vmemmap_alloc_block(unsigned long size, int node) in vmemmap_alloc_block() argument
57 if (node_state(node, N_HIGH_MEMORY)) in vmemmap_alloc_block()
59 node, GFP_KERNEL | __GFP_ZERO | __GFP_REPEAT, in vmemmap_alloc_block()
69 return __earlyonly_bootmem_alloc(node, size, size, in vmemmap_alloc_block()
74 static void * __meminit alloc_block_buf(unsigned long size, int node) in alloc_block_buf() argument
79 return vmemmap_alloc_block(size, node); in alloc_block_buf()
84 return vmemmap_alloc_block(size, node); in alloc_block_buf()
154 void * __meminit __vmemmap_alloc_block_buf(unsigned long size, int node, in __vmemmap_alloc_block_buf() argument
[all …]
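
The sparse-vmemmap hits above are all about getting backing memory from a specific NUMA node. Below is a minimal sketch of that pattern using the stock allocators; alloc_on_node() is a hypothetical helper, and only kmalloc_node(), alloc_pages_node(), page_address() and get_order() from the regular kernel API are assumed:

#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/slab.h>

/* Hypothetical helper: prefer memory that lives on @node. */
static void *alloc_on_node(size_t size, int node)
{
        /* Large requests: take whole pages straight from @node's free lists. */
        if (size >= PAGE_SIZE) {
                struct page *page = alloc_pages_node(node,
                                GFP_KERNEL | __GFP_ZERO, get_order(size));
                return page ? page_address(page) : NULL;
        }

        /* Small requests: node-local slab allocation. */
        return kmalloc_node(size, GFP_KERNEL | __GFP_ZERO, node);
}
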
list_lru.c
49 return !!lru->node[0].memcg_lrus; in list_lru_memcg_aware()
112 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_add()
132 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_del()
167 struct list_lru_node *nlru = &lru->node[nid]; in __list_lru_count_one()
190 nlru = &lru->node[nid]; in list_lru_count_node()
201 struct list_lru_node *nlru = &lru->node[nid]; in __list_lru_walk_one()
396 if (memcg_init_list_lru_node(&lru->node[i])) in memcg_init_list_lru()
402 if (!lru->node[i].memcg_lrus) in memcg_init_list_lru()
404 memcg_destroy_list_lru_node(&lru->node[i]); in memcg_init_list_lru()
417 memcg_destroy_list_lru_node(&lru->node[i]); in memcg_destroy_list_lru()
[all …]
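
The list_lru hits cover the per-NUMA-node LRU lists used by shrinkers; lru->node[nid] selects the per-node list. A rough sketch of the public side of that API (list_lru_init(), list_lru_add(), list_lru_count_node()) follows; struct demo_item and the demo_* functions are made up for illustration:

#include <linux/list_lru.h>

struct demo_item {                      /* hypothetical object kept on an LRU */
        struct list_head lru;
};

static struct list_lru demo_lru;        /* keeps one list per NUMA node */

static int demo_init(void)
{
        return list_lru_init(&demo_lru);
}

static void demo_track(struct demo_item *item)
{
        /* list_lru_add() routes the item to the list of its home node. */
        list_lru_add(&demo_lru, &item->lru);
}

static unsigned long demo_count_on(int nid)
{
        return list_lru_count_node(&demo_lru, nid);
}
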
slab.c
211 int node, struct list_head *list);
532 int node = __this_cpu_read(slab_reap_node); in next_reap_node() local
534 node = next_node_in(node, node_online_map); in next_reap_node()
535 __this_cpu_write(slab_reap_node, node); in next_reap_node()
585 static struct array_cache *alloc_arraycache(int node, int entries, in alloc_arraycache() argument
591 ac = kmalloc_node(memsize, gfp, node); in alloc_arraycache()
641 static inline struct alien_cache **alloc_alien_cache(int node, in alloc_alien_cache() argument
678 static struct alien_cache *__alloc_alien_cache(int node, int entries, in __alloc_alien_cache() argument
684 alc = kmalloc_node(memsize, gfp, node); in __alloc_alien_cache()
690 static struct alien_cache **alloc_alien_cache(int node, int limit, gfp_t gfp) in alloc_alien_cache() argument
[all …]
slob.c
190 static void *slob_new_pages(gfp_t gfp, int order, int node) in slob_new_pages() argument
195 if (node != NUMA_NO_NODE) in slob_new_pages()
196 page = __alloc_pages_node(node, gfp, order); in slob_new_pages()
268 static void *slob_alloc(size_t size, gfp_t gfp, int align, int node) in slob_alloc() argument
291 if (node != NUMA_NO_NODE && page_to_nid(sp) != node) in slob_alloc()
316 b = slob_new_pages(gfp & ~__GFP_ZERO, 0, node); in slob_alloc()
427 __do_kmalloc_node(size_t size, gfp_t gfp, int node, unsigned long caller) in __do_kmalloc_node() argument
441 m = slob_alloc(size + align, gfp, align, node); in __do_kmalloc_node()
449 size, size + align, gfp, node); in __do_kmalloc_node()
455 ret = slob_new_pages(gfp, order, node); in __do_kmalloc_node()
[all …]
slub.c
1009 static inline unsigned long slabs_node(struct kmem_cache *s, int node) in slabs_node() argument
1011 struct kmem_cache_node *n = get_node(s, node); in slabs_node()
1021 static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects) in inc_slabs_node() argument
1023 struct kmem_cache_node *n = get_node(s, node); in inc_slabs_node()
1036 static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects) in dec_slabs_node() argument
1038 struct kmem_cache_node *n = get_node(s, node); in dec_slabs_node()
1299 static inline unsigned long slabs_node(struct kmem_cache *s, int node) in slabs_node() argument
1303 static inline void inc_slabs_node(struct kmem_cache *s, int node, in inc_slabs_node() argument
1305 static inline void dec_slabs_node(struct kmem_cache *s, int node, in dec_slabs_node() argument
1398 gfp_t flags, int node, struct kmem_cache_order_objects oo) in alloc_slab_page() argument
[all …]
workingset.c
387 struct radix_tree_node *node; in shadow_lru_isolate() local
403 node = container_of(item, struct radix_tree_node, private_list); in shadow_lru_isolate()
404 mapping = node->private_data; in shadow_lru_isolate()
421 BUG_ON(!workingset_node_shadows(node)); in shadow_lru_isolate()
422 BUG_ON(workingset_node_pages(node)); in shadow_lru_isolate()
425 if (node->slots[i]) { in shadow_lru_isolate()
426 BUG_ON(!radix_tree_exceptional_entry(node->slots[i])); in shadow_lru_isolate()
427 node->slots[i] = NULL; in shadow_lru_isolate()
428 workingset_node_shadows_dec(node); in shadow_lru_isolate()
433 BUG_ON(workingset_node_shadows(node)); in shadow_lru_isolate()
[all …]
ksm.c
136 struct rb_node node; /* when node of stable tree */ member
173 struct rb_node node; /* when node of unstable tree */ member
510 rb_erase(&stable_node->node, in remove_node_from_stable_tree()
646 rb_erase(&rmap_item->node, in remove_rmap_item_from_tree()
747 struct stable_node, node); in remove_all_stable_nodes()
1151 stable_node = rb_entry(*new, struct stable_node, node); in stable_tree_search()
1207 rb_link_node(&page_node->node, parent, new); in stable_tree_search()
1208 rb_insert_color(&page_node->node, root); in stable_tree_search()
1216 rb_replace_node(&stable_node->node, &page_node->node, root); in stable_tree_search()
1219 rb_erase(&stable_node->node, root); in stable_tree_search()
[all …]
migrate.c
1385 int node; member
1394 while (pm->node != MAX_NUMNODES && pm->page != p) in new_page_node()
1397 if (pm->node == MAX_NUMNODES) in new_page_node()
1404 pm->node); in new_page_node()
1406 return __alloc_pages_node(pm->node, in new_page_node()
1429 for (pp = pm; pp->node != MAX_NUMNODES; pp++) { in do_move_page_to_node_array()
1453 if (err == pp->node) in do_move_page_to_node_array()
1538 int node; in do_pages_move() local
1545 if (get_user(node, nodes + j + chunk_start)) in do_pages_move()
1549 if (node < 0 || node >= MAX_NUMNODES) in do_pages_move()
[all …]
vmalloc.c
366 int node, gfp_t gfp_mask) in alloc_vmap_area() argument
381 gfp_mask & GFP_RECLAIM_MASK, node); in alloc_vmap_area()
858 int node, err; in new_vmap_block() local
861 node = numa_node_id(); in new_vmap_block()
864 gfp_mask & GFP_RECLAIM_MASK, node); in new_vmap_block()
870 node, gfp_mask); in new_vmap_block()
1151 void *vm_map_ram(struct page **pages, unsigned int count, int node, pgprot_t prot) in vm_map_ram() argument
1165 VMALLOC_START, VMALLOC_END, node, GFP_KERNEL); in vm_map_ram()
1366 unsigned long end, int node, gfp_t gfp_mask, const void *caller) in __get_vm_area_node() argument
1380 area = kzalloc_node(sizeof(*area), gfp_mask & GFP_RECLAIM_MASK, node); in __get_vm_area_node()
[all …]
hugetlb.c
1013 #define for_each_node_mask_to_alloc(hs, nr_nodes, node, mask) \ argument
1016 ((node = hstate_next_node_to_alloc(hs, mask)) || 1); \
1019 #define for_each_node_mask_to_free(hs, nr_nodes, node, mask) \ argument
1022 ((node = hstate_next_node_to_free(hs, mask)) || 1); \
1147 int nr_nodes, node; in alloc_fresh_gigantic_page() local
1149 for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) { in alloc_fresh_gigantic_page()
1150 page = alloc_fresh_gigantic_page_node(h, node); in alloc_fresh_gigantic_page()
1381 int nr_nodes, node; in alloc_fresh_huge_page() local
1384 for_each_node_mask_to_alloc(h, nr_nodes, node, nodes_allowed) { in alloc_fresh_huge_page()
1385 page = alloc_fresh_huge_page_node(h, node); in alloc_fresh_huge_page()
[all …]
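
The for_each_node_mask_to_alloc()/..._to_free() helpers above interleave huge page allocations round-robin across a nodemask. A minimal sketch of that round-robin step using the generic nodemask primitives (next_node_in(), node_online_map) is below; demo_pick_next_node() is hypothetical:

#include <linux/nodemask.h>
#include <linux/topology.h>

/* Hypothetical round-robin selector over all online nodes. */
static int demo_pick_next_node(int last_node)
{
        /* next_node_in() wraps back to the first set bit in the mask. */
        int node = next_node_in(last_node, node_online_map);

        if (node == MAX_NUMNODES)       /* empty mask; fall back to the local node */
                node = numa_node_id();

        return node;
}
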
khugepaged.c
751 khugepaged_alloc_page(struct page **hpage, gfp_t gfp, int node) in khugepaged_alloc_page() argument
755 *hpage = __alloc_pages_node(node, gfp, HPAGE_PMD_ORDER); in khugepaged_alloc_page()
815 khugepaged_alloc_page(struct page **hpage, gfp_t gfp, int node) in khugepaged_alloc_page() argument
934 int node, int referenced) in collapse_huge_page() argument
960 new_page = khugepaged_alloc_page(hpage, gfp, node); in collapse_huge_page()
1107 int node = NUMA_NO_NODE, unmapped = 0; in khugepaged_scan_pmd() local
1165 node = page_to_nid(page); in khugepaged_scan_pmd()
1166 if (khugepaged_scan_abort(node)) { in khugepaged_scan_pmd()
1170 khugepaged_node_load[node]++; in khugepaged_scan_pmd()
1211 node = khugepaged_find_target_node(); in khugepaged_scan_pmd()
[all …]
vmpressure.c
144 struct list_head node; member
155 list_for_each_entry(ev, &vmpr->events, node) { in vmpressure_event()
366 list_add(&ev->node, &vmpr->events); in vmpressure_register_event()
390 list_for_each_entry(ev, &vmpr->events, node) { in vmpressure_unregister_event()
393 list_del(&ev->node); in vmpressure_unregister_event()
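
vmpressure.c simply keeps registered listeners on a struct list_head and walks them with list_for_each_entry(). The same pattern in isolation, with a made-up demo_event structure and demo_* helpers (a real user would also hold a lock around list changes and traversal):

#include <linux/list.h>

struct demo_event {                     /* hypothetical listener record */
        struct list_head node;          /* links the record into demo_events */
        int level;
};

static LIST_HEAD(demo_events);

static void demo_register(struct demo_event *ev)
{
        list_add(&ev->node, &demo_events);
}

static void demo_unregister(struct demo_event *ev)
{
        list_del(&ev->node);
}

static int demo_count_at(int level)
{
        struct demo_event *ev;
        int n = 0;

        list_for_each_entry(ev, &demo_events, node)
                if (ev->level == level)
                        n++;
        return n;
}
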
filemap.c
116 struct radix_tree_node *node; in page_cache_tree_insert() local
121 &node, &slot); in page_cache_tree_insert()
135 if (node) in page_cache_tree_insert()
136 workingset_node_shadows_dec(node); in page_cache_tree_insert()
143 if (node) in page_cache_tree_insert()
144 workingset_node_pages_dec(node); in page_cache_tree_insert()
152 if (node) { in page_cache_tree_insert()
153 workingset_node_pages_inc(node); in page_cache_tree_insert()
162 if (!list_empty(&node->private_list)) in page_cache_tree_insert()
164 &node->private_list); in page_cache_tree_insert()
[all …]
truncate.c
30 struct radix_tree_node *node; in clear_exceptional_entry() local
47 if (!__radix_tree_lookup(&mapping->page_tree, index, &node, in clear_exceptional_entry()
54 if (!node) in clear_exceptional_entry()
56 workingset_node_shadows_dec(node); in clear_exceptional_entry()
64 if (!workingset_node_shadows(node) && in clear_exceptional_entry()
65 !list_empty(&node->private_list)) in clear_exceptional_entry()
67 &node->private_list); in clear_exceptional_entry()
68 __radix_tree_delete_node(&mapping->page_tree, node); in clear_exceptional_entry()
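
workingset.c, filemap.c and truncate.c all poke at the page-cache radix tree's internal nodes. For orientation, here is a minimal sketch of the plain radix tree API those files build on; the demo_tree and demo_* wrappers are hypothetical, and radix_tree_insert() can fail with -ENOMEM under memory pressure. (Kernels of this vintage use the raw radix tree; later kernels wrap it in the XArray.)

#include <linux/gfp.h>
#include <linux/radix-tree.h>

static RADIX_TREE(demo_tree, GFP_KERNEL);       /* hypothetical index -> item map */

static int demo_store(unsigned long index, void *item)
{
        return radix_tree_insert(&demo_tree, index, item);
}

static void *demo_find(unsigned long index)
{
        return radix_tree_lookup(&demo_tree, index);
}

static void *demo_erase(unsigned long index)
{
        return radix_tree_delete(&demo_tree, index);
}
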
mempolicy.c
128 int node; in get_task_policy() local
133 node = numa_node_id(); in get_task_policy()
134 if (node != NUMA_NO_NODE) { in get_task_policy()
135 pol = &preferred_node_policy[node]; in get_task_policy()
362 int node = first_node(pol->w.user_nodemask); in mpol_rebind_preferred() local
364 if (node_isset(node, *nodes)) { in mpol_rebind_preferred()
365 pol->v.preferred_node = node; in mpol_rebind_preferred()
967 static struct page *new_node_page(struct page *page, unsigned long node, int **x) in new_node_page() argument
971 node); in new_node_page()
973 return __alloc_pages_node(node, GFP_HIGHUSER_MOVABLE | in new_node_page()
[all …]
memory_hotplug.c
270 int node = pgdat->node_id; in register_page_bootmem_info_node() local
277 get_page_bootmem(node, page, NODE_INFO); in register_page_bootmem_info_node()
290 if (pfn_valid(pfn) && (early_pfn_to_nid(pfn) == node)) in register_page_bootmem_info_node()
1026 static void node_states_set_node(int node, struct memory_notify *arg) in node_states_set_node() argument
1029 node_set_state(node, N_NORMAL_MEMORY); in node_states_set_node()
1032 node_set_state(node, N_HIGH_MEMORY); in node_states_set_node()
1034 node_set_state(node, N_MEMORY); in node_states_set_node()
1855 static void node_states_clear_node(int node, struct memory_notify *arg) in node_states_clear_node() argument
1858 node_clear_state(node, N_NORMAL_MEMORY); in node_states_clear_node()
1862 node_clear_state(node, N_HIGH_MEMORY); in node_states_clear_node()
[all …]
quicklist.c
29 int node = numa_node_id(); in max_pages() local
30 struct zone *zones = NODE_DATA(node)->node_zones; in max_pages()
44 num_cpus_on_node = cpumask_weight(cpumask_of_node(node)); in max_pages()
cma_debug.c
18 struct hlist_node node; member
75 hlist_add_head(&mem->node, &cma->mem_head); in cma_add_to_cma_mem_list()
85 mem = hlist_entry(cma->mem_head.first, struct cma_mem, node); in cma_get_entry_from_list()
86 hlist_del_init(&mem->node); in cma_get_entry_from_list()
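
cma_debug.c keeps its records on an hlist protected by a lock. The bare hlist pattern looks like this; struct demo_mem, demo_head and the demo_* functions are made up, and locking is omitted:

#include <linux/list.h>

struct demo_mem {                       /* hypothetical record */
        struct hlist_node node;
        unsigned long pfn;
};

static HLIST_HEAD(demo_head);

static void demo_push(struct demo_mem *mem)
{
        hlist_add_head(&mem->node, &demo_head);
}

static struct demo_mem *demo_pop(void)
{
        struct demo_mem *mem = NULL;

        if (!hlist_empty(&demo_head)) {
                mem = hlist_entry(demo_head.first, struct demo_mem, node);
                hlist_del_init(&mem->node);
        }
        return mem;
}
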
page_alloc.c
1314 static inline bool __meminit meminit_pfn_in_nid(unsigned long pfn, int node, in meminit_pfn_in_nid() argument
1320 if (nid >= 0 && nid != node) in meminit_pfn_in_nid()
1326 static inline bool __meminit early_pfn_in_nid(unsigned long pfn, int node) in early_pfn_in_nid() argument
1328 return meminit_pfn_in_nid(pfn, node, &early_pfnnid_cache); in early_pfn_in_nid()
1333 static inline bool __meminit early_pfn_in_nid(unsigned long pfn, int node) in early_pfn_in_nid() argument
1337 static inline bool __meminit meminit_pfn_in_nid(unsigned long pfn, int node, in meminit_pfn_in_nid() argument
2602 if (z->node != numa_node_id()) in zone_statistics()
2605 if (z->node == preferred_zone->node) in zone_statistics()
4648 static int find_next_best_node(int node, nodemask_t *used_node_mask) in find_next_best_node() argument
4656 if (!node_isset(node, *used_node_mask)) { in find_next_best_node()
[all …]
backing-dev.c
402 struct rb_node **node, *parent; in wb_congested_get_create() local
407 node = &bdi->cgwb_congested_tree.rb_node; in wb_congested_get_create()
410 while (*node != NULL) { in wb_congested_get_create()
411 parent = *node; in wb_congested_get_create()
415 node = &parent->rb_left; in wb_congested_get_create()
417 node = &parent->rb_right; in wb_congested_get_create()
426 rb_link_node(&congested->rb_node, parent, node); in wb_congested_get_create()
zswap.c
262 struct rb_node *node = root->rb_node; in zswap_rb_search() local
265 while (node) { in zswap_rb_search()
266 entry = rb_entry(node, struct zswap_entry, rbnode); in zswap_rb_search()
268 node = node->rb_left; in zswap_rb_search()
270 node = node->rb_right; in zswap_rb_search()
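
zswap.c, like ksm.c and backing-dev.c above, walks and links red-black tree nodes by hand with rb_entry(), rb_link_node() and rb_insert_color(). A self-contained sketch of that standard pattern follows; struct demo_entry and its key are hypothetical, and duplicate keys are not handled:

#include <linux/rbtree.h>

struct demo_entry {                     /* hypothetical keyed entry */
        struct rb_node rbnode;
        unsigned long key;
};

static struct demo_entry *demo_search(struct rb_root *root, unsigned long key)
{
        struct rb_node *node = root->rb_node;

        while (node) {
                struct demo_entry *e = rb_entry(node, struct demo_entry, rbnode);

                if (key < e->key)
                        node = node->rb_left;
                else if (key > e->key)
                        node = node->rb_right;
                else
                        return e;
        }
        return NULL;
}

static void demo_insert(struct rb_root *root, struct demo_entry *new)
{
        struct rb_node **link = &root->rb_node, *parent = NULL;

        while (*link) {
                struct demo_entry *e;

                parent = *link;
                e = rb_entry(parent, struct demo_entry, rbnode);
                link = new->key < e->key ? &parent->rb_left : &parent->rb_right;
        }
        rb_link_node(&new->rbnode, parent, link);
        rb_insert_color(&new->rbnode, root);
}
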
kmemcheck.c
8 void kmemcheck_alloc_shadow(struct page *page, int order, gfp_t flags, int node) in kmemcheck_alloc_shadow() argument
20 shadow = alloc_pages_node(node, flags | __GFP_NOTRACK, order); in kmemcheck_alloc_shadow()
memcontrol.c
1324 int node; in mem_cgroup_select_victim_node() local
1327 node = memcg->last_scanned_node; in mem_cgroup_select_victim_node()
1329 node = next_node_in(node, memcg->scan_nodes); in mem_cgroup_select_victim_node()
1335 if (unlikely(node == MAX_NUMNODES)) in mem_cgroup_select_victim_node()
1336 node = numa_node_id(); in mem_cgroup_select_victim_node()
1338 memcg->last_scanned_node = node; in mem_cgroup_select_victim_node()
1339 return node; in mem_cgroup_select_victim_node()
4115 static int alloc_mem_cgroup_per_node_info(struct mem_cgroup *memcg, int node) in alloc_mem_cgroup_per_node_info() argument
4118 int tmp = node; in alloc_mem_cgroup_per_node_info()
4127 if (!node_state(node, N_NORMAL_MEMORY)) in alloc_mem_cgroup_per_node_info()
[all …]
vmstat.c
791 unsigned long sum_zone_node_page_state(int node, in sum_zone_node_page_state() argument
794 struct zone *zones = NODE_DATA(node)->node_zones; in sum_zone_node_page_state()
1101 loff_t node = *pos; in frag_start() local
1104 pgdat && node; in frag_start()
1106 --node; in frag_start()
1736 static void vmstat_cpu_dead(int node) in vmstat_cpu_dead() argument
1742 if (cpu_to_node(cpu) == node) in vmstat_cpu_dead()
1745 node_clear_state(node, N_CPU); in vmstat_cpu_dead()
