Searched refs:nid (Results 1 – 23 of 23) sorted by relevance

/mm/
page_ext.c
122 static int __init alloc_node_page_ext(int nid) in alloc_node_page_ext() argument
128 nr_pages = NODE_DATA(nid)->node_spanned_pages; in alloc_node_page_ext()
137 if (!IS_ALIGNED(node_start_pfn(nid), MAX_ORDER_NR_PAGES) || in alloc_node_page_ext()
138 !IS_ALIGNED(node_end_pfn(nid), MAX_ORDER_NR_PAGES)) in alloc_node_page_ext()
145 BOOTMEM_ALLOC_ACCESSIBLE, nid); in alloc_node_page_ext()
148 NODE_DATA(nid)->node_page_ext = base; in alloc_node_page_ext()
156 int nid, fail; in page_ext_init_flatmem() local
161 for_each_online_node(nid) { in page_ext_init_flatmem()
162 fail = alloc_node_page_ext(nid); in page_ext_init_flatmem()
192 static void *__meminit alloc_page_ext(size_t size, int nid) in alloc_page_ext() argument
[all …]
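
The hits above show the flatmem page_ext bring-up pattern: size a per-node table from NODE_DATA(nid)->node_spanned_pages, allocate it near that node with the boot-time memblock allocator, and drive the whole thing with for_each_online_node(). A minimal sketch of that shape, assuming an era that has memblock_virt_alloc_try_nid_nopanic(); the node_tables array, entry size, and helper names are illustrative, not the kernel's:

#include <linux/bootmem.h>
#include <linux/kernel.h>
#include <linux/mmzone.h>
#include <linux/nodemask.h>

static void *node_tables[MAX_NUMNODES];    /* illustrative stand-in for node_page_ext */

static int __init alloc_node_table(int nid)
{
    unsigned long nr_pages = NODE_DATA(nid)->node_spanned_pages;
    void *base;

    if (!nr_pages)
        return 0;

    /* Prefer memory on (or near) the node the table describes. */
    base = memblock_virt_alloc_try_nid_nopanic(
            nr_pages * sizeof(unsigned long), PAGE_SIZE,
            __pa(MAX_DMA_ADDRESS), BOOTMEM_ALLOC_ACCESSIBLE, nid);
    if (!base)
        return -ENOMEM;
    node_tables[nid] = base;
    return 0;
}

static void __init tables_init(void)
{
    int nid;

    for_each_online_node(nid)
        if (alloc_node_table(nid))
            panic("no memory for node %d table\n", nid);
}
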
sparse.c
51 static void set_section_nid(unsigned long section_nr, int nid) in set_section_nid() argument
53 section_to_node_table[section_nr] = nid; in set_section_nid()
56 static inline void set_section_nid(unsigned long section_nr, int nid) in set_section_nid() argument
62 static struct mem_section noinline __init_refok *sparse_index_alloc(int nid) in sparse_index_alloc() argument
69 if (node_state(nid, N_HIGH_MEMORY)) in sparse_index_alloc()
70 section = kzalloc_node(array_size, GFP_KERNEL, nid); in sparse_index_alloc()
74 section = memblock_virt_alloc_node(array_size, nid); in sparse_index_alloc()
80 static int __meminit sparse_index_init(unsigned long section_nr, int nid) in sparse_index_init() argument
88 section = sparse_index_alloc(nid); in sparse_index_init()
97 static inline int sparse_index_init(unsigned long section_nr, int nid) in sparse_index_init() argument
[all …]
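
sparse_index_alloc() above is the usual two-phase, node-aware allocation: once the slab is up, ask kzalloc_node() for memory on the node being described (falling back to plain kzalloc() when that node has no memory of its own), otherwise take the boot-time memblock path. A condensed sketch of that shape:

#include <linux/bootmem.h>
#include <linux/nodemask.h>
#include <linux/slab.h>

static void * __init alloc_on_node(size_t size, int nid)
{
    if (slab_is_available()) {
        /* Only request node-local memory if the node has any. */
        if (node_state(nid, N_HIGH_MEMORY))
            return kzalloc_node(size, GFP_KERNEL, nid);
        return kzalloc(size, GFP_KERNEL);
    }
    /* Early boot: slab not up yet, use the memblock allocator. */
    return memblock_virt_alloc_node(size, nid);
}
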
memblock.c
125 phys_addr_t size, phys_addr_t align, int nid, in __memblock_find_range_bottom_up() argument
131 for_each_free_mem_range(i, nid, flags, &this_start, &this_end, NULL) { in __memblock_find_range_bottom_up()
159 phys_addr_t size, phys_addr_t align, int nid, in __memblock_find_range_top_down() argument
165 for_each_free_mem_range_reverse(i, nid, flags, &this_start, &this_end, in __memblock_find_range_top_down()
197 phys_addr_t end, int nid, ulong flags) in memblock_find_in_range_node() argument
209 nid, flags); in memblock_find_in_range_node()
212 nid, flags); in memblock_find_in_range_node()
457 int nid, unsigned long flags) in memblock_insert_region() argument
466 memblock_set_region_node(rgn, nid); in memblock_insert_region()
489 int nid, unsigned long flags) in memblock_add_range() argument
[all …]
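
Both search directions filter free ranges by nid through the for_each_free_mem_range iterators. A sketch of the bottom-up variant, assuming the era's six-argument iterator; the candidate arithmetic is simplified relative to the real __memblock_find_range_bottom_up():

#include <linux/kernel.h>
#include <linux/memblock.h>

static phys_addr_t __init find_range_bottom_up(phys_addr_t size,
                                               phys_addr_t align,
                                               int nid, ulong flags)
{
    phys_addr_t this_start, this_end, cand;
    u64 i;

    /* nid restricts the walk to free ranges on that node. */
    for_each_free_mem_range(i, nid, flags, &this_start, &this_end, NULL) {
        cand = round_up(this_start, align);
        if (cand < this_end && this_end - cand >= size)
            return cand;
    }
    return 0;    /* nothing found */
}
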
memory_hotplug.c
330 int nid = zone->zone_pgdat->node_id; in fix_zone_id() local
334 set_page_links(pfn_to_page(pfn), zid, nid, pfn); in fix_zone_id()
448 int nid = pgdat->node_id; in __add_zone() local
463 memmap_init_zone(nr_pages, nid, zone_type, in __add_zone()
476 static int __meminit __add_section(int nid, struct zone *zone, in __add_section() argument
494 return register_new_memory(nid, __pfn_to_section(phys_start_pfn)); in __add_section()
503 int __ref __add_pages(int nid, struct zone *zone, unsigned long phys_start_pfn, in __add_pages() argument
514 err = __add_section(nid, zone, section_nr_to_pfn(i)); in __add_pages()
533 static int find_smallest_section_pfn(int nid, struct zone *zone, in find_smallest_section_pfn() argument
545 if (unlikely(pfn_to_nid(start_pfn) != nid)) in find_smallest_section_pfn()
[all …]
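
fix_zone_id() above is the re-stamping step of onlining: every page in the range gets its (zone, node) links rewritten so page_to_nid() and page_zone() answer for the new placement. A minimal sketch of that loop:

#include <linux/mm.h>
#include <linux/mmzone.h>

static void relink_pages(struct zone *zone, unsigned long start_pfn,
                         unsigned long nr_pages)
{
    enum zone_type zid = zone_idx(zone);
    int nid = zone->zone_pgdat->node_id;
    unsigned long pfn;

    for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++)
        set_page_links(pfn_to_page(pfn), zid, nid, pfn);
}
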
hugetlb.c
855 int nid = page_to_nid(page); in enqueue_huge_page() local
856 list_move(&page->lru, &h->hugepage_freelists[nid]); in enqueue_huge_page()
858 h->free_huge_pages_node[nid]++; in enqueue_huge_page()
862 static struct page *dequeue_huge_page_node(struct hstate *h, int nid) in dequeue_huge_page_node() argument
866 list_for_each_entry(page, &h->hugepage_freelists[nid], lru) in dequeue_huge_page_node()
873 if (&h->hugepage_freelists[nid] == &page->lru) in dequeue_huge_page_node()
879 h->free_huge_pages_node[nid]--; in dequeue_huge_page_node()
956 static int next_node_allowed(int nid, nodemask_t *nodes_allowed) in next_node_allowed() argument
958 nid = next_node(nid, *nodes_allowed); in next_node_allowed()
959 if (nid == MAX_NUMNODES) in next_node_allowed()
[all …]
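
The truncated next_node_allowed() above is hugetlb's round-robin node stepper: advance through the allowed nodemask and wrap when next_node() runs off the end. Reconstructed from the visible lines plus the standard wrap idiom:

#include <linux/mmdebug.h>
#include <linux/nodemask.h>

static int next_node_allowed(int nid, nodemask_t *nodes_allowed)
{
    nid = next_node(nid, *nodes_allowed);
    if (nid == MAX_NUMNODES)            /* ran off the end: wrap */
        nid = first_node(*nodes_allowed);
    VM_BUG_ON(nid >= MAX_NUMNODES);
    return nid;
}
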
page_alloc.c
319 int nid = early_pfn_to_nid(pfn); in early_page_uninitialised() local
321 if (node_online(nid) && pfn >= NODE_DATA(nid)->first_deferred_pfn) in early_page_uninitialised()
327 static inline bool early_page_nid_uninitialised(unsigned long pfn, int nid) in early_page_nid_uninitialised() argument
329 if (pfn >= NODE_DATA(nid)->first_deferred_pfn) in early_page_nid_uninitialised()
366 static inline bool early_page_nid_uninitialised(unsigned long pfn, int nid) in early_page_nid_uninitialised() argument
953 unsigned long zone, int nid) in __init_single_page() argument
955 set_page_links(page, zone, nid, pfn); in __init_single_page()
969 int nid) in __init_single_pfn() argument
971 return __init_single_page(pfn_to_page(pfn), pfn, zone, nid); in __init_single_pfn()
978 int nid, zid; in init_reserved_page() local
[all …]
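
The early_page_uninitialised() checks above implement deferred struct-page init: each node records a first_deferred_pfn, and any pfn at or past it has no initialised struct page yet. A sketch, assuming CONFIG_DEFERRED_STRUCT_PAGE_INIT so the pg_data_t field exists:

#include <linux/mm.h>
#include <linux/mmzone.h>

static inline bool pfn_still_deferred(unsigned long pfn)
{
    int nid = early_pfn_to_nid(pfn);

    /* Pages past the node's watermark get initialised later, in parallel. */
    return node_online(nid) &&
           pfn >= NODE_DATA(nid)->first_deferred_pfn;
}
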
ksm.c
145 int nid; member
166 int nid; /* when node of unstable tree */ member
515 root_stable_tree + NUMA(stable_node->nid)); in remove_node_from_stable_tree()
652 root_unstable_tree + NUMA(rmap_item->nid)); in remove_rmap_item_from_tree()
747 int nid; in remove_all_stable_nodes() local
750 for (nid = 0; nid < ksm_nr_node_ids; nid++) { in remove_all_stable_nodes()
751 while (root_stable_tree[nid].rb_node) { in remove_all_stable_nodes()
752 stable_node = rb_entry(root_stable_tree[nid].rb_node, in remove_all_stable_nodes()
1152 int nid; in stable_tree_search() local
1166 nid = get_kpfn_nid(page_to_pfn(page)); in stable_tree_search()
[all …]
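
When merging across nodes is disabled, KSM keeps one stable-tree root per node and routes each page by the node of its pfn, as the root_stable_tree + nid indexing above shows. A minimal sketch of that layout; the names are illustrative:

#include <linux/mmzone.h>
#include <linux/rbtree.h>

static struct rb_root node_trees[MAX_NUMNODES];    /* one tree per node */

static struct rb_root *tree_for_kpfn(unsigned long kpfn)
{
    /* Pages only ever join the tree of their own node. */
    return node_trees + pfn_to_nid(kpfn);
}
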
list_lru.c
107 int nid = page_to_nid(virt_to_page(item)); in list_lru_add() local
108 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_add()
127 int nid = page_to_nid(virt_to_page(item)); in list_lru_del() local
128 struct list_lru_node *nlru = &lru->node[nid]; in list_lru_del()
161 int nid, int memcg_idx) in __list_lru_count_one() argument
163 struct list_lru_node *nlru = &lru->node[nid]; in __list_lru_count_one()
176 int nid, struct mem_cgroup *memcg) in list_lru_count_one() argument
178 return __list_lru_count_one(lru, nid, memcg_cache_id(memcg)); in list_lru_count_one()
182 unsigned long list_lru_count_node(struct list_lru *lru, int nid) in list_lru_count_node() argument
186 nlru = &lru->node[nid]; in list_lru_count_node()
[all …]
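
list_lru routes each item to a per-node sublist by the node of its backing page, so add and del only take that node's lock. A sketch of the non-memcg add path, assuming the era's list_lru_node layout (a lock plus an embedded list_lru_one):

#include <linux/list_lru.h>
#include <linux/mm.h>

static bool lru_add_sketch(struct list_lru *lru, struct list_head *item)
{
    int nid = page_to_nid(virt_to_page(item));
    struct list_lru_node *nlru = &lru->node[nid];
    bool added = false;

    spin_lock(&nlru->lock);
    if (list_empty(item)) {            /* not on any list yet */
        list_add_tail(item, &nlru->lru.list);
        nlru->lru.nr_items++;
        added = true;
    }
    spin_unlock(&nlru->lock);
    return added;
}
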
mempolicy.c
492 int nid; in queue_pages_pte_range() local
513 nid = page_to_nid(page); in queue_pages_pte_range()
514 if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) in queue_pages_pte_range()
536 int nid; in queue_pages_hugetlb() local
546 nid = page_to_nid(page); in queue_pages_hugetlb()
547 if (node_isset(nid, *qp->nmask) == !!(flags & MPOL_MF_INVERT)) in queue_pages_hugetlb()
1692 unsigned nid, next; in interleave_nodes() local
1695 nid = me->il_next; in interleave_nodes()
1696 next = next_node(nid, policy->v.nodes); in interleave_nodes()
1701 return nid; in interleave_nodes()
[all …]
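
interleave_nodes() above hands out nodes from the policy's mask in order, remembering the successor in the task so consecutive allocations spread across nodes. Reconstructed from the visible lines, assuming the era's task_struct il_next field:

#include <linux/mempolicy.h>
#include <linux/nodemask.h>
#include <linux/sched.h>

static unsigned interleave_nodes(struct mempolicy *policy)
{
    unsigned nid, next;
    struct task_struct *me = current;

    nid = me->il_next;
    next = next_node(nid, policy->v.nodes);
    if (next >= MAX_NUMNODES)
        next = first_node(policy->v.nodes);    /* wrap around */
    if (next < MAX_NUMNODES)
        me->il_next = next;
    return nid;
}
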
vmscan.c
280 int nid = shrinkctl->nid; in do_shrink_slab() local
294 nr = atomic_long_xchg(&shrinker->nr_deferred[nid], 0); in do_shrink_slab()
380 &shrinker->nr_deferred[nid]); in do_shrink_slab()
382 new_nr = atomic_long_read(&shrinker->nr_deferred[nid]); in do_shrink_slab()
384 trace_mm_shrink_slab_end(shrinker, nid, freed, nr, new_nr, total_scan); in do_shrink_slab()
417 static unsigned long shrink_slab(gfp_t gfp_mask, int nid, in shrink_slab() argument
445 .nid = nid, in shrink_slab()
453 sc.nid = 0; in shrink_slab()
464 void drop_slab_node(int nid) in drop_slab_node() argument
473 freed += shrink_slab(GFP_KERNEL, nid, memcg, in drop_slab_node()
[all …]
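
The nr_deferred lines above show shrinker backlog accounting: one atomic counter per node, drained with atomic_long_xchg() at the start of a pass and topped back up with whatever went unscanned. A sketch of just that bookkeeping:

#include <linux/atomic.h>

/* Take ownership of the work deferred on this node by earlier passes. */
static unsigned long grab_deferred(atomic_long_t *nr_deferred, int nid)
{
    return atomic_long_xchg(&nr_deferred[nid], 0);
}

/* Return what this pass could not scan, for the next caller to pick up. */
static void put_back_deferred(atomic_long_t *nr_deferred, int nid,
                              unsigned long leftover)
{
    atomic_long_add(leftover, &nr_deferred[nid]);
}
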
mm_init.c
27 int nid; in mminit_verify_zonelist() local
32 for_each_online_node(nid) { in mminit_verify_zonelist()
33 pg_data_t *pgdat = NODE_DATA(nid); in mminit_verify_zonelist()
52 listid > 0 ? "thisnode" : "general", nid, in mminit_verify_zonelist()
workingset.c
172 int zid, nid; in unpack_shadow() local
177 nid = entry & ((1UL << NODES_SHIFT) - 1); in unpack_shadow()
181 *zone = NODE_DATA(nid)->node_zones + zid; in unpack_shadow()
281 pages = node_present_pages(sc->nid); in count_shadow_nodes()
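unpack_shadow() above decodes a node id (and zone id) that were bit-packed into a shadow radix-tree entry; NODE_DATA(nid)->node_zones + zid then recovers the zone. A sketch of the decode, assuming zid sits below nid in the low bits, consistent with the lines shown:

#include <linux/mmzone.h>

static struct zone *shadow_to_zone(unsigned long entry)
{
    int zid, nid;

    zid = entry & ((1UL << ZONES_SHIFT) - 1);
    entry >>= ZONES_SHIFT;
    nid = entry & ((1UL << NODES_SHIFT) - 1);

    return NODE_DATA(nid)->node_zones + zid;
}
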
mmzone.c
19 int nid = next_online_node(pgdat->node_id); in next_online_pgdat() local
21 if (nid == MAX_NUMNODES) in next_online_pgdat()
23 return NODE_DATA(nid); in next_online_pgdat()
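next_online_pgdat() above is the whole iterator step: move to the next online node id and return its pg_data_t, or NULL past the last one. Completed from the visible lines:

#include <linux/mmzone.h>
#include <linux/nodemask.h>

struct pglist_data *next_online_pgdat(struct pglist_data *pgdat)
{
    int nid = next_online_node(pgdat->node_id);

    if (nid == MAX_NUMNODES)
        return NULL;                /* no more online nodes */
    return NODE_DATA(nid);
}
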
memcontrol.c
393 int nid = zone_to_nid(zone); in mem_cgroup_zone_zoneinfo() local
396 return &memcg->nodeinfo[nid]->zoneinfo[zid]; in mem_cgroup_zone_zoneinfo()
463 int nid = page_to_nid(page); in mem_cgroup_page_zoneinfo() local
466 return &memcg->nodeinfo[nid]->zoneinfo[zid]; in mem_cgroup_page_zoneinfo()
470 soft_limit_tree_node_zone(int nid, int zid) in soft_limit_tree_node_zone() argument
472 return &soft_limit_tree.rb_tree_per_node[nid]->rb_tree_per_zone[zid]; in soft_limit_tree_node_zone()
478 int nid = page_to_nid(page); in soft_limit_tree_from_page() local
481 return &soft_limit_tree.rb_tree_per_node[nid]->rb_tree_per_zone[zid]; in soft_limit_tree_from_page()
586 int nid, zid; in mem_cgroup_remove_from_trees() local
588 for_each_node(nid) { in mem_cgroup_remove_from_trees()
[all …]
compaction.c
1692 static void compact_node(int nid) in compact_node() argument
1700 __compact_pgdat(NODE_DATA(nid), &cc); in compact_node()
1706 int nid; in compact_nodes() local
1711 for_each_online_node(nid) in compact_nodes()
1712 compact_node(nid); in compact_nodes()
1741 int nid = dev->id; in sysfs_compact_node() local
1743 if (nid >= 0 && nid < nr_node_ids && node_online(nid)) { in sysfs_compact_node()
1747 compact_node(nid); in sysfs_compact_node()
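sysfs_compact_node() above shows the guard every user-supplied node id needs before use: a range check against nr_node_ids plus an online check. A sketch of that validation:

#include <linux/errno.h>
#include <linux/nodemask.h>

static int compact_one_node(int nid)
{
    if (nid < 0 || nid >= nr_node_ids || !node_online(nid))
        return -EINVAL;

    /* ... safe to touch NODE_DATA(nid) and kick compaction here ... */
    return 0;
}
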
oom_kill.c
215 int nid; in constrained_alloc() local
238 for_each_node_mask(nid, *oc->nodemask) in constrained_alloc()
239 *totalpages += node_spanned_pages(nid); in constrained_alloc()
251 for_each_node_mask(nid, cpuset_current_mems_allowed) in constrained_alloc()
252 *totalpages += node_spanned_pages(nid); in constrained_alloc()
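constrained_alloc() above sizes the OOM badness baseline by summing spanned pages over whichever nodemask constrains the allocation. The summing loop, as a standalone sketch:

#include <linux/mmzone.h>
#include <linux/nodemask.h>

static unsigned long pages_in_nodemask(const nodemask_t *mask)
{
    unsigned long total = 0;
    int nid;

    for_each_node_mask(nid, *mask)
        total += node_spanned_pages(nid);
    return total;
}
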
huge_memory.c
2386 static bool khugepaged_scan_abort(int nid) in khugepaged_scan_abort() argument
2398 if (khugepaged_node_load[nid]) in khugepaged_scan_abort()
2404 if (node_distance(nid, i) > RECLAIM_DISTANCE) in khugepaged_scan_abort()
2414 int nid, target_node = 0, max_value = 0; in khugepaged_find_target_node() local
2417 for (nid = 0; nid < MAX_NUMNODES; nid++) in khugepaged_find_target_node()
2418 if (khugepaged_node_load[nid] > max_value) { in khugepaged_find_target_node()
2419 max_value = khugepaged_node_load[nid]; in khugepaged_find_target_node()
2420 target_node = nid; in khugepaged_find_target_node()
2425 for (nid = last_khugepaged_target_node + 1; nid < MAX_NUMNODES; in khugepaged_find_target_node()
2426 nid++) in khugepaged_find_target_node()
[all …]
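
khugepaged_find_target_node() above collapses a huge page onto the node that backs most of the scanned PTEs, a plain argmax over khugepaged_node_load[]. The selection loop in isolation; the array is a parameter here rather than the file-static:

static int pick_target_node(const int load[], int nr_nodes)
{
    int nid, target_node = 0, max_value = 0;

    for (nid = 0; nid < nr_nodes; nid++) {
        if (load[nid] > max_value) {
            max_value = load[nid];
            target_node = nid;
        }
    }
    return target_node;
}
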
slab.c
1315 int nid; in slab_memory_callback() local
1317 nid = mnb->status_change_nid; in slab_memory_callback()
1318 if (nid < 0) in slab_memory_callback()
1324 ret = init_cache_node_node(nid); in slab_memory_callback()
1329 ret = drain_cache_node_node(nid); in slab_memory_callback()
1451 int nid; in kmem_cache_init() local
1453 for_each_online_node(nid) { in kmem_cache_init()
1454 init_list(kmem_cache, &init_kmem_cache_node[CACHE_CACHE + nid], nid); in kmem_cache_init()
1457 &init_kmem_cache_node[SIZE_NODE + nid], nid); in kmem_cache_init()
3021 int nid; in fallback_alloc() local
[all …]
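
slab_memory_callback() above reacts to memory hotplug: the notifier payload carries status_change_nid, the node gaining or losing memory, and a negative value means no node-level change. A sketch of that dispatch shape; the two helpers are hypothetical stubs standing in for init_cache_node_node()/drain_cache_node_node():

#include <linux/memory.h>

static int setup_node_caches(int nid) { return 0; }    /* hypothetical stub */
static int drain_node_caches(int nid) { return 0; }    /* hypothetical stub */

static int mem_callback(unsigned long action, void *arg)
{
    struct memory_notify *mnb = arg;
    int nid = mnb->status_change_nid;

    if (nid < 0)                /* no node changed state */
        return 0;

    switch (action) {
    case MEM_GOING_ONLINE:
        return setup_node_caches(nid);
    case MEM_GOING_OFFLINE:
        return drain_node_caches(nid);
    }
    return 0;
}
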
sparse-vmemmap.c
179 struct page * __meminit sparse_mem_map_populate(unsigned long pnum, int nid) in sparse_mem_map_populate() argument
189 if (vmemmap_populate(start, end, nid)) in sparse_mem_map_populate()
nobootmem.c
35 static void * __init __alloc_memory_core_early(int nid, u64 size, u64 align, in __alloc_memory_core_early() argument
46 addr = memblock_find_in_range_node(size, align, goal, limit, nid, in __alloc_memory_core_early()
memory-failure.c
1526 int nid = page_to_nid(p); in new_page() local
1529 nid); in new_page()
1531 return __alloc_pages_node(nid, GFP_HIGHUSER_MOVABLE, 0); in new_page()
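new_page() above keeps the replacement for a poisoned page on the same node, so migrating away from the bad frame does not change NUMA placement. The same idea as a sketch, with the huge-page branch elided:

#include <linux/gfp.h>
#include <linux/mm.h>

static struct page *replacement_page(struct page *p)
{
    int nid = page_to_nid(p);

    /* Order-0 replacement on the failing page's own node. */
    return __alloc_pages_node(nid, GFP_HIGHUSER_MOVABLE, 0);
}
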
migrate.c
1591 int nid = (int) data; in alloc_misplaced_dst_page() local
1594 newpage = __alloc_pages_node(nid, in alloc_misplaced_dst_page()
slub.c
2269 slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid) in slab_out_of_memory() argument
2281 nid, gfpflags); in slab_out_of_memory()
3912 int nid = marg->status_change_nid_normal; in slab_mem_going_online_callback() local
3919 if (nid < 0) in slab_mem_going_online_callback()
3940 s->node[nid] = n; in slab_mem_going_online_callback()