
Lines Matching refs:nid

422 	int nid = early_pfn_to_nid(pfn);  in early_page_uninitialised()  local
424 if (node_online(nid) && pfn >= NODE_DATA(nid)->first_deferred_pfn) in early_page_uninitialised()
435 defer_init(int nid, unsigned long pfn, unsigned long end_pfn) in defer_init() argument
449 if (end_pfn < pgdat_end_pfn(NODE_DATA(nid))) in defer_init()
452 if (NODE_DATA(nid)->first_deferred_pfn != ULONG_MAX) in defer_init()
461 NODE_DATA(nid)->first_deferred_pfn = pfn; in defer_init()
479 static inline bool defer_init(int nid, unsigned long pfn, unsigned long end_pfn) in defer_init() argument
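The entries above (early_page_uninitialised() and defer_init()) revolve around one per-node field, first_deferred_pfn: on an online node, any pfn at or past that mark has no initialised struct page yet and is left for the deferred-init path. A minimal userspace sketch of that decision, with a mocked-up per-node structure (struct node_data, nodes[] and is_deferred_pfn() are illustrative, not the kernel's definitions):

#include <stdbool.h>
#include <stdio.h>
#include <limits.h>

/* mocked-up per-node data; the real pgdat has many more fields */
struct node_data {
	bool online;
	unsigned long first_deferred_pfn;	/* ULONG_MAX = nothing deferred */
};

static struct node_data nodes[2] = {
	{ .online = true,  .first_deferred_pfn = 0x40000 },
	{ .online = false, .first_deferred_pfn = ULONG_MAX },
};

/* analogous to early_page_uninitialised(): a pfn on an online node at or
 * past first_deferred_pfn still needs its struct page set up later */
static bool is_deferred_pfn(int nid, unsigned long pfn)
{
	return nodes[nid].online && pfn >= nodes[nid].first_deferred_pfn;
}

int main(void)
{
	printf("node 0, pfn 0x1000  deferred: %d\n", is_deferred_pfn(0, 0x1000));
	printf("node 0, pfn 0x50000 deferred: %d\n", is_deferred_pfn(0, 0x50000));
	return 0;
}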
1532 unsigned long zone, int nid) in __init_single_page() argument
1535 set_page_links(page, zone, nid, pfn); in __init_single_page()
1553 int nid, zid; in init_reserved_page() local
1558 nid = early_pfn_to_nid(pfn); in init_reserved_page()
1559 pgdat = NODE_DATA(nid); in init_reserved_page()
1567 __init_single_page(pfn_to_page(pfn), pfn, zid, nid); in init_reserved_page()
1670 int nid; in __early_pfn_to_nid() local
1675 nid = memblock_search_pfn_nid(pfn, &start_pfn, &end_pfn); in __early_pfn_to_nid()
1676 if (nid != NUMA_NO_NODE) { in __early_pfn_to_nid()
1679 state->last_nid = nid; in __early_pfn_to_nid()
1682 return nid; in __early_pfn_to_nid()
1689 int nid; in early_pfn_to_nid() local
1692 nid = __early_pfn_to_nid(pfn, &early_pfnnid_cache); in early_pfn_to_nid()
1693 if (nid < 0) in early_pfn_to_nid()
1694 nid = first_online_node; in early_pfn_to_nid()
1697 return nid; in early_pfn_to_nid()
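The __early_pfn_to_nid() and early_pfn_to_nid() entries show the boot-time pfn-to-node lookup: the memblock search returns the whole range a pfn falls in, that range and its node are cached, and the next lookup inside the same range skips the search; early_pfn_to_nid() falls back to first_online_node when no range matches. A rough userspace sketch of the caching idea (struct pfnnid_cache, lookup_pfn_nid() and the fake ranges are stand-ins, not kernel APIs):

#include <stdio.h>

#define NUMA_NO_NODE (-1)

struct pfnnid_cache {
	unsigned long last_start;
	unsigned long last_end;
	int last_nid;
};

/* stand-in for memblock_search_pfn_nid(): map pfns onto two fake nodes */
static int lookup_pfn_nid(unsigned long pfn,
			  unsigned long *start, unsigned long *end)
{
	if (pfn < 0x1000) {
		*start = 0; *end = 0x1000;
		return 0;
	}
	*start = 0x1000; *end = 0x2000;
	return 1;
}

static int pfn_to_nid_cached(unsigned long pfn, struct pfnnid_cache *c)
{
	unsigned long start, end;
	int nid;

	/* fast path: same range as the previous lookup */
	if (c->last_start <= pfn && pfn < c->last_end)
		return c->last_nid;

	nid = lookup_pfn_nid(pfn, &start, &end);
	if (nid != NUMA_NO_NODE) {
		c->last_start = start;
		c->last_end = end;
		c->last_nid = nid;
	}
	return nid;
}

int main(void)
{
	struct pfnnid_cache cache = { .last_nid = NUMA_NO_NODE };

	printf("pfn 0x10   -> node %d\n", pfn_to_nid_cached(0x10, &cache));
	printf("pfn 0x11   -> node %d\n", pfn_to_nid_cached(0x11, &cache));	/* cache hit */
	printf("pfn 0x1800 -> node %d\n", pfn_to_nid_cached(0x1800, &cache));
	return 0;
}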
1872 int nid = zone_to_nid(zone); in deferred_init_pages() local
1886 __init_single_page(page, pfn, zid, nid); in deferred_init_pages()
2173 int nid; in page_alloc_init_late() local
2179 for_each_node_state(nid, N_MEMORY) { in page_alloc_init_late()
2180 kthread_run(deferred_init_memmap, NODE_DATA(nid), "pgdatinit%d", nid); in page_alloc_init_late()
2207 for_each_node_state(nid, N_MEMORY) in page_alloc_init_late()
2208 shuffle_free_memory(NODE_DATA(nid)); in page_alloc_init_late()
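page_alloc_init_late() fans out one "pgdatinit%d" kernel thread per node with memory to run deferred_init_memmap() on that node's pgdat, and afterwards walks the same nodes to shuffle their free lists. A hedged pthread sketch of the per-node fan-out pattern only; node_worker() and NODES are invented, and the kernel uses kthread_run() plus a completion, not pthreads:

#include <pthread.h>
#include <stdio.h>

#define NODES 2

/* stand-in for deferred_init_memmap(): one worker per node */
static void *node_worker(void *arg)
{
	int nid = *(int *)arg;

	printf("initialising deferred pages for node %d\n", nid);
	return NULL;
}

int main(void)
{
	pthread_t tid[NODES];
	int ids[NODES];

	/* analogous to kthread_run(deferred_init_memmap, NODE_DATA(nid), ...) per node */
	for (int nid = 0; nid < NODES; nid++) {
		ids[nid] = nid;
		pthread_create(&tid[nid], NULL, node_worker, &ids[nid]);
	}
	for (int nid = 0; nid < NODES; nid++)
		pthread_join(tid[nid], NULL);
	return 0;
}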
5468 void * __meminit alloc_pages_exact_nid(int nid, size_t size, gfp_t gfp_mask) in alloc_pages_exact_nid() argument
5476 p = alloc_pages_node(nid, gfp_mask, order); in alloc_pages_exact_nid()
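alloc_pages_exact_nid() rounds the request up to a power-of-two number of pages, allocates that order on the given node via alloc_pages_node(), and returns the unused tail pages. A small sketch of the order calculation behind that rounding (get_order_for_size() and the fixed PAGE_SIZE here are illustrative):

#include <stdio.h>

#define PAGE_SIZE 4096UL

/* smallest order such that (1 << order) pages cover `size` bytes */
static unsigned int get_order_for_size(unsigned long size)
{
	unsigned long pages = (size + PAGE_SIZE - 1) / PAGE_SIZE;
	unsigned int order = 0;

	while ((1UL << order) < pages)
		order++;
	return order;
}

int main(void)
{
	/* 20 KiB needs 5 pages, so an order-3 (8-page) block is allocated
	 * and the pages beyond the request are freed back */
	printf("order for 20 KiB: %u\n", get_order_for_size(20 * 1024));
	return 0;
}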
5614 void si_meminfo_node(struct sysinfo *val, int nid) in si_meminfo_node() argument
5620 pg_data_t *pgdat = NODE_DATA(nid); in si_meminfo_node()
5626 val->freeram = sum_zone_node_page_state(nid, NR_FREE_PAGES); in si_meminfo_node()
5650 static bool show_mem_node_skip(unsigned int flags, int nid, nodemask_t *nodemask) in show_mem_node_skip() argument
5663 return !node_isset(nid, *nodemask); in show_mem_node_skip()
6184 int nid; in __build_all_zonelists() local
6215 for_each_online_node(nid) { in __build_all_zonelists()
6216 pg_data_t *pgdat = NODE_DATA(nid); in __build_all_zonelists()
6337 void __meminit memmap_init_zone(unsigned long size, int nid, unsigned long zone, in memmap_init_zone() argument
6374 if (defer_init(nid, pfn, zone_end_pfn)) in memmap_init_zone()
6379 __init_single_page(page, pfn, zone, nid); in memmap_init_zone()
6407 int nid = pgdat->node_id; in memmap_init_zone_device() local
6425 __init_single_page(page, pfn, zone_idx, nid); in memmap_init_zone_device()
6526 int nid = zone_to_nid(zone), zone_id = zone_idx(zone); in memmap_init_zone_range() local
6534 memmap_init_zone(end_pfn - start_pfn, nid, zone_id, start_pfn, in memmap_init_zone_range()
6538 init_unavailable_range(*hole_pfn, start_pfn, zone_id, nid); in memmap_init_zone_range()
6547 int i, j, zone_id, nid; in memmap_init() local
6549 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) { in memmap_init()
6550 struct pglist_data *node = NODE_DATA(nid); in memmap_init()
6577 init_unavailable_range(hole_pfn, end_pfn, zone_id, nid); in memmap_init()
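memmap_init() walks every memblock range with for_each_mem_pfn_range(), hands each intersecting piece to memmap_init_zone() per zone, and carries hole_pfn along so the gap before each range, and any trailing hole, gets init_unavailable_range(). A toy sketch of that hole-tracking loop (ranges[], init_present() and init_hole() are invented placeholders):

#include <stdio.h>

struct range { unsigned long start, end; };	/* [start, end) in pfns */

static struct range ranges[] = { { 0x10, 0x40 }, { 0x80, 0xa0 } };

static void init_present(unsigned long s, unsigned long e)
{
	printf("  present pfns [%#lx, %#lx)\n", s, e);
}

static void init_hole(unsigned long s, unsigned long e)
{
	if (s < e)
		printf("  hole    pfns [%#lx, %#lx)\n", s, e);
}

int main(void)
{
	unsigned long hole_pfn = 0, zone_end = 0x100;

	/* analogous to memmap_init(): init the gap before each range, then the range */
	for (unsigned int i = 0; i < sizeof(ranges) / sizeof(ranges[0]); i++) {
		init_hole(hole_pfn, ranges[i].start);
		init_present(ranges[i].start, ranges[i].end);
		hole_pfn = ranges[i].end;
	}
	init_hole(hole_pfn, zone_end);	/* trailing hole up to the zone end */
	return 0;
}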
6581 void __meminit __weak arch_memmap_init(unsigned long size, int nid, in arch_memmap_init() argument
6809 void __init get_pfn_range_for_nid(unsigned int nid, in get_pfn_range_for_nid() argument
6818 for_each_mem_pfn_range(i, nid, &this_start_pfn, &this_end_pfn, NULL) { in get_pfn_range_for_nid()
6858 static void __init adjust_zone_range_for_zone_movable(int nid, in adjust_zone_range_for_zone_movable() argument
6866 if (zone_movable_pfn[nid]) { in adjust_zone_range_for_zone_movable()
6869 *zone_start_pfn = zone_movable_pfn[nid]; in adjust_zone_range_for_zone_movable()
6875 *zone_start_pfn < zone_movable_pfn[nid] && in adjust_zone_range_for_zone_movable()
6876 *zone_end_pfn > zone_movable_pfn[nid]) { in adjust_zone_range_for_zone_movable()
6877 *zone_end_pfn = zone_movable_pfn[nid]; in adjust_zone_range_for_zone_movable()
6880 } else if (*zone_start_pfn >= zone_movable_pfn[nid]) in adjust_zone_range_for_zone_movable()
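adjust_zone_range_for_zone_movable() clips a zone's [start, end) against the node's zone_movable_pfn boundary: ZONE_MOVABLE itself starts at that boundary, an ordinary zone that straddles it is truncated to end there, and a zone lying entirely above it becomes empty. A standalone sketch of that clamping with simplified arguments, ignoring the mirrored_kernelcore special case (clamp_zone_range() is an invented name):

#include <stdio.h>
#include <stdbool.h>

/* clip a zone's [start, end) against the ZONE_MOVABLE boundary */
static void clamp_zone_range(bool is_movable_zone, unsigned long movable_pfn,
			     unsigned long node_end_pfn,
			     unsigned long *start, unsigned long *end)
{
	if (!movable_pfn)			/* no ZONE_MOVABLE on this node */
		return;
	if (is_movable_zone) {			/* ZONE_MOVABLE: boundary .. node end */
		*start = movable_pfn;
		*end = node_end_pfn;
	} else if (*start < movable_pfn && *end > movable_pfn) {
		*end = movable_pfn;		/* zone straddles the boundary: truncate */
	} else if (*start >= movable_pfn) {
		*start = *end;			/* zone entirely above the boundary: empty */
	}
}

int main(void)
{
	unsigned long start = 0x1000, end = 0x9000;

	clamp_zone_range(false, 0x8000, 0xa000, &start, &end);
	printf("clamped zone: [%#lx, %#lx)\n", start, end);	/* ends at 0x8000 */
	return 0;
}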
6889 static unsigned long __init zone_spanned_pages_in_node(int nid, in zone_spanned_pages_in_node() argument
6905 adjust_zone_range_for_zone_movable(nid, zone_type, in zone_spanned_pages_in_node()
6925 unsigned long __init __absent_pages_in_range(int nid, in __absent_pages_in_range() argument
6933 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) { in __absent_pages_in_range()
6955 static unsigned long __init zone_absent_pages_in_node(int nid, in zone_absent_pages_in_node() argument
6972 adjust_zone_range_for_zone_movable(nid, zone_type, in zone_absent_pages_in_node()
6975 nr_absent = __absent_pages_in_range(nid, zone_start_pfn, zone_end_pfn); in zone_absent_pages_in_node()
6982 if (mirrored_kernelcore && zone_movable_pfn[nid]) { in zone_absent_pages_in_node()
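__absent_pages_in_range() starts from the zone's full span and subtracts, for each memblock range on the node, the part overlapping that span; whatever is left is the hole ("absent") count. zone_absent_pages_in_node() clips the zone with adjust_zone_range_for_zone_movable() first, then does the same subtraction (plus a mirrored_kernelcore adjustment not shown here). A small sketch of the subtraction (present[] is invented data):

#include <stdio.h>

struct range { unsigned long start, end; };	/* [start, end) in pfns */

static struct range present[] = { { 0x10, 0x40 }, { 0x80, 0xa0 } };

static unsigned long clamp(unsigned long v, unsigned long lo, unsigned long hi)
{
	return v < lo ? lo : (v > hi ? hi : v);
}

/* zone span minus every present range that overlaps it */
static unsigned long absent_pages(unsigned long zone_start, unsigned long zone_end)
{
	unsigned long nr_absent = zone_end - zone_start;

	for (unsigned int i = 0; i < sizeof(present) / sizeof(present[0]); i++) {
		unsigned long s = clamp(present[i].start, zone_start, zone_end);
		unsigned long e = clamp(present[i].end, zone_start, zone_end);

		nr_absent -= e - s;
	}
	return nr_absent;
}

int main(void)
{
	printf("absent pages in [0x0, 0x100): %#lx\n", absent_pages(0x0, 0x100));
	return 0;
}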
7183 static void __meminit zone_init_internals(struct zone *zone, enum zone_type idx, int nid, in zone_init_internals() argument
7187 zone_set_nid(zone, nid); in zone_init_internals()
7189 zone->zone_pgdat = NODE_DATA(nid); in zone_init_internals()
7203 void __ref free_area_init_core_hotplug(int nid) in free_area_init_core_hotplug() argument
7206 pg_data_t *pgdat = NODE_DATA(nid); in free_area_init_core_hotplug()
7210 zone_init_internals(&pgdat->node_zones[z], z, nid, 0); in free_area_init_core_hotplug()
7226 int nid = pgdat->node_id; in free_area_init_core() local
7276 zone_init_internals(zone, j, nid, freesize); in free_area_init_core()
7284 arch_memmap_init(size, nid, j, zone_start_pfn); in free_area_init_core()
7347 static void __init free_area_init_node(int nid) in free_area_init_node() argument
7349 pg_data_t *pgdat = NODE_DATA(nid); in free_area_init_node()
7356 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn); in free_area_init_node()
7358 pgdat->node_id = nid; in free_area_init_node()
7362 pr_info("Initmem setup node %d [mem %#018Lx-%#018Lx]\n", nid, in free_area_init_node()
7373 void __init free_area_init_memoryless_node(int nid) in free_area_init_memoryless_node() argument
7375 free_area_init_node(nid); in free_area_init_memoryless_node()
7415 int i, nid; in node_map_pfn_alignment() local
7417 for_each_mem_pfn_range(i, MAX_NUMNODES, &start, &end, &nid) { in node_map_pfn_alignment()
7418 if (!start || last_nid < 0 || last_nid == nid) { in node_map_pfn_alignment()
7419 last_nid = nid; in node_map_pfn_alignment()
7461 int i, nid; in early_calculate_totalpages() local
7463 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) { in early_calculate_totalpages()
7468 node_set_state(nid, N_MEMORY); in early_calculate_totalpages()
7481 int i, nid; in find_zone_movable_pfns_for_nodes() local
7502 nid = memblock_get_region_node(r); in find_zone_movable_pfns_for_nodes()
7505 zone_movable_pfn[nid] = zone_movable_pfn[nid] ? in find_zone_movable_pfns_for_nodes()
7506 min(usable_startpfn, zone_movable_pfn[nid]) : in find_zone_movable_pfns_for_nodes()
7523 nid = memblock_get_region_node(r); in find_zone_movable_pfns_for_nodes()
7532 zone_movable_pfn[nid] = zone_movable_pfn[nid] ? in find_zone_movable_pfns_for_nodes()
7533 min(usable_startpfn, zone_movable_pfn[nid]) : in find_zone_movable_pfns_for_nodes()
7590 for_each_node_state(nid, N_MEMORY) { in find_zone_movable_pfns_for_nodes()
7609 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) { in find_zone_movable_pfns_for_nodes()
7612 start_pfn = max(start_pfn, zone_movable_pfn[nid]); in find_zone_movable_pfns_for_nodes()
7636 zone_movable_pfn[nid] = end_pfn; in find_zone_movable_pfns_for_nodes()
7650 zone_movable_pfn[nid] = start_pfn + size_pages; in find_zone_movable_pfns_for_nodes()
7677 for (nid = 0; nid < MAX_NUMNODES; nid++) { in find_zone_movable_pfns_for_nodes()
7680 zone_movable_pfn[nid] = in find_zone_movable_pfns_for_nodes()
7681 roundup(zone_movable_pfn[nid], MAX_ORDER_NR_PAGES); in find_zone_movable_pfns_for_nodes()
7683 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn); in find_zone_movable_pfns_for_nodes()
7684 if (zone_movable_pfn[nid] >= end_pfn) in find_zone_movable_pfns_for_nodes()
7685 zone_movable_pfn[nid] = 0; in find_zone_movable_pfns_for_nodes()
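The final loop of find_zone_movable_pfns_for_nodes() rounds each node's ZONE_MOVABLE start up to a MAX_ORDER-sized boundary and drops it entirely if the rounded value lands at or past the node's last pfn. A one-function sketch of that step (the MAX_ORDER_NR_PAGES value here is arbitrary; the real one depends on MAX_ORDER and the architecture):

#include <stdio.h>

#define MAX_ORDER_NR_PAGES 1024UL	/* illustrative only */

static unsigned long roundup_pfn(unsigned long pfn, unsigned long align)
{
	return ((pfn + align - 1) / align) * align;
}

/* align the ZONE_MOVABLE start; a start at or past the node end means
 * the node gets no ZONE_MOVABLE at all */
static unsigned long finalize_movable_pfn(unsigned long movable_pfn,
					  unsigned long node_end_pfn)
{
	movable_pfn = roundup_pfn(movable_pfn, MAX_ORDER_NR_PAGES);
	return movable_pfn >= node_end_pfn ? 0 : movable_pfn;
}

int main(void)
{
	printf("movable start: %#lx\n", finalize_movable_pfn(0x1001, 0x4000));
	return 0;
}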
7694 static void check_for_memory(pg_data_t *pgdat, int nid) in check_for_memory() argument
7702 node_set_state(nid, N_HIGH_MEMORY); in check_for_memory()
7704 node_set_state(nid, N_NORMAL_MEMORY); in check_for_memory()
7735 int i, nid, zone; in free_area_init() local
7798 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) { in free_area_init()
7799 pr_info(" node %3d: [mem %#018Lx-%#018Lx]\n", nid, in free_area_init()
7808 for_each_online_node(nid) { in free_area_init()
7809 pg_data_t *pgdat = NODE_DATA(nid); in free_area_init()
7810 free_area_init_node(nid); in free_area_init()
7814 node_set_state(nid, N_MEMORY); in free_area_init()
7815 check_for_memory(pgdat, nid); in free_area_init()
8753 .nid = zone_to_nid(cc->zone), in __alloc_contig_migrate_range()
9043 int nid, nodemask_t *nodemask) in alloc_contig_pages() argument
9050 zonelist = node_zonelist(nid, gfp_mask); in alloc_contig_pages()