
Searched defs:node (Results 1 – 25 of 28) sorted by relevance


/mm/
sparse-vmemmap.c
40 static void * __ref __earlyonly_bootmem_alloc(int node, in __earlyonly_bootmem_alloc()
49 void * __meminit vmemmap_alloc_block(unsigned long size, int node) in vmemmap_alloc_block()
74 void * __meminit vmemmap_alloc_block_buf(unsigned long size, int node) in vmemmap_alloc_block_buf()
132 void __meminit vmemmap_verify(pte_t *pte, int node, in vmemmap_verify()
143 pte_t * __meminit vmemmap_pte_populate(pmd_t *pmd, unsigned long addr, int node) in vmemmap_pte_populate()
157 static void * __meminit vmemmap_alloc_block_zero(unsigned long size, int node) in vmemmap_alloc_block_zero()
168 pmd_t * __meminit vmemmap_pmd_populate(pud_t *pud, unsigned long addr, int node) in vmemmap_pmd_populate()
180 pud_t * __meminit vmemmap_pud_populate(p4d_t *p4d, unsigned long addr, int node) in vmemmap_pud_populate()
192 p4d_t * __meminit vmemmap_p4d_populate(pgd_t *pgd, unsigned long addr, int node) in vmemmap_p4d_populate()
204 pgd_t * __meminit vmemmap_pgd_populate(unsigned long addr, int node) in vmemmap_pgd_populate()
[all …]
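The per-level helpers above are normally chained together by an architecture's vmemmap_populate() implementation. Below is a minimal kernel-style sketch of that walk, assuming the signatures listed above and loosely modelled on vmemmap_populate_basepages(); it is not buildable outside the kernel tree, and the trailing start/end arguments to vmemmap_verify() are an assumption based on its truncated listing.

```c
/* Hedged sketch: step through [start, end) one PAGE_SIZE at a time and
 * make sure every level of the vmemmap page table exists on @node.
 * Kernel-internal; shown only to illustrate how the populate helpers
 * listed above fit together. */
static int __meminit sketch_vmemmap_populate(unsigned long start,
					     unsigned long end, int node)
{
	unsigned long addr;

	for (addr = start; addr < end; addr += PAGE_SIZE) {
		pgd_t *pgd = vmemmap_pgd_populate(addr, node);
		p4d_t *p4d;
		pud_t *pud;
		pmd_t *pmd;
		pte_t *pte;

		if (!pgd)
			return -ENOMEM;
		p4d = vmemmap_p4d_populate(pgd, addr, node);
		if (!p4d)
			return -ENOMEM;
		pud = vmemmap_pud_populate(p4d, addr, node);
		if (!pud)
			return -ENOMEM;
		pmd = vmemmap_pmd_populate(pud, addr, node);
		if (!pmd)
			return -ENOMEM;
		pte = vmemmap_pte_populate(pmd, addr, node);
		if (!pte)
			return -ENOMEM;
		/* Assumed signature: vmemmap_verify(pte, node, start, end). */
		vmemmap_verify(pte, node, addr, addr + PAGE_SIZE);
	}
	return 0;
}
```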
interval_tree.c
28 void vma_interval_tree_insert_after(struct vm_area_struct *node, in vma_interval_tree_insert_after()
85 void anon_vma_interval_tree_remove(struct anon_vma_chain *node, in anon_vma_interval_tree_remove()
99 anon_vma_interval_tree_iter_next(struct anon_vma_chain *node, in anon_vma_interval_tree_iter_next()
106 void anon_vma_interval_tree_verify(struct anon_vma_chain *node) in anon_vma_interval_tree_verify()
slab.c
492 int node = __this_cpu_read(slab_reap_node); in next_reap_node() local
532 static struct array_cache *alloc_arraycache(int node, int entries, in alloc_arraycache()
596 static inline struct alien_cache **alloc_alien_cache(int node, in alloc_alien_cache()
633 static struct alien_cache *__alloc_alien_cache(int node, int entries, in __alloc_alien_cache()
648 static struct alien_cache **alloc_alien_cache(int node, int limit, gfp_t gfp) in alloc_alien_cache()
685 struct array_cache *ac, int node, in __drain_alien_cache()
711 int node = __this_cpu_read(slab_reap_node); in reap_alien() local
753 int node, int page_node) in __cache_free_alien()
786 int node = numa_mem_id(); in cache_free_alien() local
807 static int init_cache_node(struct kmem_cache *cachep, int node, gfp_t gfp) in init_cache_node()
[all …]
slob.c
191 static void *slob_new_pages(gfp_t gfp, int order, int node) in slob_new_pages()
301 static void *slob_alloc(size_t size, gfp_t gfp, int align, int node, in slob_alloc()
469 __do_kmalloc_node(size_t size, gfp_t gfp, int node, unsigned long caller) in __do_kmalloc_node()
530 int node, unsigned long caller) in __kmalloc_node_track_caller()
592 static void *slob_alloc_node(struct kmem_cache *c, gfp_t flags, int node) in slob_alloc_node()
629 void *__kmalloc_node(size_t size, gfp_t gfp, int node) in __kmalloc_node()
635 void *kmem_cache_alloc_node(struct kmem_cache *cachep, gfp_t gfp, int node) in kmem_cache_alloc_node()
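The slab backends (slab.c and slob.c here, slub.c below) all provide the same node-aware entry points, such as __kmalloc_node() and kmem_cache_alloc_node(). A minimal sketch of how a caller pins an allocation to a NUMA node follows; the function names are illustrative and the kmem_cache is assumed to have been created elsewhere.

```c
#include <linux/slab.h>
#include <linux/topology.h>

/* Hedged sketch: node-aware allocation as a caller sees it.
 * numa_mem_id() picks the nearest memory node for the current CPU;
 * passing NUMA_NO_NODE instead lets the allocator decide. */
static void *alloc_near_current_cpu(size_t size)
{
	return kmalloc_node(size, GFP_KERNEL, numa_mem_id());
}

/* Same idea, but from a dedicated cache (assumed to have been created
 * beforehand with kmem_cache_create()). */
static void *cache_alloc_near_current_cpu(struct kmem_cache *cache)
{
	return kmem_cache_alloc_node(cache, GFP_KERNEL, numa_mem_id());
}
```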
slub.c
1031 static inline unsigned long slabs_node(struct kmem_cache *s, int node) in slabs_node()
1043 static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects) in inc_slabs_node()
1058 static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects) in dec_slabs_node()
1373 static inline unsigned long slabs_node(struct kmem_cache *s, int node) in slabs_node()
1377 static inline void inc_slabs_node(struct kmem_cache *s, int node, in inc_slabs_node()
1379 static inline void dec_slabs_node(struct kmem_cache *s, int node, in dec_slabs_node()
1489 gfp_t flags, int node, struct kmem_cache_order_objects oo) in alloc_slab_page()
1616 static struct page *allocate_slab(struct kmem_cache *s, gfp_t flags, int node) in allocate_slab()
1696 static struct page *new_slab(struct kmem_cache *s, gfp_t flags, int node) in new_slab()
1948 static void *get_partial(struct kmem_cache *s, gfp_t flags, int node, in get_partial()
[all …]
vmalloc.c
380 get_subtree_max_size(struct rb_node *node) in get_subtree_max_size()
559 struct rb_node *node; in augment_tree_propagate_check() local
626 struct rb_node *node = &va->rb_node; in augment_tree_propagate_from() local
792 struct rb_node *node; in find_vmap_lowest_match() local
1052 int node, gfp_t gfp_mask) in alloc_vmap_area()
1462 int node, err; in new_vmap_block() local
1777 void *vm_map_ram(struct page **pages, unsigned int count, int node, pgprot_t prot) in vm_map_ram()
2042 unsigned long end, int node, gfp_t gfp_mask, const void *caller) in __get_vm_area_node()
2398 pgprot_t prot, int node) in __vmalloc_area_node()
2480 pgprot_t prot, unsigned long vm_flags, int node, in __vmalloc_node_range()
[all …]
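On the virtually contiguous side, vmalloc.c takes the same node hint, e.g. in vm_map_ram() and __vmalloc_node_range(). A hedged sketch of the vm_map_ram()/vm_unmap_ram() pairing, using the four-argument signature shown above (later kernels drop the pgprot_t argument); the page array is assumed to be allocated and owned by the caller.

```c
#include <linux/vmalloc.h>
#include <linux/mm.h>

/* Hedged sketch: temporarily map an existing page array into a
 * virtually contiguous range, keeping the vmap bookkeeping on @node. */
static void *map_pages_tmp(struct page **pages, unsigned int count, int node)
{
	return vm_map_ram(pages, count, node, PAGE_KERNEL);
}

static void unmap_pages_tmp(void *mem, unsigned int count)
{
	/* Must be paired with the vm_map_ram() call above. */
	vm_unmap_ram(mem, count);
}
```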
workingset.c
368 void workingset_update_node(struct xa_node *node) in workingset_update_node()
454 struct xa_node *node = container_of(item, struct xa_node, private_list); in shadow_lru_isolate() local
khugepaged.c
781 khugepaged_alloc_page(struct page **hpage, gfp_t gfp, int node) in khugepaged_alloc_page()
845 khugepaged_alloc_page(struct page **hpage, gfp_t gfp, int node) in khugepaged_alloc_page()
949 int node, int referenced) in collapse_huge_page()
1126 int node = NUMA_NO_NODE, unmapped = 0; in khugepaged_scan_pmd() local
1494 struct page **hpage, int node) in collapse_file()
1821 int node = NUMA_NO_NODE; in khugepaged_scan_file() local
zswap.c
127 struct hlist_node node; member
269 struct rb_node *node = root->rb_node; in zswap_rb_search() local
398 static int zswap_cpu_comp_prepare(unsigned int cpu, struct hlist_node *node) in zswap_cpu_comp_prepare()
416 static int zswap_cpu_comp_dead(unsigned int cpu, struct hlist_node *node) in zswap_cpu_comp_dead()
hugetlb.c
901 int node = NUMA_NO_NODE; in dequeue_huge_page_nodemask() local
1037 #define for_each_node_mask_to_alloc(hs, nr_nodes, node, mask) \ argument
1043 #define for_each_node_mask_to_free(hs, nr_nodes, node, mask) \ argument
1336 struct llist_node *node; in free_hpage_workfn() local
1535 int nr_nodes, node; in alloc_pool_huge_page() local
1562 int nr_nodes, node; in free_pool_huge_page() local
1802 int node; in alloc_huge_page_vma() local
2222 int nr_nodes, node; in __alloc_bootmem_huge_page() local
2399 int nr_nodes, node; in adjust_pool_surplus() local
2856 static void hugetlb_unregister_node(struct node *node) in hugetlb_unregister_node()
[all …]
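The for_each_node_mask_to_alloc()/for_each_node_mask_to_free() macros above implement hugetlb's round-robin node selection: each iteration resumes from the hstate's next node and interleaves across the allowed mask. A heavily hedged sketch of the calling pattern used by alloc_pool_huge_page(); try_alloc_hugepage_on_node() is a hypothetical stand-in for hugetlb's internal per-node allocation helper.

```c
/* Hedged sketch of the round-robin pattern; try_alloc_hugepage_on_node()
 * is hypothetical, standing in for the real per-node helper. */
static int sketch_alloc_pool_huge_page(struct hstate *h, nodemask_t *allowed)
{
	int nr_nodes, node;

	for_each_node_mask_to_alloc(h, nr_nodes, node, allowed) {
		/* Try the next node in round-robin order and stop at the
		 * first success, so new pages stay spread across nodes. */
		if (try_alloc_hugepage_on_node(h, node))
			return 1;
	}
	return 0;
}
```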
migrate.c
1502 struct list_head *pagelist, int node) in do_move_pages_to_node()
1526 int node, struct list_head *pagelist, bool migrate_all) in add_page_for_migration()
1610 int node; in do_pages_move() local
1949 int node) in migrate_misplaced_page()
2007 struct page *page, int node) in migrate_misplaced_transhuge_page()
memory_hotplug.c
228 int node = pgdat->node_id; in register_page_bootmem_info_node() local
657 static void node_states_set_node(int node, struct memory_notify *arg) in node_states_set_node()
1469 static void node_states_clear_node(int node, struct memory_notify *arg) in node_states_clear_node()
1486 int ret, node, nr_isolate_pageblock; in __offline_pages() local
vmstat.c
959 unsigned long sum_zone_node_page_state(int node, in sum_zone_node_page_state()
976 unsigned long sum_zone_numa_state(int node, in sum_zone_numa_state()
1306 loff_t node = *pos; in frag_start() local
1928 int node; in init_cpu_node_state() local
1952 int node; in vmstat_cpu_dead() local
mempolicy.c
133 int node; in get_task_policy() local
324 int node = first_node(pol->w.user_nodemask); in mpol_rebind_preferred() local
1001 struct page *alloc_new_node_page(struct page *page, unsigned long node) in alloc_new_node_page()
1845 int node = numa_mem_id(); in mempolicy_slab_node() local
2100 unsigned long addr, int node, bool hugepage) in alloc_pages_vma()
2476 static void sp_node_init(struct sp_node *node, unsigned long start, in sp_node_init()
nommu.c
153 void *__vmalloc_node_flags(unsigned long size, int node, gfp_t flags) in __vmalloc_node_flags()
256 void *vmalloc_node(unsigned long size, int node) in vmalloc_node()
274 void *vzalloc_node(unsigned long size, int node) in vzalloc_node()
343 void *vm_map_ram(struct page **pages, unsigned int count, int node, pgprot_t prot) in vm_map_ram()
cma_debug.c
19 struct hlist_node node; member
vmpressure.c
155 struct list_head node; member
memcontrol.c
1672 int node; in mem_cgroup_select_victim_node() local
3411 int node, cpu, i; in memcg_flush_percpu_vmstats() local
4951 static int alloc_mem_cgroup_per_node_info(struct mem_cgroup *memcg, int node) in alloc_mem_cgroup_per_node_info()
4991 static void free_mem_cgroup_per_node_info(struct mem_cgroup *memcg, int node) in free_mem_cgroup_per_node_info()
5005 int node; in __mem_cgroup_free() local
5030 int node; in mem_cgroup_alloc() local
6923 int cpu, node; in mem_cgroup_init() local
ksm.c
157 struct rb_node node; /* when node of stable tree */ member
207 struct rb_node node; /* when node of unstable tree */ member
2761 struct rb_node *node; in ksm_check_stable_tree() local
page_alloc.c
1484 static inline bool __meminit early_pfn_in_nid(unsigned long pfn, int node) in early_pfn_in_nid()
1495 static inline bool __meminit early_pfn_in_nid(unsigned long pfn, int node) in early_pfn_in_nid()
5544 static int find_next_best_node(int node, nodemask_t *used_node_mask) in find_next_best_node()
5607 pg_data_t *node = NODE_DATA(node_order[i]); in build_zonelists_in_node_order() local
5641 int node, load, nr_nodes = 0; in build_zonelists() local
5678 int local_memory_node(int node) in local_memory_node()
5695 int node, local_node; in build_zonelists() local
compaction.c
2484 int compaction_register_node(struct node *node) in compaction_register_node()
2489 void compaction_unregister_node(struct node *node) in compaction_unregister_node()
util.c
538 void *kvmalloc_node(size_t size, gfp_t flags, int node) in kvmalloc_node()
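kvmalloc_node() in util.c is the node-aware variant of kvmalloc(): it tries a physically contiguous kmalloc first and falls back to vmalloc on the given node when the request is too large or fragmented. A small sketch of the usual call/free pairing; the function names are illustrative.

```c
#include <linux/mm.h>
#include <linux/slab.h>

/* Hedged sketch: size-flexible allocation with a node hint.
 * kvfree() handles both the kmalloc and the vmalloc fallback case. */
static void *alloc_big_buffer(size_t size, int node)
{
	return kvmalloc_node(size, GFP_KERNEL, node);
}

static void free_big_buffer(void *buf)
{
	kvfree(buf);
}
```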
backing-dev.c
408 struct rb_node **node, *parent; in wb_congested_get_create() local
slab.h
628 static inline struct kmem_cache_node *get_node(struct kmem_cache *s, int node) in get_node()
/mm/kasan/
init.c
84 static __init void *early_alloc(size_t size, int node) in early_alloc()
