Lines Matching refs:node

462 get_subtree_max_size(struct rb_node *node)  in get_subtree_max_size()  argument
466 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
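
From the two fragments above (lines 462 and 466), the helper plausibly reduces to the following: each vmap_area caches the size of the largest free block anywhere in its rb-tree subtree, and a NULL child contributes zero. A hedged reconstruction, not verified against the full file:

    static __always_inline unsigned long
    get_subtree_max_size(struct rb_node *node)
    {
            struct vmap_area *va;

            /* rb_entry_safe() maps a NULL rb_node to a NULL vmap_area */
            va = rb_entry_safe(node, struct vmap_area, rb_node);
            return va ? va->subtree_max_size : 0;
    }
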
862 struct rb_node *node; in find_vmap_lowest_match() local
866 node = free_vmap_area_root.rb_node; in find_vmap_lowest_match()
871 while (node) { in find_vmap_lowest_match()
872 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
874 if (get_subtree_max_size(node->rb_left) >= length && in find_vmap_lowest_match()
876 node = node->rb_left; in find_vmap_lowest_match()
886 if (get_subtree_max_size(node->rb_right) >= length) { in find_vmap_lowest_match()
887 node = node->rb_right; in find_vmap_lowest_match()
896 while ((node = rb_parent(node))) { in find_vmap_lowest_match()
897 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
901 if (get_subtree_max_size(node->rb_right) >= length && in find_vmap_lowest_match()
903 node = node->rb_right; in find_vmap_lowest_match()
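
The descent at lines 871-887 and the parent climb starting at line 896 implement a lowest-address first fit over a size-augmented tree: go left whenever the left subtree advertises a big-enough block (lower addresses live there), settle for the current node if it fits, otherwise go right; the rb_parent() loop backtracks when the extra start-address constraint dead-ends a subtree. A self-contained user-space sketch of the same search, minus that constraint and hence minus the backtracking:

    #include <stddef.h>
    #include <stdio.h>

    /* Toy analog of the free-area tree: keyed by start address and
     * augmented with the largest block size found in each subtree. */
    struct area {
            unsigned long start, size;
            unsigned long subtree_max;   /* max size under this node */
            struct area *left, *right;
    };

    static unsigned long subtree_max(const struct area *a)
    {
            return a ? a->subtree_max : 0;
    }

    /* Lowest-addressed block of at least 'length' bytes, or NULL. */
    static const struct area *lowest_match(const struct area *root,
                                           unsigned long length)
    {
            const struct area *a = root;

            while (a) {
                    if (subtree_max(a->left) >= length)
                            a = a->left;        /* lower addresses first  */
                    else if (a->size >= length)
                            return a;           /* this node is the fit   */
                    else if (subtree_max(a->right) >= length)
                            a = a->right;       /* only the right can fit */
                    else
                            return NULL;
            }
            return NULL;
    }

    int main(void)
    {
            struct area l = { 0x1000, 64, 64, NULL, NULL };
            struct area r = { 0x3000, 4096, 4096, NULL, NULL };
            struct area root = { 0x2000, 128, 4096, &l, &r };
            const struct area *hit = lowest_match(&root, 512);

            printf("fit at 0x%lx\n", hit ? hit->start : 0UL);   /* 0x3000 */
            return 0;
    }
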
1155 int node, gfp_t gfp_mask) in alloc_vmap_area() argument
1172 va = kmem_cache_alloc_node(vmap_area_cachep, gfp_mask, node); in alloc_vmap_area()
1206 pva = kmem_cache_alloc_node(vmap_area_cachep, gfp_mask, node); in alloc_vmap_area()
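
Lines 1155-1206 show the node parameter flowing straight into the slab layer: both vmap_area allocations target the caller-requested NUMA node, and NUMA_NO_NODE leaves the choice to the allocator. A minimal sketch of the pattern, assuming the kernel context and the names visible above:

    struct vmap_area *va;

    /* Place the metadata on the requested node; NUMA_NO_NODE means
     * "wherever is convenient". */
    va = kmem_cache_alloc_node(vmap_area_cachep, gfp_mask, node);
    if (unlikely(!va))
            return ERR_PTR(-ENOMEM);
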
1552 int node, err; in new_vmap_block() local
1555 node = numa_node_id(); in new_vmap_block()
1558 gfp_mask & GFP_RECLAIM_MASK, node); in new_vmap_block()
1564 node, gfp_mask); in new_vmap_block()
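
new_vmap_block() takes no node from its caller; line 1555 resolves one locally, so each per-CPU vmap block lands on the node of the CPU that creates it, and the same node is handed on to alloc_vmap_area() at line 1564. A hedged sketch of that shape:

    struct vmap_block *vb;
    int node = numa_node_id();      /* node of the executing CPU */

    vb = kzalloc_node(sizeof(*vb), gfp_mask & GFP_RECLAIM_MASK, node);
    if (unlikely(!vb))
            return ERR_PTR(-ENOMEM);
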
1849 void *vm_map_ram(struct page **pages, unsigned int count, int node) in vm_map_ram() argument
1863 VMALLOC_START, VMALLOC_END, node, GFP_KERNEL); in vm_map_ram()
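
vm_map_ram() at line 1849 is the exported entry point: it maps caller-supplied pages into a transient, contiguous virtual range carved from VMALLOC_START..VMALLOC_END (line 1863), with the vmap metadata allocated on the given node. A hedged usage sketch; the error path and the origin of 'pages' are illustrative:

    /* Map 'count' already-allocated pages; unmap with the same count. */
    void *addr = vm_map_ram(pages, count, NUMA_NO_NODE);
    if (!addr)
            return -ENOMEM;

    /* ... use the contiguous mapping at 'addr' ... */

    vm_unmap_ram(addr, count);
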
2066 unsigned long end, int node, gfp_t gfp_mask, const void *caller) in __get_vm_area_node() argument
2081 area = kzalloc_node(sizeof(*area), gfp_mask & GFP_RECLAIM_MASK, node); in __get_vm_area_node()
2088 va = alloc_vmap_area(size, align, start, end, node, gfp_mask); in __get_vm_area_node()
2476 pgprot_t prot, int node) in __vmalloc_area_node() argument
2489 pages = __vmalloc_node(array_size, 1, nested_gfp, node, in __vmalloc_area_node()
2492 pages = kmalloc_node(array_size, nested_gfp, node); in __vmalloc_area_node()
2507 if (node == NUMA_NO_NODE) in __vmalloc_area_node()
2510 page = alloc_pages_node(node, gfp_mask, 0); in __vmalloc_area_node()
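
Lines 2489-2510 populate __vmalloc_area_node()'s page array: the array itself is allocated node-locally (via __vmalloc_node() when it is too large for kmalloc_node()), then each order-0 page is taken from the requested node. The branch at line 2507 plausibly falls back to the generic allocator when no node was asked for; a hedged reconstruction of the per-page step:

    struct page *page;

    if (node == NUMA_NO_NODE)
            page = alloc_page(gfp_mask);            /* any node will do   */
    else
            page = alloc_pages_node(node, gfp_mask, 0); /* order 0, this node */
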
2558 pgprot_t prot, unsigned long vm_flags, int node, in __vmalloc_node_range() argument
2570 vm_flags, start, end, node, gfp_mask, caller); in __vmalloc_node_range()
2574 addr = __vmalloc_area_node(area, gfp_mask, prot, node); in __vmalloc_node_range()
2615 gfp_t gfp_mask, int node, const void *caller) in __vmalloc_node() argument
2618 gfp_mask, PAGE_KERNEL, 0, node, caller); in __vmalloc_node()
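
Lines 2558-2618 show the delegation chain: __vmalloc_node() fixes the range to the whole vmalloc arena and the protection to PAGE_KERNEL, and the node parameter then rides unchanged down to the page allocator. Roughly, as the fragments suggest:

    __vmalloc_node(size, align, gfp_mask, node, caller)
      -> __vmalloc_node_range(size, align, VMALLOC_START, VMALLOC_END,
                              gfp_mask, PAGE_KERNEL, 0 /* vm_flags */,
                              node, caller)
           -> __get_vm_area_node(..., node, ...)      /* metadata on node */
           -> __vmalloc_area_node(area, gfp_mask,
                                  prot, node)         /* pages on node    */
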
2706 void *vmalloc_node(unsigned long size, int node) in vmalloc_node() argument
2708 return __vmalloc_node(size, 1, GFP_KERNEL, node, in vmalloc_node()
2724 void *vzalloc_node(unsigned long size, int node) in vzalloc_node() argument
2726 return __vmalloc_node(size, 1, GFP_KERNEL | __GFP_ZERO, node, in vzalloc_node()
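
vmalloc_node() and vzalloc_node() (lines 2706-2726) are thin wrappers around __vmalloc_node(): the same call, with vzalloc_node() adding __GFP_ZERO. A hedged usage sketch; the size and the error path are illustrative:

    /* Zeroed, node-local buffer on the current CPU's node. */
    void *buf = vzalloc_node(4096, numa_node_id());
    if (!buf)
            return -ENOMEM;

    /* ... */

    vfree(buf);
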