• Home
  • Raw
  • Download

Lines matching refs: node

380 get_subtree_max_size(struct rb_node *node)  in get_subtree_max_size()  argument
384 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
559 struct rb_node *node; in augment_tree_propagate_check() local
568 node = n; in augment_tree_propagate_check()
570 while (node) { in augment_tree_propagate_check()
571 va = rb_entry(node, struct vmap_area, rb_node); in augment_tree_propagate_check()
573 if (get_subtree_max_size(node->rb_left) == size) { in augment_tree_propagate_check()
574 node = node->rb_left; in augment_tree_propagate_check()
581 node = node->rb_right; in augment_tree_propagate_check()
626 struct rb_node *node = &va->rb_node; in augment_tree_propagate_from() local
629 while (node) { in augment_tree_propagate_from()
630 va = rb_entry(node, struct vmap_area, rb_node); in augment_tree_propagate_from()
643 node = rb_parent(&va->rb_node); in augment_tree_propagate_from()
792 struct rb_node *node; in find_vmap_lowest_match() local
796 node = free_vmap_area_root.rb_node; in find_vmap_lowest_match()
801 while (node) { in find_vmap_lowest_match()
802 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
804 if (get_subtree_max_size(node->rb_left) >= length && in find_vmap_lowest_match()
806 node = node->rb_left; in find_vmap_lowest_match()
816 if (get_subtree_max_size(node->rb_right) >= length) { in find_vmap_lowest_match()
817 node = node->rb_right; in find_vmap_lowest_match()
826 while ((node = rb_parent(node))) { in find_vmap_lowest_match()
827 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
831 if (get_subtree_max_size(node->rb_right) >= length && in find_vmap_lowest_match()
833 node = node->rb_right; in find_vmap_lowest_match()
1052 int node, gfp_t gfp_mask) in alloc_vmap_area() argument
1068 gfp_mask & GFP_RECLAIM_MASK, node); in alloc_vmap_area()
1094 pva = kmem_cache_alloc_node(vmap_area_cachep, GFP_KERNEL, node); in alloc_vmap_area()
1462 int node, err; in new_vmap_block() local
1465 node = numa_node_id(); in new_vmap_block()
1468 gfp_mask & GFP_RECLAIM_MASK, node); in new_vmap_block()
1474 node, gfp_mask); in new_vmap_block()
1777 void *vm_map_ram(struct page **pages, unsigned int count, int node, pgprot_t prot) in vm_map_ram() argument
1791 VMALLOC_START, VMALLOC_END, node, GFP_KERNEL); in vm_map_ram()
2042 unsigned long end, int node, gfp_t gfp_mask, const void *caller) in __get_vm_area_node() argument
2056 area = kzalloc_node(sizeof(*area), gfp_mask & GFP_RECLAIM_MASK, node); in __get_vm_area_node()
2063 va = alloc_vmap_area(size, align, start, end, node, gfp_mask); in __get_vm_area_node()
2396 int node, const void *caller);
2398 pgprot_t prot, int node) in __vmalloc_area_node() argument
2414 PAGE_KERNEL, node, area->caller); in __vmalloc_area_node()
2416 pages = kmalloc_node(array_size, nested_gfp, node); in __vmalloc_area_node()
2431 if (node == NUMA_NO_NODE) in __vmalloc_area_node()
2434 page = alloc_pages_node(node, alloc_mask|highmem_mask, 0); in __vmalloc_area_node()
2480 pgprot_t prot, unsigned long vm_flags, int node, in __vmalloc_node_range() argument
2492 vm_flags, start, end, node, gfp_mask, caller); in __vmalloc_node_range()
2496 addr = __vmalloc_area_node(area, gfp_mask, prot, node); in __vmalloc_node_range()
2549 int node, const void *caller) in __vmalloc_node() argument
2552 gfp_mask, prot, 0, node, caller); in __vmalloc_node()
2563 int node, gfp_t flags) in __vmalloc_node_flags() argument
2566 node, __builtin_return_address(0)); in __vmalloc_node_flags()
2570 void *__vmalloc_node_flags_caller(unsigned long size, int node, gfp_t flags, in __vmalloc_node_flags_caller() argument
2573 return __vmalloc_node(size, 1, flags, PAGE_KERNEL, node, caller); in __vmalloc_node_flags_caller()
2646 void *vmalloc_node(unsigned long size, int node) in vmalloc_node() argument
2649 node, __builtin_return_address(0)); in vmalloc_node()
2667 void *vzalloc_node(unsigned long size, int node) in vzalloc_node() argument
2669 return __vmalloc_node_flags(size, node, in vzalloc_node()