Lines matching refs: rb_node
775 get_subtree_max_size(struct rb_node *node) in get_subtree_max_size()
779 va = rb_entry_safe(node, struct vmap_area, rb_node); in get_subtree_max_size()
790 get_subtree_max_size(va->rb_node.rb_left), in compute_subtree_max_size()
791 get_subtree_max_size(va->rb_node.rb_right)); in compute_subtree_max_size()
795 struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
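
Lines 775-795 are the augmented-tree glue: get_subtree_max_size() reads the cached maximum free-block size of a subtree, compute_subtree_max_size() recomputes that value for a single node from its children, and line 795 is the tail of the RB_DECLARE_CALLBACKS_MAX() invocation that generates the rotate/copy/propagate callbacks keeping subtree_max_size coherent while the tree rebalances. A minimal sketch of the pattern follows; struct vmap_area is simplified (the real one carries more fields), and the macro's leading arguments are reconstructed from the generated name visible on line 1035.

/* Sketch only: simplified layout, reconstructed macro arguments. */
#include <linux/rbtree_augmented.h>
#include <linux/list.h>
#include <linux/minmax.h>

struct vmap_area {
        unsigned long va_start;
        unsigned long va_end;
        struct rb_node rb_node;                 /* node in a vmap tree */
        struct list_head list;                  /* address-sorted list */
        unsigned long subtree_max_size;         /* max free size below */
};

static __always_inline unsigned long va_size(struct vmap_area *va)
{
        return va->va_end - va->va_start;
}

static __always_inline unsigned long
get_subtree_max_size(struct rb_node *node)
{
        struct vmap_area *va;

        va = rb_entry_safe(node, struct vmap_area, rb_node);
        return va ? va->subtree_max_size : 0;
}

/* Recompute the cached maximum for one node from its two children. */
static __always_inline unsigned long
compute_subtree_max_size(struct vmap_area *va)
{
        return max3(va_size(va),
                get_subtree_max_size(va->rb_node.rb_left),
                get_subtree_max_size(va->rb_node.rb_right));
}

/*
 * Generates free_vmap_area_rb_augment_cb_{propagate,copy,rotate} and the
 * callbacks struct handed to rb_insert_augmented()/rb_erase_augmented().
 */
RB_DECLARE_CALLBACKS_MAX(static, free_vmap_area_rb_augment_cb,
        struct vmap_area, rb_node, unsigned long, subtree_max_size, va_size)
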
812 struct rb_node *n = vmap_area_root.rb_node; in find_vmap_area_exceed_addr()
819 tmp = rb_entry(n, struct vmap_area, rb_node); in find_vmap_area_exceed_addr()
835 struct rb_node *n = vmap_area_root.rb_node; in __find_vmap_area()
842 va = rb_entry(n, struct vmap_area, rb_node); in __find_vmap_area()
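
Lines 812-842 are the two address lookups, find_vmap_area_exceed_addr() and __find_vmap_area(): both are the classic top-down descent that compares an address against each node's [va_start, va_end) range. A sketch of the simpler one, reusing the struct from the sketch above (the exact signature varies between kernel versions):

static struct rb_root vmap_area_root = RB_ROOT;         /* busy areas, line 835 */

static struct vmap_area *__find_vmap_area(unsigned long addr)
{
        struct rb_node *n = vmap_area_root.rb_node;

        while (n) {
                struct vmap_area *va;

                va = rb_entry(n, struct vmap_area, rb_node);
                if (addr < va->va_start)
                        n = n->rb_left;
                else if (addr >= va->va_end)
                        n = n->rb_right;
                else
                        return va;      /* va_start <= addr < va_end */
        }

        return NULL;
}
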
862 static __always_inline struct rb_node **
864 struct rb_root *root, struct rb_node *from, in find_va_links()
865 struct rb_node **parent) in find_va_links()
868 struct rb_node **link; in find_va_links()
871 link = &root->rb_node; in find_va_links()
886 tmp_va = rb_entry(*link, struct vmap_area, rb_node); in find_va_links()
907 *parent = &tmp_va->rb_node; in find_va_links()
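
Lines 862-907 are find_va_links(), the insertion-point search: starting from either the root of a tree or from a given node, it descends by address and returns the rb_node ** slot the new area should hang from, reporting the prospective parent through *parent. A simplified sketch (the real code warns on overlap and handles a few more corner cases):

static __always_inline struct rb_node **
find_va_links(struct vmap_area *va, struct rb_root *root,
              struct rb_node *from, struct rb_node **parent)
{
        struct vmap_area *tmp_va;
        struct rb_node **link;

        if (root) {
                link = &root->rb_node;
                if (unlikely(!*link)) {
                        *parent = NULL;
                        return link;    /* empty tree */
                }
        } else {
                link = &from;
        }

        /* The tree is sorted by start address; ranges never overlap. */
        do {
                tmp_va = rb_entry(*link, struct vmap_area, rb_node);

                if (va->va_end <= tmp_va->va_start)
                        link = &(*link)->rb_left;
                else if (va->va_start >= tmp_va->va_end)
                        link = &(*link)->rb_right;
                else
                        return NULL;    /* overlap: caller must bail out */
        } while (*link);

        *parent = &tmp_va->rb_node;
        return link;
}
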
912 get_va_next_sibling(struct rb_node *parent, struct rb_node **link) in get_va_next_sibling()
925 list = &rb_entry(parent, struct vmap_area, rb_node)->list; in get_va_next_sibling()
931 struct rb_node *parent, struct rb_node **link, struct list_head *head) in link_va()
938 head = &rb_entry(parent, struct vmap_area, rb_node)->list; in link_va()
944 rb_link_node(&va->rb_node, parent, link); in link_va()
957 rb_insert_augmented(&va->rb_node, in link_va()
961 rb_insert_color(&va->rb_node, root); in link_va()
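
Lines 931-961 are link_va(): with the slot from find_va_links() in hand, the node is hooked in with rb_link_node(), the tree is rebalanced with rb_insert_color() (rb_insert_augmented() on the free tree, so subtree_max_size stays valid through the rotations), and the area is also spliced into the address-ordered list. A sketch; the "augment" flag stands in for the real code's check of which root it was handed:

static __always_inline void
link_va(struct vmap_area *va, struct rb_root *root, struct rb_node *parent,
        struct rb_node **link, struct list_head *head, bool augment)
{
        /*
         * Pick the list position that keeps the list address-sorted:
         * going right means "after parent", going left means "before".
         */
        if (likely(parent)) {
                head = &rb_entry(parent, struct vmap_area, rb_node)->list;
                if (&parent->rb_right != link)
                        head = head->prev;
        }

        rb_link_node(&va->rb_node, parent, link);
        if (augment) {
                rb_insert_augmented(&va->rb_node, root,
                                    &free_vmap_area_rb_augment_cb);
                /* Caller propagates the real maxima afterwards. */
                va->subtree_max_size = 0;
        } else {
                rb_insert_color(&va->rb_node, root);
        }

        list_add(&va->list, head);
}
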
971 if (WARN_ON(RB_EMPTY_NODE(&va->rb_node))) in unlink_va()
975 rb_erase_augmented(&va->rb_node, in unlink_va()
978 rb_erase(&va->rb_node, root); in unlink_va()
981 RB_CLEAR_NODE(&va->rb_node); in unlink_va()
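
Lines 971-981 are the mirror operation, unlink_va(): RB_EMPTY_NODE() and RB_CLEAR_NODE() bracket the node's tree membership, so a double unlink trips the WARN_ON() instead of corrupting the tree. A sketch with the same stand-in "augment" flag:

static __always_inline void
unlink_va(struct vmap_area *va, struct rb_root *root, bool augment)
{
        if (WARN_ON(RB_EMPTY_NODE(&va->rb_node)))
                return;

        if (augment)
                rb_erase_augmented(&va->rb_node, root,
                                   &free_vmap_area_rb_augment_cb);
        else
                rb_erase(&va->rb_node, root);

        list_del(&va->list);
        RB_CLEAR_NODE(&va->rb_node);
}
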
1035 free_vmap_area_rb_augment_cb_propagate(&va->rb_node, NULL); in augment_tree_propagate_from()
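
Line 1035 calls the _propagate helper generated by RB_DECLARE_CALLBACKS_MAX() directly: it walks from a modified node up to the root (the NULL "stop" argument) refreshing subtree_max_size, which is how augment_tree_propagate_from() repairs the cached maxima after a node's boundaries change. A minimal reconstruction of that wrapper:

static __always_inline void
augment_tree_propagate_from(struct vmap_area *va)
{
        /* Re-derive subtree_max_size from this node up to the root. */
        free_vmap_area_rb_augment_cb_propagate(&va->rb_node, NULL);
}
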
1046 struct rb_node **link; in insert_vmap_area()
1047 struct rb_node *parent; in insert_vmap_area()
1056 struct rb_node *from, struct rb_root *root, in insert_vmap_area_augment()
1059 struct rb_node **link; in insert_vmap_area_augment()
1060 struct rb_node *parent; in insert_vmap_area_augment()
1090 struct rb_node **link; in merge_or_add_vmap_area()
1091 struct rb_node *parent; in merge_or_add_vmap_area()
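
Lines 1046-1091 show that insert_vmap_area(), insert_vmap_area_augment() and merge_or_add_vmap_area() all follow the same two-step recipe: find_va_links() to locate the slot, then link_va() to attach the node (merge_or_add_vmap_area() first tries to extend the left or right list neighbour and only falls back to a fresh link). A sketch of the simplest of the three, built on the helper signatures sketched above:

static void insert_vmap_area(struct vmap_area *va,
                             struct rb_root *root, struct list_head *head)
{
        struct rb_node **link;
        struct rb_node *parent;

        link = find_va_links(va, root, NULL, &parent);
        if (link)
                link_va(va, root, parent, link, head, false);
        /* A NULL link means find_va_links() saw an overlapping range. */
}
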
1208 struct rb_node *node; in find_vmap_lowest_match()
1212 node = free_vmap_area_root.rb_node; in find_vmap_lowest_match()
1218 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
1243 va = rb_entry(node, struct vmap_area, rb_node); in find_vmap_lowest_match()
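
Lines 1208-1243 are find_vmap_lowest_match(), where the augmentation pays off: a whole subtree is skipped whenever its cached subtree_max_size says nothing in it is large enough, and the search otherwise prefers the left (lower-address) child. A simplified sketch that ignores the real code's alignment handling and its backtracking pass for the corner case where a chosen subtree turns out not to fit:

static struct rb_root free_vmap_area_root = RB_ROOT;    /* free areas, line 1212 */

static __always_inline struct vmap_area *
find_vmap_lowest_match(unsigned long size, unsigned long vstart)
{
        struct rb_node *node = free_vmap_area_root.rb_node;
        struct vmap_area *va;

        while (node) {
                va = rb_entry(node, struct vmap_area, rb_node);

                if (get_subtree_max_size(node->rb_left) >= size &&
                    vstart < va->va_start) {
                        /* A low enough, big enough block may sit on the left. */
                        node = node->rb_left;
                } else {
                        unsigned long start = max(va->va_start, vstart);

                        /* This block itself may satisfy the request. */
                        if (start + size <= va->va_end)
                                return va;

                        /* Prune the right subtree if nothing there fits. */
                        if (get_subtree_max_size(node->rb_right) < size)
                                break;

                        node = node->rb_right;
                }
        }

        return NULL;
}
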
1424 insert_vmap_area_augment(lva, &va->rb_node, in adjust_va_to_fit_type()
1550 kmemleak_scan_area(&va->rb_node, SIZE_MAX, gfp_mask); in alloc_vmap_area()
3555 static struct vmap_area *node_to_va(struct rb_node *n) in node_to_va()
3557 return rb_entry_safe(n, struct vmap_area, rb_node); in node_to_va()
3573 struct rb_node *n; in pvm_find_va_enclose_addr()
3575 n = free_vmap_area_root.rb_node; in pvm_find_va_enclose_addr()
3579 tmp = rb_entry(n, struct vmap_area, rb_node); in pvm_find_va_enclose_addr()
3737 va = node_to_va(rb_prev(&va->rb_node)); in pcpu_get_vm_areas()
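
Lines 3555-3737 belong to the pcpu_get_vm_areas() path: node_to_va() is a NULL-tolerant container_of() wrapper (rb_entry_safe()), which lets pvm_find_va_enclose_addr() and the rb_prev() walk on line 3737 step through the free tree without special-casing the ends. A sketch; lower_neighbour() is made up here purely to illustrate the rb_prev() idiom:

static struct vmap_area *node_to_va(struct rb_node *n)
{
        return rb_entry_safe(n, struct vmap_area, rb_node);
}

/* Hypothetical helper: the previous (lower-address) area, or NULL. */
static struct vmap_area *lower_neighbour(struct vmap_area *va)
{
        return node_to_va(rb_prev(&va->rb_node));
}
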