Lines matching references to 'node' in the XArray implementation (lib/xarray.c)
78 static inline unsigned long *node_marks(struct xa_node *node, xa_mark_t mark) in node_marks() argument
80 return node->marks[(__force unsigned)mark]; in node_marks()
83 static inline bool node_get_mark(struct xa_node *node, in node_get_mark() argument
86 return test_bit(offset, node_marks(node, mark)); in node_get_mark()
90 static inline bool node_set_mark(struct xa_node *node, unsigned int offset, in node_set_mark() argument
93 return __test_and_set_bit(offset, node_marks(node, mark)); in node_set_mark()
97 static inline bool node_clear_mark(struct xa_node *node, unsigned int offset, in node_clear_mark() argument
100 return __test_and_clear_bit(offset, node_marks(node, mark)); in node_clear_mark()
103 static inline bool node_any_mark(struct xa_node *node, xa_mark_t mark) in node_any_mark() argument
105 return !bitmap_empty(node_marks(node, mark), XA_CHUNK_SIZE); in node_any_mark()
108 static inline void node_mark_all(struct xa_node *node, xa_mark_t mark) in node_mark_all() argument
110 bitmap_fill(node_marks(node, mark), XA_CHUNK_SIZE); in node_mark_all()
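The node_*_mark() helpers above treat each mark as a per-node bitmap with one bit per slot. A rough userspace model of that idea (the model_* names are hypothetical; plain C bit operations stand in for the kernel's test_bit()/__test_and_set_bit()/bitmap_empty()):

#include <stdbool.h>
#include <stdio.h>

#define NR_MARKS 3          /* XA_MARK_0 .. XA_MARK_2 */

/* One 64-bit word covers the 64 slots of a node on a 64-bit build. */
struct model_node {
        unsigned long marks[NR_MARKS];
};

static bool model_get_mark(struct model_node *n, unsigned int offset,
                           unsigned int mark)
{
        return n->marks[mark] & (1UL << offset);
}

/* Set the bit and report its previous state, like __test_and_set_bit(). */
static bool model_set_mark(struct model_node *n, unsigned int offset,
                           unsigned int mark)
{
        bool old = model_get_mark(n, offset, mark);

        n->marks[mark] |= 1UL << offset;
        return old;
}

/* Any slot marked at all? This is the question node_any_mark() asks. */
static bool model_any_mark(struct model_node *n, unsigned int mark)
{
        return n->marks[mark] != 0;
}

int main(void)
{
        struct model_node n = { { 0 } };

        model_set_mark(&n, 5, 0);
        printf("slot 5 marked: %d, any slot marked: %d\n",
               model_get_mark(&n, 5, 0), model_any_mark(&n, 0));
        return 0;
}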
142 static unsigned int get_offset(unsigned long index, struct xa_node *node) in get_offset() argument
144 return (index >> node->shift) & XA_CHUNK_MASK; in get_offset()
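get_offset() is the whole radix arithmetic: each level of the tree consumes XA_CHUNK_SHIFT bits of the index, selected by that node's shift. A small standalone demonstration of the decomposition (slot_of() is a hypothetical helper; XA_CHUNK_SHIFT is assumed to be the usual 6, i.e. 64 slots per node):

#include <stdio.h>

#define CHUNK_SHIFT 6
#define CHUNK_MASK  ((1UL << CHUNK_SHIFT) - 1)   /* model of XA_CHUNK_MASK */

/* Same expression as get_offset(): take CHUNK_SHIFT bits at 'shift'. */
static unsigned int slot_of(unsigned long index, unsigned char shift)
{
        return (index >> shift) & CHUNK_MASK;
}

int main(void)
{
        unsigned long index = 70000;

        /* Walk the shifts a three-level tree would use: 12, 6, 0. */
        for (int shift = 12; shift >= 0; shift -= CHUNK_SHIFT)
                printf("shift %2d -> slot %u\n", shift, slot_of(index, shift));
        return 0;
}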
201 static void *xas_descend(struct xa_state *xas, struct xa_node *node) in xas_descend() argument
203 unsigned int offset = get_offset(xas->xa_index, node); in xas_descend()
204 void *entry = xa_entry(xas->xa, node, offset); in xas_descend()
206 xas->xa_node = node; in xas_descend()
209 entry = xa_entry(xas->xa, node, offset); in xas_descend()
236 struct xa_node *node = xa_to_node(entry); in xas_load() local
238 if (xas->xa_shift > node->shift) in xas_load()
240 entry = xas_descend(xas, node); in xas_load()
241 if (node->shift == 0) in xas_load()
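xas_load() and xas_descend() walk the tree by indexing the current node with the offset for its shift until the bottom level (shift == 0) or an empty slot is reached. A toy, single-threaded model of that walk (hypothetical toy_* names; no RCU, no sibling or internal entries, and interior slots are assumed to hold only NULL or child nodes):

#include <stdio.h>

#define CHUNK_SHIFT 6
#define CHUNK_SIZE  (1UL << CHUNK_SHIFT)
#define CHUNK_MASK  (CHUNK_SIZE - 1)

struct toy_node {
        unsigned char shift;             /* bits consumed below this node */
        void *slots[CHUNK_SIZE];         /* child nodes above, values at the bottom */
};

/* Descend from the root to the slot for 'index'. */
static void *toy_load(struct toy_node *node, unsigned long index)
{
        while (node->shift) {
                unsigned int offset = (index >> node->shift) & CHUNK_MASK;

                node = node->slots[offset];     /* interior slots hold child nodes */
                if (!node)
                        return NULL;            /* nothing stored under this index */
        }
        return node->slots[index & CHUNK_MASK]; /* leaf level: slot holds the value */
}

int main(void)
{
        static struct toy_node leaf = { .shift = 0 };
        static struct toy_node root = { .shift = CHUNK_SHIFT };
        int value = 42;

        leaf.slots[70 & CHUNK_MASK] = &value;                  /* index 70 -> leaf slot 6 */
        root.slots[(70 >> CHUNK_SHIFT) & CHUNK_MASK] = &leaf;  /* root slot 1 */

        printf("load(70) = %d\n", *(int *)toy_load(&root, 70));
        printf("load(71) = %p\n", toy_load(&root, 71));
        return 0;
}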
254 static void xa_node_free(struct xa_node *node) in xa_node_free() argument
256 XA_NODE_BUG_ON(node, !list_empty(&node->private_list)); in xa_node_free()
257 node->array = XA_RCU_FREE; in xa_node_free()
258 call_rcu(&node->rcu_head, radix_tree_node_rcu_free); in xa_node_free()
269 struct xa_node *node = xas->xa_alloc; in xas_destroy() local
271 if (!node) in xas_destroy()
273 XA_NODE_BUG_ON(node, !list_empty(&node->private_list)); in xas_destroy()
274 kmem_cache_free(radix_tree_node_cachep, node); in xas_destroy()
347 static void xas_update(struct xa_state *xas, struct xa_node *node) in xas_update() argument
350 xas->xa_update(node); in xas_update()
352 XA_NODE_BUG_ON(node, !list_empty(&node->private_list)); in xas_update()
358 struct xa_node *node = xas->xa_alloc; in xas_alloc() local
363 if (node) { in xas_alloc()
371 node = kmem_cache_alloc(radix_tree_node_cachep, gfp); in xas_alloc()
372 if (!node) { in xas_alloc()
379 node->offset = xas->xa_offset; in xas_alloc()
381 XA_NODE_BUG_ON(node, parent->count > XA_CHUNK_SIZE); in xas_alloc()
384 XA_NODE_BUG_ON(node, shift > BITS_PER_LONG); in xas_alloc()
385 XA_NODE_BUG_ON(node, !list_empty(&node->private_list)); in xas_alloc()
386 node->shift = shift; in xas_alloc()
387 node->count = 0; in xas_alloc()
388 node->nr_values = 0; in xas_alloc()
389 RCU_INIT_POINTER(node->parent, xas->xa_node); in xas_alloc()
390 node->array = xas->xa; in xas_alloc()
392 return node; in xas_alloc()
436 struct xa_node *node = xas->xa_node; in xas_shrink() local
441 XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE); in xas_shrink()
442 if (node->count != 1) in xas_shrink()
444 entry = xa_entry_locked(xa, node, 0); in xas_shrink()
447 if (!xa_is_node(entry) && node->shift) in xas_shrink()
454 if (xa_track_free(xa) && !node_get_mark(node, 0, XA_FREE_MARK)) in xas_shrink()
457 node->count = 0; in xas_shrink()
458 node->nr_values = 0; in xas_shrink()
460 RCU_INIT_POINTER(node->slots[0], XA_RETRY_ENTRY); in xas_shrink()
461 xas_update(xas, node); in xas_shrink()
462 xa_node_free(node); in xas_shrink()
465 node = xa_to_node(entry); in xas_shrink()
466 node->parent = NULL; in xas_shrink()
479 struct xa_node *node = xas->xa_node; in xas_delete_node() local
484 XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE); in xas_delete_node()
485 if (node->count) in xas_delete_node()
488 parent = xa_parent_locked(xas->xa, node); in xas_delete_node()
490 xas->xa_offset = node->offset; in xas_delete_node()
491 xa_node_free(node); in xas_delete_node()
502 node = parent; in xas_delete_node()
503 xas_update(xas, node); in xas_delete_node()
506 if (!node->parent) in xas_delete_node()
522 struct xa_node *node = top; in xas_free_nodes() local
525 void *entry = xa_entry_locked(xas->xa, node, offset); in xas_free_nodes()
527 if (node->shift && xa_is_node(entry)) { in xas_free_nodes()
528 node = xa_to_node(entry); in xas_free_nodes()
533 RCU_INIT_POINTER(node->slots[offset], XA_RETRY_ENTRY); in xas_free_nodes()
538 parent = xa_parent_locked(xas->xa, node); in xas_free_nodes()
539 offset = node->offset + 1; in xas_free_nodes()
540 node->count = 0; in xas_free_nodes()
541 node->nr_values = 0; in xas_free_nodes()
542 xas_update(xas, node); in xas_free_nodes()
543 xa_node_free(node); in xas_free_nodes()
544 if (node == top) in xas_free_nodes()
546 node = parent; in xas_free_nodes()
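xas_free_nodes() tears down an entire subtree, dealing with every slot of a node before releasing the node itself; the kernel version is iterative, climbing back up via node->parent, but a recursive sketch shows the same shape. A hypothetical model with RCU and retry entries omitted:

#include <stdbool.h>
#include <stdlib.h>

#define CHUNK_SHIFT 6
#define CHUNK_SIZE  (1UL << CHUNK_SHIFT)

struct toy_node {
        unsigned char shift;
        bool is_node[CHUNK_SIZE];       /* stand-in for xa_is_node() per slot */
        void *slots[CHUNK_SIZE];
};

/* Free 'top' and every node below it; leaf values themselves are untouched. */
static void toy_free_nodes(struct toy_node *top)
{
        for (unsigned int offset = 0; offset < CHUNK_SIZE; offset++) {
                if (top->shift && top->is_node[offset])
                        toy_free_nodes(top->slots[offset]);
                top->slots[offset] = NULL;      /* real code stores XA_RETRY_ENTRY */
        }
        free(top);                              /* real code defers this via RCU */
}

int main(void)
{
        struct toy_node *root = calloc(1, sizeof(*root));
        struct toy_node *leaf = calloc(1, sizeof(*leaf));

        if (!root || !leaf)
                return 1;
        root->shift = CHUNK_SHIFT;
        root->slots[3] = leaf;
        root->is_node[3] = true;

        toy_free_nodes(root);                   /* frees leaf first, then root */
        return 0;
}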
558 struct xa_node *node = NULL; in xas_expand() local
569 node = xa_to_node(head); in xas_expand()
570 shift = node->shift + XA_CHUNK_SHIFT; in xas_expand()
577 XA_NODE_BUG_ON(node, shift > BITS_PER_LONG); in xas_expand()
578 node = xas_alloc(xas, shift); in xas_expand()
579 if (!node) in xas_expand()
582 node->count = 1; in xas_expand()
584 node->nr_values = 1; in xas_expand()
585 RCU_INIT_POINTER(node->slots[0], head); in xas_expand()
590 node_mark_all(node, XA_FREE_MARK); in xas_expand()
592 node_clear_mark(node, 0, XA_FREE_MARK); in xas_expand()
596 node_set_mark(node, 0, mark); in xas_expand()
609 rcu_assign_pointer(xa_to_node(head)->parent, node); in xas_expand()
611 head = xa_mk_node(node); in xas_expand()
613 xas_update(xas, node); in xas_expand()
618 xas->xa_node = node; in xas_expand()
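xas_expand() grows the tree upward: while the target index does not fit under the current head, a new root is allocated one XA_CHUNK_SHIFT higher, the old head goes into its slot 0, and the head pointer is switched over. The shift arithmetic can be checked on its own (shift_for_index() is a hypothetical helper that loosely mirrors the growth loop, assuming XA_CHUNK_SHIFT of 6):

#include <stdio.h>

#define CHUNK_SHIFT 6

/*
 * Smallest root shift that lets 'index' fit: keep adding a level
 * (CHUNK_SHIFT more bits) until the index fits within one chunk.
 */
static unsigned int shift_for_index(unsigned long index)
{
        unsigned int shift = 0;

        while ((index >> shift) >= (1UL << CHUNK_SHIFT))
                shift += CHUNK_SHIFT;
        return shift;
}

int main(void)
{
        unsigned long samples[] = { 5, 64, 4096, 70000 };

        for (unsigned int i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
                printf("index %lu needs root shift %u (%u levels)\n",
                       samples[i], shift_for_index(samples[i]),
                       shift_for_index(samples[i]) / CHUNK_SHIFT + 1);
        return 0;
}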
640 struct xa_node *node = xas->xa_node; in xas_create() local
644 if (xas_top(node)) { in xas_create()
658 } else if (node) { in xas_create()
661 shift = node->shift; in xas_create()
662 entry = xa_entry_locked(xa, node, offset); in xas_create()
663 slot = &node->slots[offset]; in xas_create()
673 node = xas_alloc(xas, shift); in xas_create()
674 if (!node) in xas_create()
677 node_mark_all(node, XA_FREE_MARK); in xas_create()
678 rcu_assign_pointer(*slot, xa_mk_node(node)); in xas_create()
680 node = xa_to_node(entry); in xas_create()
684 entry = xas_descend(xas, node); in xas_create()
685 slot = &node->slots[xas->xa_offset]; in xas_create()
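xas_create() is the write-side counterpart of the load walk: it descends toward the target index and allocates an interior node wherever a slot on the path is still empty, then hands back the final slot. A simplified model (hypothetical toy_* names; no marks, no root expansion, and the root is assumed to be tall enough already, which xas_expand() would normally guarantee):

#include <stdlib.h>

#define CHUNK_SHIFT 6
#define CHUNK_SIZE  (1UL << CHUNK_SHIFT)
#define CHUNK_MASK  (CHUNK_SIZE - 1)

struct toy_node {
        unsigned char shift;
        void *slots[CHUNK_SIZE];        /* child nodes above, values at the bottom */
};

/* Return the leaf slot for 'index', allocating missing levels on the way. */
static void **toy_create(struct toy_node *root, unsigned long index)
{
        struct toy_node *node = root;

        while (node->shift) {
                unsigned int offset = (index >> node->shift) & CHUNK_MASK;
                struct toy_node *child = node->slots[offset];

                if (!child) {
                        child = calloc(1, sizeof(*child));
                        if (!child)
                                return NULL;    /* xas_create() would report ENOMEM */
                        child->shift = node->shift - CHUNK_SHIFT;
                        node->slots[offset] = child;    /* rcu_assign_pointer() in the kernel */
                }
                node = child;
        }
        return &node->slots[index & CHUNK_MASK];
}

int main(void)
{
        struct toy_node root = { .shift = 2 * CHUNK_SHIFT };    /* covers indices 0 .. 2^18 - 1 */
        void **slot = toy_create(&root, 70000);
        int value = 123;

        if (slot)
                *slot = &value;         /* the store step would then fill the slot */
        return slot ? 0 : 1;
}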
721 struct xa_node *node = xas->xa_node; in xas_create_range() local
722 xas->xa_node = xa_parent_locked(xas->xa, node); in xas_create_range()
723 xas->xa_offset = node->offset - 1; in xas_create_range()
724 if (node->offset != 0) in xas_create_range()
741 static void update_node(struct xa_state *xas, struct xa_node *node, in update_node() argument
744 if (!node || (!count && !values)) in update_node()
747 node->count += count; in update_node()
748 node->nr_values += values; in update_node()
749 XA_NODE_BUG_ON(node, node->count > XA_CHUNK_SIZE); in update_node()
750 XA_NODE_BUG_ON(node, node->nr_values > XA_CHUNK_SIZE); in update_node()
751 xas_update(xas, node); in update_node()
771 struct xa_node *node; in xas_store() local
788 node = xas->xa_node; in xas_store()
789 if (node && (xas->xa_shift < node->shift)) in xas_store()
797 if (node) { in xas_store()
798 slot = &node->slots[offset]; in xas_store()
814 if (xa_is_node(next) && (!node || node->shift)) in xas_store()
816 if (!node) in xas_store()
829 next = xa_entry_locked(xas->xa, node, ++offset); in xas_store()
838 update_node(xas, node, count, values); in xas_store()
872 struct xa_node *node = xas->xa_node; in xas_set_mark() local
878 while (node) { in xas_set_mark()
879 if (node_set_mark(node, offset, mark)) in xas_set_mark()
881 offset = node->offset; in xas_set_mark()
882 node = xa_parent_locked(xas->xa, node); in xas_set_mark()
901 struct xa_node *node = xas->xa_node; in xas_clear_mark() local
907 while (node) { in xas_clear_mark()
908 if (!node_clear_mark(node, offset, mark)) in xas_clear_mark()
910 if (node_any_mark(node, mark)) in xas_clear_mark()
913 offset = node->offset; in xas_clear_mark()
914 node = xa_parent_locked(xas->xa, node); in xas_clear_mark()
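xas_set_mark() and xas_clear_mark() walk from the current node back toward the root. Setting stops as soon as a node already had the bit set (its ancestors must already be marked); clearing stops as soon as some other slot in the node is still marked, because the parent's summary bit must then stay set. A userspace model of that propagation (hypothetical toy_* names; a single mark and 64-slot nodes assumed):

#include <stdbool.h>
#include <stdio.h>

struct toy_node {
        struct toy_node *parent;
        unsigned char offset;           /* this node's slot in its parent */
        unsigned long mark;             /* one bit per slot, like node->marks[m] */
};

static void toy_set_mark(struct toy_node *node, unsigned int offset)
{
        while (node) {
                unsigned long bit = 1UL << offset;

                if (node->mark & bit)
                        break;                  /* ancestors are already marked */
                node->mark |= bit;
                offset = node->offset;
                node = node->parent;
        }
}

static void toy_clear_mark(struct toy_node *node, unsigned int offset)
{
        while (node) {
                unsigned long bit = 1UL << offset;

                if (!(node->mark & bit))
                        break;                  /* nothing to clear here or above */
                node->mark &= ~bit;
                if (node->mark)
                        break;                  /* a sibling slot still holds the mark */
                offset = node->offset;
                node = node->parent;
        }
}

int main(void)
{
        struct toy_node root = { .parent = NULL };
        struct toy_node leaf = { .parent = &root, .offset = 3 };

        toy_set_mark(&leaf, 10);        /* marks leaf slot 10 and root slot 3 */
        toy_clear_mark(&leaf, 10);      /* leaf now empty, so root slot 3 clears too */
        printf("root mark bits: %lx\n", root.mark);
        return 0;
}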
966 struct xa_node *node = xas->xa_node; in xas_pause() local
971 if (node) { in xas_pause()
974 if (!xa_is_sibling(xa_entry(xas->xa, node, offset))) in xas_pause()
977 xas->xa_index += (offset - xas->xa_offset) << node->shift; in xas_pause()
1249 struct xa_node *node = xa_to_node(curr); in xas_find_conflict() local
1250 curr = xas_descend(xas, node); in xas_find_conflict()
1834 struct xa_node *node = xas->xa_node; in xas_sibling() local
1837 if (!node) in xas_sibling()
1839 mask = (XA_CHUNK_SIZE << node->shift) - 1; in xas_sibling()
1840 return (xas->xa_index & mask) > (xas->xa_offset << node->shift); in xas_sibling()
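The comparison in xas_sibling() rests on two small pieces of arithmetic that recur throughout the file: a node with a given shift spans XA_CHUNK_SIZE << shift indices, and the slot at a given offset starts at offset << shift within that span. A quick standalone check of those two quantities (node_span() and slot_start() are hypothetical helpers; 64-slot chunks and a hand-picked multi-slot scenario are assumed):

#include <stdio.h>

#define CHUNK_SHIFT 6
#define CHUNK_SIZE  (1UL << CHUNK_SHIFT)

/* Number of indices covered by one node at this shift. */
static unsigned long node_span(unsigned char shift)
{
        return CHUNK_SIZE << shift;
}

/* First index (relative to the node) covered by the slot at 'offset'. */
static unsigned long slot_start(unsigned int offset, unsigned char shift)
{
        return (unsigned long)offset << shift;
}

int main(void)
{
        unsigned char shift = CHUNK_SHIFT;      /* second-level node: each slot covers 64 indices */
        unsigned long mask = node_span(shift) - 1;

        /*
         * Suppose a multi-index entry occupies slots 4..7 of this node, the
         * walk is positioned on its first slot (offset 4), and the caller
         * asked for index 350, which falls in slot 5 of the range.
         */
        unsigned long index = 350;
        unsigned int offset = 4;

        printf("in-node position %lu, slot start %lu, past the slot start? %d\n",
               index & mask, slot_start(offset, shift),
               (index & mask) > slot_start(offset, shift));
        return 0;
}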
2002 void xa_dump_node(const struct xa_node *node) in xa_dump_node() argument
2006 if (!node) in xa_dump_node()
2008 if ((unsigned long)node & 3) { in xa_dump_node()
2009 pr_cont("node %px\n", node); in xa_dump_node()
2015 node, node->parent ? "offset" : "max", node->offset, in xa_dump_node()
2016 node->parent, node->shift, node->count, node->nr_values, in xa_dump_node()
2017 node->array, node->private_list.prev, node->private_list.next); in xa_dump_node()
2020 pr_cont(" %lx", node->marks[i][j]); in xa_dump_node()
2046 struct xa_node *node = xa_to_node(entry); in xa_dump_entry() local
2047 xa_dump_node(node); in xa_dump_entry()
2049 xa_dump_entry(node->slots[i], in xa_dump_entry()
2050 index + (i << node->shift), node->shift); in xa_dump_entry()