Lines matching refs:entry (cross-reference hits for the identifier entry in lib/maple_tree.c)
221 const struct maple_enode *entry) in mte_node_type() argument
223 return ((unsigned long)entry >> MAPLE_NODE_TYPE_SHIFT) & in mte_node_type()
237 static __always_inline bool mte_is_leaf(const struct maple_enode *entry) in mte_is_leaf() argument
239 return ma_is_leaf(mte_node_type(entry)); in mte_is_leaf()
246 static __always_inline bool mt_is_reserved(const void *entry) in mt_is_reserved() argument
248 return ((unsigned long)entry < MAPLE_RESERVED_RANGE) && in mt_is_reserved()
249 xa_is_internal(entry); in mt_is_reserved()
289 const struct maple_enode *entry) in mte_to_node() argument
291 return (struct maple_node *)((unsigned long)entry & ~MAPLE_NODE_MASK); in mte_to_node()
300 static inline struct maple_topiary *mte_to_mat(const struct maple_enode *entry) in mte_to_mat() argument
303 ((unsigned long)entry & ~MAPLE_NODE_MASK); in mte_to_mat()
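The decode helpers above (mte_node_type(), mte_is_leaf(), mte_to_node(), mte_to_mat()) all rely on the same trick: a struct maple_enode is a node pointer with the node type folded into its low, alignment-guaranteed bits, so the type comes back with a shift-and-mask and the real struct maple_node with a mask-clear. A minimal userspace sketch of that tagging scheme; the constant values and names here are illustrative placeholders, not the kernel's MAPLE_NODE_TYPE_SHIFT/MAPLE_NODE_MASK definitions:

#include <stdint.h>
#include <stdio.h>

#define NODE_TYPE_SHIFT 3       /* assumed: bits below the type hold other tags */
#define NODE_TYPE_MASK  0x0F    /* assumed: 4-bit type field */
#define NODE_MASK       0xFFUL  /* assumed: all metadata bits below the pointer */

struct node { int data; };

/* Fold a small type tag into the low bits of a sufficiently aligned pointer. */
static void *encode(struct node *n, unsigned int type)
{
        return (void *)((uintptr_t)n | ((uintptr_t)type << NODE_TYPE_SHIFT));
}

/* Mirrors mte_node_type(): shift the tag back down and mask it off. */
static unsigned int decode_type(const void *entry)
{
        return ((uintptr_t)entry >> NODE_TYPE_SHIFT) & NODE_TYPE_MASK;
}

/* Mirrors mte_to_node(): clear every metadata bit to recover the node. */
static struct node *decode_node(const void *entry)
{
        return (struct node *)((uintptr_t)entry & ~(uintptr_t)NODE_MASK);
}

int main(void)
{
        static _Alignas(256) struct node n;     /* alignment keeps the low bits free */
        void *e = encode(&n, 5);

        printf("type=%u node=%p\n", decode_type(e), (void *)decode_node(e));
        return 0;
}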
1755 struct maple_enode *entry; in mas_find_child() local
1765 entry = mas_slot_locked(mas, slots, offset); in mas_find_child()
1766 if (mte_parent(entry) == node) { in mas_find_child()
2085 b_node->slot[b_end] = wr_mas->entry; in mas_store_b_node()
2391 void *entry) in mab_set_b_end() argument
2393 if (!entry) in mab_set_b_end()
2396 b_node->slot[b_node->b_end] = entry; in mab_set_b_end()
3392 static inline int mas_root_expand(struct ma_state *mas, void *entry) in mas_root_expand() argument
3417 rcu_assign_pointer(slots[slot], entry); in mas_root_expand()
3439 static inline void mas_store_root(struct ma_state *mas, void *entry) in mas_store_root() argument
3441 if (!entry) { in mas_store_root()
3445 mas_root_expand(mas, entry); in mas_store_root()
3446 else if (((unsigned long) (entry) & 3) == 2) in mas_store_root()
3447 mas_root_expand(mas, entry); in mas_store_root()
3449 rcu_assign_pointer(mas->tree->ma_root, entry); in mas_store_root()
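The open-coded test ((unsigned long)(entry) & 3) == 2 in mas_store_root() (it reappears in mas_prealloc_calc() below) is the definition of xa_is_internal(): XArray-compatible entries reserve the two low pointer bits as a tag, and mt_is_reserved() above builds on the same convention. A short sketch following the include/linux/xarray.h definitions:

#include <stdbool.h>

/* Value entry: the integer shifted up one bit with bit 0 set (xa_mk_value()). */
static inline void *mk_value(unsigned long v)
{
        return (void *)((v << 1) | 1);
}

/* Internal entry: low two bits are binary 10 (xa_is_internal()). */
static inline bool is_internal(const void *entry)
{
        return ((unsigned long)entry & 3) == 2;
}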
3469 void *entry = wr_mas->entry; in mas_is_span_wr() local
3486 if (entry || last == ULONG_MAX) in mas_is_span_wr()
3490 trace_ma_write(__func__, wr_mas->mas, wr_mas->r_max, entry); in mas_is_span_wr()
3592 void *entry; in mas_state_walk() local
3594 entry = mas_start(mas); in mas_state_walk()
3599 return entry; in mas_state_walk()
3657 static inline void mas_new_root(struct ma_state *mas, void *entry) in mas_new_root() argument
3665 if (!entry && !mas->index && mas->last == ULONG_MAX) { in mas_new_root()
3668 rcu_assign_pointer(mas->tree->ma_root, entry); in mas_new_root()
3679 rcu_assign_pointer(slots[0], entry); in mas_new_root()
3708 MA_WR_STATE(r_wr_mas, &r_mas, wr_mas->entry); in mas_wr_spanning_store()
3709 MA_WR_STATE(l_wr_mas, &l_mas, wr_mas->entry); in mas_wr_spanning_store()
3727 return mas_new_root(mas, wr_mas->entry); in mas_wr_spanning_store()
3752 if (!wr_mas->entry) { in mas_wr_spanning_store()
3762 return mas_new_root(mas, wr_mas->entry); in mas_wr_spanning_store()
3831 rcu_assign_pointer(dst_slots[mas->offset], wr_mas->entry); in mas_wr_node_store()
3861 trace_ma_write(__func__, mas, 0, wr_mas->entry); in mas_wr_node_store()
3884 rcu_assign_pointer(slots[offset], wr_mas->entry); in mas_wr_slot_store()
3888 rcu_assign_pointer(slots[offset + 1], wr_mas->entry); in mas_wr_slot_store()
3898 rcu_assign_pointer(slots[offset + 1], wr_mas->entry); in mas_wr_slot_store()
3906 trace_ma_write(__func__, mas, 0, wr_mas->entry); in mas_wr_slot_store()
3911 if (!wr_mas->entry || gap) in mas_wr_slot_store()
4005 rcu_assign_pointer(slots[new_end], wr_mas->entry); in mas_wr_append()
4012 rcu_assign_pointer(slots[end], wr_mas->entry); in mas_wr_append()
4018 rcu_assign_pointer(slots[end + 1], wr_mas->entry); in mas_wr_append()
4023 if (!wr_mas->content || !wr_mas->entry) in mas_wr_append()
4027 trace_ma_write(__func__, mas, new_end, wr_mas->entry); in mas_wr_append()
4041 trace_ma_write(__func__, wr_mas->mas, 0, wr_mas->entry); in mas_wr_bnode()
4061 mas_new_root(mas, wr_mas->entry); in mas_wr_store_entry()
4064 mas_store_root(mas, wr_mas->entry); in mas_wr_store_entry()
4067 rcu_assign_pointer(wr_mas->slots[mas->offset], wr_mas->entry); in mas_wr_store_entry()
4068 if (!!wr_mas->entry ^ !!wr_mas->content) in mas_wr_store_entry()
4121 if (wr_mas->entry) in mas_wr_prealloc_setup()
4143 static inline int mas_prealloc_calc(struct ma_state *mas, void *entry) in mas_prealloc_calc() argument
4157 else if (((unsigned long) (entry) & 3) == 2) in mas_prealloc_calc()
4205 if (!wr_mas->entry) in mas_wr_store_type()
4261 static inline void mas_wr_preallocate(struct ma_wr_state *wr_mas, void *entry) in mas_wr_preallocate() argument
4268 request = mas_prealloc_calc(mas, entry); in mas_wr_preallocate()
4283 static inline void *mas_insert(struct ma_state *mas, void *entry) in mas_insert() argument
4285 MA_WR_STATE(wr_mas, mas, entry); in mas_insert()
4305 mas_wr_preallocate(&wr_mas, entry); in mas_insert()
4346 void *entry, unsigned long range_lo, unsigned long range_hi, in mas_alloc_cyclic() argument
4368 mas_insert(mas, entry); in mas_alloc_cyclic()
4495 void *entry; in mas_prev_slot() local
4545 entry = mas_slot(mas, slots, mas->offset); in mas_prev_slot()
4550 if (likely(entry)) in mas_prev_slot()
4551 return entry; in mas_prev_slot()
4562 return entry; in mas_prev_slot()
4674 void *entry; in mas_next_slot() local
4729 entry = mt_slot(mas->tree, slots, mas->offset); in mas_next_slot()
4733 if (entry) in mas_next_slot()
4734 return entry; in mas_next_slot()
4747 return entry; in mas_next_slot()
4944 void *entry; in mas_walk() local
4949 entry = mas_state_walk(mas); in mas_walk()
4958 return entry; in mas_walk()
4967 return entry; in mas_walk()
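mas_walk() is the advanced API's single-lookup entry point: it descends from the root to the range containing mas->index and updates mas->index and mas->last to that range's bounds. A minimal sketch with a hypothetical caller in an RCU read-side section:

#include <linux/maple_tree.h>

static void *lookup_one(struct maple_tree *mt, unsigned long index)
{
        void *entry;
        MA_STATE(mas, mt, index, index);

        rcu_read_lock();
        entry = mas_walk(&mas); /* mas.index/mas.last now span the hit range */
        rcu_read_unlock();

        /* The caller must guarantee the entry's lifetime past the unlock. */
        return entry;
}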
5210 void *entry; in mte_dead_leaves() local
5214 entry = mt_slot(mt, slots, offset); in mte_dead_leaves()
5215 type = mte_node_type(entry); in mte_dead_leaves()
5216 node = mte_to_node(entry); in mte_dead_leaves()
5221 mte_set_node_dead(entry); in mte_dead_leaves()
5416 void *mas_store(struct ma_state *mas, void *entry) in mas_store() argument
5419 MA_WR_STATE(wr_mas, mas, entry); in mas_store()
5421 trace_ma_write(__func__, mas, 0, entry); in mas_store()
5424 pr_err("Error %lX > %lX %p\n", mas->index, mas->last, entry); in mas_store()
5447 request = mas_prealloc_calc(mas, entry); in mas_store()
5471 int mas_store_gfp(struct ma_state *mas, void *entry, gfp_t gfp) in mas_store_gfp() argument
5475 MA_WR_STATE(wr_mas, mas, entry); in mas_store_gfp()
5479 mas_wr_preallocate(&wr_mas, entry); in mas_store_gfp()
5481 if (!entry) in mas_store_gfp()
5504 void mas_store_prealloc(struct ma_state *mas, void *entry) in mas_store_prealloc() argument
5506 MA_WR_STATE(wr_mas, mas, entry); in mas_store_prealloc()
5521 trace_ma_write(__func__, mas, 0, entry); in mas_store_prealloc()
5536 int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp) in mas_preallocate() argument
5538 MA_WR_STATE(wr_mas, mas, entry); in mas_preallocate()
5544 request = mas_prealloc_calc(mas, entry); in mas_preallocate()
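mas_store(), mas_store_gfp(), mas_store_prealloc() and mas_preallocate() are the advanced-API write entry points in the cluster above; all of them hand wr_mas->entry to the same write machinery. A minimal sketch of the common pattern, with hypothetical names, storing one value over a caller-chosen range:

#include <linux/maple_tree.h>

static int store_range(struct maple_tree *mt, unsigned long first,
                       unsigned long last, void *value)
{
        int ret;
        MA_STATE(mas, mt, first, last);

        mas_lock(&mas);
        /* mas_store_gfp() drops and retakes the lock to retry allocations. */
        ret = mas_store_gfp(&mas, value, GFP_KERNEL);
        mas_unlock(&mas);

        return ret;
}

The mas_preallocate()/mas_store_prealloc() pair splits the same operation in two so allocation failure can be handled up front; after a successful preallocation the store cannot fail, and mas_destroy() returns any unused nodes.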
5678 void **entry) in mas_next_setup() argument
5705 *entry = mas_walk(mas); in mas_next_setup()
5706 if (*entry) in mas_next_setup()
5719 *entry = NULL; in mas_next_setup()
5749 void *entry = NULL; in mas_next() local
5751 if (mas_next_setup(mas, max, &entry)) in mas_next()
5752 return entry; in mas_next()
5772 void *entry = NULL; in mas_next_range() local
5774 if (mas_next_setup(mas, max, &entry)) in mas_next_range()
5775 return entry; in mas_next_range()
5796 void *entry = NULL; in mt_next() local
5800 entry = mas_next(&mas, max); in mt_next()
5802 return entry; in mt_next()
5806 static bool mas_prev_setup(struct ma_state *mas, unsigned long min, void **entry) in mas_prev_setup() argument
5830 *entry = mas_walk(mas); in mas_prev_setup()
5831 if (*entry) in mas_prev_setup()
5849 *entry = mas_root(mas); in mas_prev_setup()
5858 *entry = mas_root(mas); in mas_prev_setup()
5880 void *entry = NULL; in mas_prev() local
5882 if (mas_prev_setup(mas, min, &entry)) in mas_prev()
5883 return entry; in mas_prev()
5903 void *entry = NULL; in mas_prev_range() local
5905 if (mas_prev_setup(mas, min, &entry)) in mas_prev_range()
5906 return entry; in mas_prev_range()
5926 void *entry = NULL; in mt_prev() local
5930 entry = mas_prev(&mas, min); in mt_prev()
5932 return entry; in mt_prev()
5964 static __always_inline bool mas_find_setup(struct ma_state *mas, unsigned long max, void **entry) in mas_find_setup() argument
5995 *entry = mas_walk(mas); in mas_find_setup()
5996 if (*entry) in mas_find_setup()
6004 *entry = mas_walk(mas); in mas_find_setup()
6005 if (*entry) in mas_find_setup()
6019 *entry = mas_walk(mas); in mas_find_setup()
6020 if (*entry) in mas_find_setup()
6057 void *entry = NULL; in mas_find() local
6059 if (mas_find_setup(mas, max, &entry)) in mas_find()
6060 return entry; in mas_find()
6063 entry = mas_next_slot(mas, max, false); in mas_find()
6066 return entry; in mas_find()
6084 void *entry = NULL; in mas_find_range() local
6086 if (mas_find_setup(mas, max, &entry)) in mas_find_range()
6087 return entry; in mas_find_range()
6103 void **entry) in mas_find_rev_setup() argument
6151 *entry = mas_walk(mas); in mas_find_rev_setup()
6152 if (*entry) in mas_find_rev_setup()
6166 *entry = mas_root(mas); in mas_find_rev_setup()
6196 void *entry = NULL; in mas_find_rev() local
6198 if (mas_find_rev_setup(mas, min, &entry)) in mas_find_rev()
6199 return entry; in mas_find_rev()
6222 void *entry = NULL; in mas_find_range_rev() local
6224 if (mas_find_rev_setup(mas, min, &entry)) in mas_find_range_rev()
6225 return entry; in mas_find_range_rev()
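mas_next(), mas_prev(), mas_find() and their _range/_rev variants above are thin wrappers sharing the *_setup() helpers before stepping through slots with mas_next_slot()/mas_prev_slot(). Whole-tree iteration usually goes through the mas_for_each() macro, which is built on mas_find(); a read-side sketch with a hypothetical helper:

#include <linux/maple_tree.h>
#include <linux/printk.h>

static void print_entries(struct maple_tree *mt)
{
        void *entry;
        MA_STATE(mas, mt, 0, 0);

        rcu_read_lock();
        mas_for_each(&mas, entry, ULONG_MAX)
                pr_info("[%lx-%lx] -> %p\n", mas.index, mas.last, entry);
        rcu_read_unlock();
}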
6245 void *entry; in mas_erase() local
6253 entry = mas_state_walk(mas); in mas_erase()
6254 if (!entry) in mas_erase()
6272 return entry; in mas_erase()
6321 void *entry; in mtree_load() local
6326 entry = mas_start(&mas); in mtree_load()
6332 entry = NULL; in mtree_load()
6337 entry = mtree_lookup_walk(&mas); in mtree_load()
6338 if (!entry && unlikely(mas_is_start(&mas))) in mtree_load()
6342 if (xa_is_zero(entry)) in mtree_load()
6345 return entry; in mtree_load()
6361 unsigned long last, void *entry, gfp_t gfp) in mtree_store_range() argument
6366 trace_ma_write(__func__, &mas, 0, entry); in mtree_store_range()
6367 if (WARN_ON_ONCE(xa_is_advanced(entry))) in mtree_store_range()
6374 ret = mas_store_gfp(&mas, entry, gfp); in mtree_store_range()
6391 int mtree_store(struct maple_tree *mt, unsigned long index, void *entry, in mtree_store() argument
6394 return mtree_store_range(mt, index, index, entry, gfp); in mtree_store()
6410 unsigned long last, void *entry, gfp_t gfp) in mtree_insert_range() argument
6415 if (WARN_ON_ONCE(xa_is_advanced(entry))) in mtree_insert_range()
6423 mas_insert(&ms, entry); in mtree_insert_range()
6446 int mtree_insert(struct maple_tree *mt, unsigned long index, void *entry, in mtree_insert() argument
6449 return mtree_insert_range(mt, index, index, entry, gfp); in mtree_insert()
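mtree_store_range(), mtree_store(), mtree_insert_range() and mtree_insert() form the simple API: each sets up an internal ma_state, takes the tree lock, and runs the advanced-API write path, with insert differing from store only in failing on an occupied range. A sketch with hypothetical values:

#include <linux/maple_tree.h>
#include <linux/bug.h>

static DEFINE_MTREE(mt);

static int simple_api_example(void *a, void *b)
{
        int ret;

        ret = mtree_store(&mt, 1, a, GFP_KERNEL);            /* index 1 -> a */
        if (ret)
                return ret;

        ret = mtree_store_range(&mt, 10, 20, b, GFP_KERNEL); /* [10, 20] -> b */
        if (ret)
                return ret;

        WARN_ON(mtree_load(&mt, 15) != b);  /* any index inside the range hits */

        return mtree_insert(&mt, 1, b, GFP_KERNEL);  /* occupied, so -EEXIST */
}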
6454 void *entry, unsigned long size, unsigned long min, in mtree_alloc_range() argument
6463 if (WARN_ON_ONCE(mt_is_reserved(entry))) in mtree_alloc_range()
6472 mas_insert(&mas, entry); in mtree_alloc_range()
6516 void *entry, unsigned long range_lo, unsigned long range_hi, in mtree_alloc_cyclic() argument
6525 if (WARN_ON_ONCE(mt_is_reserved(entry))) in mtree_alloc_cyclic()
6528 ret = mas_alloc_cyclic(&mas, startp, entry, range_lo, range_hi, in mtree_alloc_cyclic()
6536 void *entry, unsigned long size, unsigned long min, in mtree_alloc_rrange() argument
6545 if (WARN_ON_ONCE(mt_is_reserved(entry))) in mtree_alloc_rrange()
6554 mas_insert(&mas, entry); in mtree_alloc_rrange()
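The three allocating variants search for a free range before storing: mtree_alloc_range() scans upward from min, mtree_alloc_rrange() scans downward from max, and mtree_alloc_cyclic() resumes after the previous allocation via mas_alloc_cyclic() from the listing above. They require an allocation tree. A sketch of ID allocation with hypothetical helpers:

#include <linux/maple_tree.h>

static struct maple_tree ids;

static void ids_init(void)
{
        mt_init_flags(&ids, MT_FLAGS_ALLOC_RANGE);  /* alloc trees track gaps */
}

static int get_id(void *object, unsigned long *id)
{
        /* Find and fill a free single-index range anywhere in [0, 1023]. */
        return mtree_alloc_range(&ids, id, object, 1, 0, 1023, GFP_KERNEL);
}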
6586 void *entry = NULL; in mtree_erase() local
6592 entry = mas_erase(&mas); in mtree_erase()
6595 return entry; in mtree_erase()
6922 void *entry; in mt_find() local
6934 entry = mas_state_walk(&mas); in mt_find()
6938 if (unlikely(xa_is_zero(entry))) in mt_find()
6939 entry = NULL; in mt_find()
6941 if (entry) in mt_find()
6945 entry = mas_next_entry(&mas, max); in mt_find()
6946 if (likely(entry && !xa_is_zero(entry))) in mt_find()
6950 if (unlikely(xa_is_zero(entry))) in mt_find()
6951 entry = NULL; in mt_find()
6954 if (likely(entry)) { in mt_find()
6963 return entry; in mt_find()
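mt_find() combines the walk and the next-entry step so callers can scan without managing an ma_state of their own; the mt_for_each() macro pairs it with mt_find_after() to continue a scan. A sketch with a hypothetical counter:

#include <linux/maple_tree.h>

static unsigned long count_entries(struct maple_tree *mt)
{
        void *entry;
        unsigned long index = 0;  /* advanced past each hit by mt_find() */
        unsigned long n = 0;

        mt_for_each(mt, entry, index, ULONG_MAX)
                n++;

        return n;
}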
7102 static void mt_dump_node(const struct maple_tree *mt, void *entry,
7125 static void mt_dump_entry(void *entry, unsigned long min, unsigned long max, in mt_dump_entry() argument
7130 if (xa_is_value(entry)) in mt_dump_entry()
7131 pr_cont("value %ld (0x%lx) [%p]\n", xa_to_value(entry), in mt_dump_entry()
7132 xa_to_value(entry), entry); in mt_dump_entry()
7133 else if (xa_is_zero(entry)) in mt_dump_entry()
7134 pr_cont("zero (%ld)\n", xa_to_internal(entry)); in mt_dump_entry()
7135 else if (mt_is_reserved(entry)) in mt_dump_entry()
7136 pr_cont("UNKNOWN ENTRY (%p)\n", entry); in mt_dump_entry()
7138 pr_cont("%p\n", entry); in mt_dump_entry()
7141 static void mt_dump_range64(const struct maple_tree *mt, void *entry, in mt_dump_range64() argument
7145 struct maple_range_64 *node = &mte_to_node(entry)->mr64; in mt_dump_range64()
7146 bool leaf = mte_is_leaf(entry); in mt_dump_range64()
7166 else if (!node->slot[i] && max != mt_node_max(entry)) in mt_dump_range64()
7194 static void mt_dump_arange64(const struct maple_tree *mt, void *entry, in mt_dump_arange64() argument
7198 struct maple_arange_64 *node = &mte_to_node(entry)->ma64; in mt_dump_arange64()
7253 static void mt_dump_node(const struct maple_tree *mt, void *entry, in mt_dump_node() argument
7257 struct maple_node *node = mte_to_node(entry); in mt_dump_node()
7258 unsigned int type = mte_node_type(entry); in mt_dump_node()
7277 mt_dump_range64(mt, entry, min, max, depth, format); in mt_dump_node()
7280 mt_dump_arange64(mt, entry, min, max, depth, format); in mt_dump_node()
7290 void *entry = rcu_dereference_check(mt->ma_root, mt_locked(mt)); in mt_dump() local
7293 mt, mt->ma_flags, mt_height(mt), entry); in mt_dump()
7294 if (!xa_is_node(entry)) in mt_dump()
7295 mt_dump_entry(entry, 0, 0, 0, format); in mt_dump()
7296 else if (entry) in mt_dump()
7297 mt_dump_node(mt, entry, 0, mt_node_max(entry), 0, format); in mt_dump()
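mt_dump() reads ma_root and recurses through mt_dump_node() into the per-type dumpers above. It is only built with CONFIG_DEBUG_MAPLE_TREE; typical debugging use, assuming that option:

#if IS_ENABLED(CONFIG_DEBUG_MAPLE_TREE)
        mt_dump(mt, mt_dump_hex);       /* or mt_dump_dec for decimal pivots */
#endif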
7338 void *entry = mas_get_slot(mas, i); in mas_validate_gaps() local
7341 MT_BUG_ON(mas->tree, !entry); in mas_validate_gaps()
7522 void *entry = mas_slot(mas, slots, i); in mas_validate_limits() local
7524 if (entry && (i != mt_slots[type] - 1)) { in mas_validate_limits()
7526 i, entry); in mas_validate_limits()
7527 MT_BUG_ON(mas->tree, entry != NULL); in mas_validate_limits()
7545 void *entry, *last = (void *)1; in mt_validate_nulls() local
7559 entry = mas_slot(&mas, slots, offset); in mt_validate_nulls()
7560 if (!last && !entry) { in mt_validate_nulls()
7564 MT_BUG_ON(mt, !last && !entry); in mt_validate_nulls()
7565 last = entry; in mt_validate_nulls()
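mas_validate_gaps(), mas_validate_limits() and mt_validate_nulls() are the consistency checks run by mt_validate(), likewise under CONFIG_DEBUG_MAPLE_TREE; lib/test_maple_tree.c calls it after mutations. A sketch:

#if IS_ENABLED(CONFIG_DEBUG_MAPLE_TREE)
        mt_validate(mt);        /* MT_BUG_ON()s on bad gaps, limits or NULLs */
#endif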