Lines matching refs: end
871 unsigned char offset, unsigned char end) in ma_set_meta() argument
876 meta->end = end; in ma_set_meta()
913 meta->end = 0; in mt_clear_meta()
926 return meta->end; in ma_meta_end()
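
The three hits above are the node-metadata helpers: a maple node whose last slot is unused can record the offset of its last occupied slot (and, for allocation-range nodes, a gap offset) in that spare space, so helpers such as ma_data_end() can read the data end back without scanning the pivot array. The sketch below only illustrates the idea with simplified, hypothetical types; the real struct maple_metadata and its placement inside the node live in include/linux/maple_tree.h. Note also mas_leaf_set_meta() further down, which only writes metadata when end < mt_slots[mt] - 1, presumably because the metadata shares space with that last slot.

        /*
         * Simplified illustration only -- not the kernel's real layout.  The
         * point is that "end" in the metadata names the last offset holding data.
         */
        struct demo_meta {
                unsigned char gap;      /* gap bookkeeping (arange nodes) */
                unsigned char end;      /* offset of the last slot with data */
        };

        static inline void demo_set_meta(struct demo_meta *meta,
                                         unsigned char gap, unsigned char end)
        {
                meta->gap = gap;        /* what ma_set_meta() stores from 'offset' */
                meta->end = end;
        }

        static inline unsigned char demo_meta_end(const struct demo_meta *meta)
        {
                return meta->end;       /* mirrors ma_meta_end() above */
        }
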
1753 unsigned char end; in mas_find_child() local
1763 end = ma_data_end(node, mt, pivots, mas->max); in mas_find_child()
1764 for (offset = mas->offset; offset <= end; offset++) { in mas_find_child()
1963 enum maple_type mt, unsigned char end) in mas_leaf_set_meta() argument
1965 if (end < mt_slots[mt] - 1) in mas_leaf_set_meta()
1966 ma_set_meta(node, mt, 0, end); in mas_leaf_set_meta()
1986 unsigned char end; in mab_mas_cp() local
2005 end = j - 1; in mab_mas_cp()
2019 ma_set_meta(node, mt, offset, end); in mab_mas_cp()
2021 mas_leaf_set_meta(node, mt, end); in mab_mas_cp()
2031 static inline void mas_bulk_rebalance(struct ma_state *mas, unsigned char end, in mas_bulk_rebalance() argument
2040 if (end > mt_min_slots[mt]) { in mas_bulk_rebalance()
2109 if (slot > mas->end) in mas_store_b_node()
2113 mas_mab_cp(mas, slot, mas->end + 1, b_node, ++b_end); in mas_store_b_node()
2207 count = mas->end = ma_data_end(wr_mas->node, wr_mas->type, in mas_wr_node_walk()
2238 unsigned char end = mas_data_end(mast->orig_l) + 1; in mast_rebalance_prev() local
2241 mab_shift_right(mast->bn, end); in mast_rebalance_prev()
2242 mas_mab_cp(mast->orig_l, 0, end - 1, mast->bn, 0); in mast_rebalance_prev()
2245 mast->bn->b_end = end + b_end; in mast_rebalance_prev()
2246 mast->l->offset += end; in mast_rebalance_prev()
2747 unsigned char end; in mtree_range_walk() local
2759 end = ma_data_end(node, type, pivots, max); in mtree_range_walk()
2769 while (offset < end) { in mtree_range_walk()
2785 mas->end = end; in mtree_range_walk()
2996 static inline void mas_destroy_rebalance(struct ma_state *mas, unsigned char end) in mas_destroy_rebalance() argument
3032 memcpy(slots + tmp, ma_slots(node, mt), sizeof(void *) * end); in mas_destroy_rebalance()
3033 memcpy(pivs + tmp, ma_pivots(node, mt), sizeof(unsigned long) * end); in mas_destroy_rebalance()
3039 tmp += end; in mas_destroy_rebalance()
3225 unsigned char end, space, split; in mas_push_data() local
3236 end = mas_data_end(&tmp_mas); in mas_push_data()
3237 slot_total += end; in mas_push_data()
3252 mab_shift_right(mast->bn, end + 1); in mas_push_data()
3253 mas_mab_cp(&tmp_mas, 0, end, mast->bn, 0); in mas_push_data()
3256 mas_mab_cp(&tmp_mas, 0, end, mast->bn, mast->bn->b_end); in mas_push_data()
3275 mast->orig_l->offset += end + 1; in mas_push_data()
3621 unsigned char end; in mtree_lookup_walk() local
3628 end = mt_pivots[type]; in mtree_lookup_walk()
3633 } while (++offset < end); in mtree_lookup_walk()
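
Both walk loops above use end to bound their pivot scan, but they compute it differently: mtree_range_walk() (the hits around line 2759) takes the node's actual data end from ma_data_end() and caches it in mas->end, while the lookup fast path at line 3628 simply bounds the scan by mt_pivots[type], the maximum number of pivots a node of that type can hold, and stops at the first pivot that covers the index. A minimal, hypothetical version of that bounded scan (no RCU, no min/max tracking):

        /* Find the slot whose range contains 'index' in a node with 'end'
         * usable pivots.  Purely illustrative; the kernel loops also track
         * the child's min/max range while descending.
         */
        static unsigned char demo_pivot_scan(const unsigned long *pivots,
                                             unsigned char end,
                                             unsigned long index)
        {
                unsigned char offset = 0;

                while (offset < end && pivots[offset] < index)
                        offset++;

                return offset;
        }
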
3767 mas_store_b_node(&l_wr_mas, &b_node, l_mas.end); in mas_wr_spanning_store()
3770 mas_mab_cp(&r_mas, r_mas.offset, r_mas.end, in mas_wr_spanning_store()
3805 mas_bulk_rebalance(mas, mas->end, wr_mas->type); in mas_wr_node_store()
3837 if (offset_end > mas->end) in mas_wr_node_store()
3842 copy_size = mas->end - offset_end + 1; in mas_wr_node_store()
3863 mas->end = new_end; in mas_wr_node_store()
3927 (mas->end != wr_mas->offset_end) && in mas_wr_extend_null()
3930 if (wr_mas->offset_end == mas->end) in mas_wr_extend_null()
3955 while ((wr_mas->offset_end < wr_mas->mas->end) && in mas_wr_end_piv()
3959 if (wr_mas->offset_end < wr_mas->mas->end) in mas_wr_end_piv()
3968 unsigned char new_end = mas->end + 2; in mas_wr_new_end()
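
mas_wr_new_end() starts from mas->end + 2 because that is the worst case: a store that lands strictly inside a single existing entry splits it into three slots (the remainder below, the new entry, the remainder above). The rest of the function (only the first line shows up in these hits) then subtracts the number of additional existing slots the write spans (wr_mas->offset_end - mas->offset) and one more for each end of the new range that already falls on an existing pivot boundary. As a worked example, writing [10, 19] into a leaf holding one entry for [0, 99] turns one slot into three, so the node grows by two; writing [0, 19] into the same leaf grows it by only one, because the lower boundary already lines up with r_min.
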
3994 unsigned char end = mas->end; in mas_wr_append() local
3997 wr_mas->pivots[new_end] = wr_mas->pivots[end]; in mas_wr_append()
4002 if (new_end == end + 1) { in mas_wr_append()
4006 wr_mas->pivots[end] = mas->index - 1; in mas_wr_append()
4011 wr_mas->pivots[end] = mas->last; in mas_wr_append()
4012 rcu_assign_pointer(slots[end], wr_mas->entry); in mas_wr_append()
4017 wr_mas->pivots[end + 1] = mas->last; in mas_wr_append()
4018 rcu_assign_pointer(slots[end + 1], wr_mas->entry); in mas_wr_append()
4019 wr_mas->pivots[end] = mas->index - 1; in mas_wr_append()
4020 mas->offset = end + 1; in mas_wr_append()
4026 mas->end = new_end; in mas_wr_append()
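
mas_wr_append() is the fast path for a store that targets the node's current last range: in the end + 1 cases the new entry replaces the head or the tail of that range, in the end + 2 case it lands strictly inside it and leaves a remainder on both sides, and mas->end is bumped to new_end at line 4026. Below is a hedged usage sketch that should mostly exercise this path by storing ascending, non-overlapping ranges into a fresh tree; names and values are illustrative, and whether append is actually chosen also depends on RCU mode and node fullness (see mas_wr_store_type() further down).

        #include <linux/maple_tree.h>
        #include <linux/xarray.h>

        static DEFINE_MTREE(demo_tree);

        /* Store consecutive, ascending ranges; each write targets the trailing
         * empty range of the last leaf, which is what the append path expects.
         */
        static int demo_append_ranges(void)
        {
                unsigned long start;
                int ret;

                for (start = 0; start < 4096; start += 256) {
                        ret = mtree_store_range(&demo_tree, start, start + 255,
                                                xa_mk_value(start), GFP_KERNEL);
                        if (ret)
                                return ret;
                }

                return 0;
        }
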
4234 if (!mt_in_rcu(mas->tree) && (mas->offset == mas->end)) { in mas_wr_store_type()
4239 if ((new_end == mas->end) && (!mt_in_rcu(mas->tree) || in mas_wr_store_type()
4473 mas->end = mas->offset; in mas_prev_node()
4641 mas->end = ma_data_end(tmp, mt, pivots, mas->max); in mas_next_node()
4684 if (likely(mas->offset < mas->end)) in mas_next_slot()
4698 if (likely(mas->offset < mas->end)) { in mas_next_slot()
4702 if (likely(mas->offset < mas->end)) in mas_next_slot()
5128 mas->end = ma_data_end(node, mt, pivots, mas->max); in mas_empty_area()
5189 mas->end = mas_data_end(mas); in mas_empty_area_rev()
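
Both gap searches above refresh the node's data end before scanning for free space (forward in mas_empty_area(), backwards in mas_empty_area_rev()); they are what allocating users go through to find an unused range. A hedged sketch of such a user, assuming a tree created with MT_FLAGS_ALLOC_RANGE and the mtree_alloc_range() signature of recent kernels (double-check the min/max semantics against the header before copying):

        #include <linux/maple_tree.h>
        #include <linux/xarray.h>

        static struct maple_tree demo_ids =
                MTREE_INIT(demo_ids, MT_FLAGS_ALLOC_RANGE);

        /* Allocate a 16-index range somewhere in [0, 1023]; on success *start
         * holds where it landed.  The forward gap search is expected to end
         * up in mas_empty_area() above.
         */
        static int demo_alloc_range(void *entry, unsigned long *start)
        {
                return mtree_alloc_range(&demo_ids, start, entry, 16, 0, 1023,
                                         GFP_KERNEL);
        }
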
5584 unsigned char end; in mas_destroy() local
5589 end = mas->end + 1; in mas_destroy()
5590 if (end < mt_min_slot_count(mas->node) - 1) in mas_destroy()
5591 mas_destroy_rebalance(mas, end); in mas_destroy()
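
These mas_destroy() hits are the tail end of bulk insertion: a caller that announced its workload with mas_expected_entries() may leave the final node under-filled, so mas_destroy() rebalances it when end drops below the minimum slot count (the fork path that duplicates a process's VMA tree is the best-known user). A hedged sketch of the surrounding API, with illustrative ranges and simplified locking:

        #include <linux/maple_tree.h>
        #include <linux/xarray.h>

        static DEFINE_MTREE(demo_bulk);

        static int demo_bulk_fill(unsigned long nr)
        {
                unsigned long i;
                int ret;

                MA_STATE(mas, &demo_bulk, 0, 0);

                /* Pre-allocate nodes for nr entries; may sleep, so done unlocked. */
                ret = mas_expected_entries(&mas, nr);
                if (ret)
                        return ret;

                mas_lock(&mas);
                for (i = 0; i < nr; i++) {
                        mas_set_range(&mas, i * 10, i * 10 + 9);
                        mas_store(&mas, xa_mk_value(i));
                }
                /* Frees unused pre-allocations and, per the hunk above,
                 * rebalances a last node left below the minimum occupancy.
                 */
                mas_destroy(&mas);
                mas_unlock(&mas);

                return 0;
        }
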
7212 pr_cont("| %02X %02X| ", node->meta.end, node->meta.gap); in mt_dump_arange64()
7588 unsigned char end; in mt_validate() local
7600 end = mas_data_end(&mas); in mt_validate()
7601 if (MAS_WARN_ON(&mas, (end < mt_min_slot_count(mas.node)) && in mt_validate()
7603 pr_err("Invalid size %u of %p\n", end, mas_mn(&mas)); in mt_validate()
7681 pr_err("[%u/%u] index=%lx last=%lx\n", mas->offset, mas->end, in mas_dump()
7695 wr_mas->type, wr_mas->offset_end, wr_mas->mas->end, in mas_wr_dump()
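
The last group of hits is debug-only: mt_dump_arange64() prints the metadata end and gap bytes for allocation-range nodes, mt_validate() complains when a node's end falls below the type's minimum slot count (with an exception for right-most nodes, whose max is ULONG_MAX), and mas_dump()/mas_wr_dump() include mas->end in their state dumps. A hedged sketch of how these are typically driven under CONFIG_DEBUG_MAPLE_TREE; note the mt_dump() format argument only exists in newer kernels:

        #include <linux/maple_tree.h>

        #ifdef CONFIG_DEBUG_MAPLE_TREE
        static void demo_check_and_dump(struct maple_tree *mt)
        {
                /* Walks the tree and warns on structural problems, including
                 * the "Invalid size %u" report shown above for undersized nodes.
                 */
                mt_validate(mt);

                /* Dumps every node; arange64 nodes show "| end gap |" from the meta. */
                mt_dump(mt, mt_dump_hex);
        }
        #endif
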