Lines Matching refs:shift (lib/xarray.c)

144 return (index >> node->shift) & XA_CHUNK_MASK; in get_offset()
155 unsigned int shift = xas->xa_node->shift; in xas_move_index() local
156 xas->xa_index &= ~XA_CHUNK_MASK << shift; in xas_move_index()
157 xas->xa_index += offset << shift; in xas_move_index()
193 if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK) in xas_start()
210 if (node->shift && xa_is_node(entry)) in xas_descend()
240 if (xas->xa_shift > node->shift) in xas_load()
243 if (node->shift == 0) in xas_load()
361 static void *xas_alloc(struct xa_state *xas, unsigned int shift) in xas_alloc() argument
390 XA_NODE_BUG_ON(node, shift > BITS_PER_LONG); in xas_alloc()
392 node->shift = shift; in xas_alloc()
436 return (XA_CHUNK_SIZE << xa_to_node(entry)->shift) - 1; in max_index()
453 if (!xa_is_node(entry) && node->shift) in xas_shrink()
533 if (node->shift && xa_is_node(entry)) { in xas_free_nodes()
565 unsigned int shift = 0; in xas_expand() local
571 while ((max >> shift) >= XA_CHUNK_SIZE) in xas_expand()
572 shift += XA_CHUNK_SHIFT; in xas_expand()
573 return shift + XA_CHUNK_SHIFT; in xas_expand()
576 shift = node->shift + XA_CHUNK_SHIFT; in xas_expand()
583 XA_NODE_BUG_ON(node, shift > BITS_PER_LONG); in xas_expand()
584 node = xas_alloc(xas, shift); in xas_expand()
621 shift += XA_CHUNK_SHIFT; in xas_expand()
625 return shift; in xas_expand()
647 int shift; in xas_create() local
655 shift = xas_expand(xas, entry); in xas_create()
656 if (shift < 0) in xas_create()
658 if (!shift && !allow_root) in xas_create()
659 shift = XA_CHUNK_SHIFT; in xas_create()
667 shift = node->shift; in xas_create()
671 shift = 0; in xas_create()
676 while (shift > order) { in xas_create()
677 shift -= XA_CHUNK_SHIFT; in xas_create()
679 node = xas_alloc(xas, shift); in xas_create()
709 unsigned char shift = xas->xa_shift; in xas_create_range() local
712 xas->xa_index |= ((sibs + 1UL) << shift) - 1; in xas_create_range()
713 if (xas_is_node(xas) && xas->xa_node->shift == xas->xa_shift) in xas_create_range()
728 if (node->shift >= shift) in xas_create_range()
738 xas->xa_shift = shift; in xas_create_range()
797 if (node && (xas->xa_shift < node->shift)) in xas_store()
822 if (xa_is_node(next) && (!node || node->shift)) in xas_store()
1072 if (xas->xa_shift < node->shift) { in xas_split()
1076 child->shift = node->shift - XA_CHUNK_SHIFT; in xas_split()
1136 xas->xa_index += (offset - xas->xa_offset) << node->shift; in xas_pause()
1255 } else if (!xas->xa_node->shift && in xas_find()
1335 xas->xa_offset = xas->xa_index >> xas->xa_node->shift; in xas_find_marked()
1418 if (xas->xa_node->shift > xas->xa_shift) in xas_find_conflict()
1422 if (xas->xa_node->shift == xas->xa_shift) { in xas_find_conflict()
1672 unsigned int shift = 0; in xas_set_range() local
1683 shift += XA_CHUNK_SHIFT; in xas_set_range()
1693 if ((((first + sibs + 1) << shift) - 1) > last) in xas_set_range()
1696 xas->xa_shift = shift; in xas_set_range()
1786 order += xas.xa_node->shift; in xa_get_order()
2041 mask = (XA_CHUNK_SIZE << node->shift) - 1; in xas_sibling()
2043 ((unsigned long)xas->xa_offset << node->shift); in xas_sibling()
2186 (node->shift + XA_CHUNK_SHIFT), in xa_delete_node()
2187 .xa_shift = node->shift + XA_CHUNK_SHIFT, in xa_delete_node()
2242 node->parent, node->shift, node->count, node->nr_values, in xa_dump_node()
2250 void xa_dump_index(unsigned long index, unsigned int shift) in xa_dump_index() argument
2252 if (!shift) in xa_dump_index()
2254 else if (shift >= BITS_PER_LONG) in xa_dump_index()
2257 pr_info("%lu-%lu: ", index, index | ((1UL << shift) - 1)); in xa_dump_index()
2260 void xa_dump_entry(const void *entry, unsigned long index, unsigned long shift) in xa_dump_entry() argument
2265 xa_dump_index(index, shift); in xa_dump_entry()
2268 if (shift == 0) { in xa_dump_entry()
2276 index + (i << node->shift), node->shift); in xa_dump_entry()
2296 unsigned int shift = 0; in xa_dump() local
2302 shift = xa_to_node(entry)->shift + XA_CHUNK_SHIFT; in xa_dump()
2303 xa_dump_entry(entry, 0, shift); in xa_dump()
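
Taken together, the matches show one scheme: node->shift counts the index bits
below a node, xas_expand() grows the root shift in XA_CHUNK_SHIFT steps until
the target index fits (lines 571-573), max_index() derives the largest index a
node of a given shift can cover (line 436), and get_offset()/xas_descend()
extract the per-level slot offset with (index >> shift) & XA_CHUNK_MASK (line
144). The standalone userspace sketch below mirrors that arithmetic; it is not
kernel code, root_shift_for() and the sample index are illustrative only, and
XA_CHUNK_SHIFT is taken as 6 here (the kernel's actual value is
configuration-dependent).

/* Sketch of the shift arithmetic seen in lib/xarray.c; not kernel code. */
#include <stdio.h>

#define XA_CHUNK_SHIFT	6			/* assumed; config-dependent in the kernel */
#define XA_CHUNK_SIZE	(1UL << XA_CHUNK_SHIFT)
#define XA_CHUNK_MASK	(XA_CHUNK_SIZE - 1)

/* Smallest root shift covering indices 0..max, as in xas_expand() (571-573). */
static unsigned int root_shift_for(unsigned long max)
{
	unsigned int shift = 0;

	while ((max >> shift) >= XA_CHUNK_SIZE)
		shift += XA_CHUNK_SHIFT;
	return shift;
}

int main(void)
{
	unsigned long index = 123456789;	/* arbitrary example index */
	unsigned int shift = root_shift_for(index);

	/* Largest index a node with this shift can cover, cf. max_index() (436). */
	printf("root shift %u covers 0..%lu\n",
	       shift, (XA_CHUNK_SIZE << shift) - 1);

	/* Walk down: each level consumes XA_CHUNK_SHIFT index bits and the slot
	 * offset at that level is (index >> shift) & XA_CHUNK_MASK, cf.
	 * get_offset() (144) and xas_descend().
	 */
	for (;;) {
		unsigned long offset = (index >> shift) & XA_CHUNK_MASK;

		printf("shift %2u -> slot offset %lu\n", shift, offset);
		if (!shift)
			break;
		shift -= XA_CHUNK_SHIFT;	/* cf. xas_create() (677) */
	}
	return 0;
}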