Searched refs:xa_index (Results 1 – 18 of 18) sorted by relevance
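For context, xa_index is the cursor field of struct xa_state (see the include/linux/xarray.h hits below): it records the index an XArray walk is currently positioned at, and most of the call sites in these results read, compare or reset it. A minimal sketch of the common pattern, using hypothetical names (sketch_xa, sketch_walk) rather than code from any file listed here:

#include <linux/xarray.h>

static DEFINE_XARRAY(sketch_xa);

static void sketch_walk(void)
{
        XA_STATE(xas, &sketch_xa, 0);   /* cursor starts at index 0 */
        void *entry;

        rcu_read_lock();
        xas_for_each(&xas, entry, ULONG_MAX) {
                /* xas.xa_index is the index 'entry' was found at,
                 * which is what most of the hits below inspect. */
                pr_debug("entry at index %lu\n", xas.xa_index);
        }
        rcu_read_unlock();
}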

/kernel/linux/linux-5.10/lib/
xarray.c
149 xas->xa_offset = get_offset(xas->xa_index, xas->xa_node); in xas_set_offset()
156 xas->xa_index &= ~XA_CHUNK_MASK << shift; in xas_move_index()
157 xas->xa_index += offset << shift; in xas_move_index()
190 if (xas->xa_index) in xas_start()
193 if ((xas->xa_index >> xa_to_node(entry)->shift) > XA_CHUNK_MASK) in xas_start()
203 unsigned int offset = get_offset(xas->xa_index, node); in xas_descend()
414 unsigned long max = xas->xa_index; in xas_max()
705 unsigned long index = xas->xa_index; in xas_create_range()
709 xas->xa_index |= ((sibs + 1UL) << shift) - 1; in xas_create_range()
719 if (xas->xa_index <= (index | XA_CHUNK_MASK)) in xas_create_range()
[all …]
idr.c
398 if (xas.xa_index > min / IDA_BITMAP_BITS) in ida_alloc_range()
400 if (xas.xa_index * IDA_BITMAP_BITS + bit > max) in ida_alloc_range()
408 if (xas.xa_index * IDA_BITMAP_BITS + bit > max) in ida_alloc_range()
431 if (xas.xa_index * IDA_BITMAP_BITS + bit > max) in ida_alloc_range()
455 xas.xa_index = min / IDA_BITMAP_BITS; in ida_alloc_range()
463 return xas.xa_index * IDA_BITMAP_BITS + bit; in ida_alloc_range()
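(In the ida_alloc_range() hits above, the returned ID is reconstructed as xas.xa_index * IDA_BITMAP_BITS + bit, so xa_index selects which 128-byte IDA bitmap the bit lives in. As a worked example, with IDA_BITMAP_BITS == 1024, xa_index == 2 and bit == 5 give ID 2053.)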
test_xarray.c
139 xas_store(&xas, xa_mk_index(xas.xa_index)); in check_xas_retry()
284 XA_BUG_ON(xa, !xa_get_mark(xa, xas.xa_index, XA_MARK_0)); in check_xa_mark_2()
564 XA_BUG_ON(xa, xas.xa_index != index); in check_multi_store_2()
1149 return entry ? xas.xa_index : -1; in xa_find_entry()
1276 XA_BUG_ON(xa, xas.xa_index != i); in check_move_small()
1283 XA_BUG_ON(xa, xas.xa_index != i); in check_move_small()
1290 XA_BUG_ON(xa, xas.xa_index != i); in check_move_small()
1299 XA_BUG_ON(xa, xas.xa_index != ULONG_MAX); in check_move_small()
1301 XA_BUG_ON(xa, xas.xa_index != 0); in check_move_small()
1303 XA_BUG_ON(xa, xas.xa_index != ULONG_MAX); in check_move_small()
[all …]
/kernel/linux/linux-5.10/tools/testing/radix-tree/
multiorder.c
62 assert((xas.xa_index | mask) == (index[i] | mask)); in multiorder_iteration()
114 assert((xas.xa_index | mask) == (tag_index[i] | mask)); in multiorder_tagged_iteration()
141 assert((xas.xa_index | mask) == (tag_index[i] | mask)); in multiorder_tagged_iteration()
154 assert(xas.xa_index == tag_index[i]); in multiorder_tagged_iteration()
191 item_sanity(item, xas.xa_index); in iterator_func()
iteration_check_2.c
26 assert(xas.xa_index >= 100); in iterator()
test.c
262 item_free(entry, xas.xa_index); in item_kill_tree()
/kernel/linux/linux-5.10/include/linux/
xarray.h
1310 unsigned long xa_index; member
1330 .xa_index = index, \
1555 offset = (xas->xa_index >> node->shift) & XA_CHUNK_MASK; in xas_reload()
1577 xas->xa_index = index; in xas_set()
1591 xas->xa_index = order < BITS_PER_LONG ? (index >> order) << order : 0; in xas_set_order()
1631 xas->xa_offset != (xas->xa_index & XA_CHUNK_MASK))) in xas_next_entry()
1635 if (unlikely(xas->xa_index >= max)) in xas_next_entry()
1643 xas->xa_index++; in xas_next_entry()
1693 xas->xa_index = (xas->xa_index & ~XA_CHUNK_MASK) + offset; in xas_next_marked()
1694 if (xas->xa_index > max) in xas_next_marked()
[all …]
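The xarray.h hits above show where xa_index comes from: it is seeded by the xa_state initializer (1330), rewritten by xas_set() and xas_set_order() (1577, 1591), and advanced by xas_next_entry() (1643). A small sketch of repositioning a walk, reusing the hypothetical sketch_xa array from the first sketch:

static void sketch_reposition(void)
{
        XA_STATE(xas, &sketch_xa, 0);

        xas_set(&xas, 100);             /* next operation restarts at index 100 */
        /* xas.xa_index == 100 */

        xas_set_order(&xas, 100, 3);    /* position on an order-3 (8-slot) entry */
        /* xas.xa_index == 96: rounded down to a multiple of 1 << 3 */
}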
/kernel/linux/linux-5.10/mm/
filemap.c
326 if (page->index == xas.xa_index) in page_cache_delete_batch()
335 if (page->index + compound_nr(page) - 1 == xas.xa_index) in page_cache_delete_batch()
858 unsigned int order = xa_get_order(xas.xa, xas.xa_index); in __add_to_page_cache_locked()
862 xas_split_alloc(&xas, xa_load(xas.xa, xas.xa_index), in __add_to_page_cache_locked()
877 order = xa_get_order(xas.xa, xas.xa_index); in __add_to_page_cache_locked()
1633 if (xas.xa_index == 0) in page_cache_next_miss()
1637 return xas.xa_index; in page_cache_next_miss()
1669 if (xas.xa_index == ULONG_MAX) in page_cache_prev_miss()
1673 return xas.xa_index; in page_cache_prev_miss()
1933 page = find_subpage(page, xas.xa_index); in find_get_entries()
[all …]
memfd.c
42 page = find_subpage(page, xas->xa_index); in memfd_tag_pins()
92 page = find_subpage(page, xas.xa_index); in memfd_wait_for_pins()
khugepaged.c
1694 VM_BUG_ON(index != xas.xa_index); in collapse_file()
1936 if (!page || xas.xa_index < page->index) { in collapse_file()
1945 VM_BUG_ON_PAGE(page->index != xas.xa_index, page); in collapse_file()
swap_state.c
152 VM_BUG_ON_PAGE(xas.xa_index != idx + i, page); in add_to_swap_cache()
shmem.c
1220 indices[ret] = xas.xa_index; in shmem_find_swap_entries()
/kernel/linux/linux-5.10/fs/
dax.c
161 unsigned long index = xas->xa_index; in dax_entry_waitqueue()
490 unsigned long index = xas->xa_index; in grab_mapping_entry()
531 xas->xa_index & ~PG_PMD_COLOUR, in grab_mapping_entry()
754 unsigned long index = xas->xa_index; in dax_insert_entry()
938 index = xas->xa_index & ~(count - 1); in dax_writeback_one()
983 trace_dax_writeback_range(inode, xas.xa_index, end_index); in dax_writeback_mapping_range()
985 tag_pages_for_writeback(mapping, xas.xa_index, end_index); in dax_writeback_mapping_range()
1003 trace_dax_writeback_range_done(inode, xas.xa_index, end_index); in dax_writeback_mapping_range()
1530 if (xas.xa_index >= max_pgoff) { in dax_iomap_pmd_fault()
1536 if ((xas.xa_index | PG_PMD_COLOUR) >= max_pgoff) in dax_iomap_pmd_fault()
[all …]
/kernel/linux/linux-5.10/drivers/infiniband/core/
ib_core_uverbs.c
298 xa_first = xas.xa_index; in rdma_user_mmap_entry_insert_range()
309 if (xas.xa_node == XAS_BOUNDS || xas.xa_index >= xa_last) in rdma_user_mmap_entry_insert_range()
device.c
169 *indexp = xas.xa_index; in xan_find_marked()
/kernel/linux/linux-5.10/arch/arm64/mm/
mteswap.c
79 __xa_erase(&mte_pages, xa_state.xa_index); in mte_invalidate_tags_area()
/kernel/linux/linux-5.10/arch/arm64/kernel/
hibernate.c
371 unsigned long pfn = xa_state.xa_index; in swsusp_mte_restore_tags()
/kernel/linux/linux-5.10/Documentation/core-api/
xarray.rst
470 If xas_load() encounters a multi-index entry, the xa_index
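The xarray.rst hit above (truncated) concerns multi-index entries, where a single entry covers a power-of-two span of indices, and describes how xa_index behaves when xas_load() meets one. A hedged sketch of creating such an entry, again against the hypothetical sketch_xa array from the first sketch, using the in-kernel XA_STATE_ORDER()/xas_store()/xas_nomem() API (multi-index storage needs CONFIG_XARRAY_MULTI):

/* Store one entry covering indices 64..127 (order 6), then load it
 * back through an index in the middle of that range. */
static void sketch_multi_index(void *payload)
{
        XA_STATE_ORDER(xas, &sketch_xa, 64, 6);

        do {
                xas_lock(&xas);
                xas_store(&xas, payload);
                xas_unlock(&xas);
        } while (xas_nomem(&xas, GFP_KERNEL));

        /* Every index in 64..127 now resolves to 'payload'. */
        WARN_ON(xa_load(&sketch_xa, 100) != payload);
}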