Lines matching refs:xas (XArray cursor usage in the page cache, mm/filemap.c)
122 XA_STATE(xas, &mapping->i_pages, page->index); in page_cache_delete()
125 mapping_set_update(&xas, mapping); in page_cache_delete()
129 xas_set_order(&xas, page->index, compound_order(page)); in page_cache_delete()
137 xas_store(&xas, shadow); in page_cache_delete()
138 xas_init_marks(&xas); in page_cache_delete()
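
The page_cache_delete() lines above replace a present entry with a shadow and wipe its marks while the caller holds the i_pages lock. Below is a minimal sketch of that store-and-clear-marks pattern on a generic XArray; cache_delete and its parameters are invented names, and the array lock is assumed to be held by the caller.

    #include <linux/xarray.h>

    /*
     * Replace the entry at 'index' with 'shadow' (NULL or an xa_mk_value()
     * entry) and reset its marks.  The caller is assumed to hold the
     * array's xa_lock, as the page cache holds the i_pages lock here.
     */
    static void cache_delete(struct xarray *xa, unsigned long index, void *shadow)
    {
            XA_STATE(xas, xa, index);

            /*
             * For a multi-order (compound) entry the cursor would be
             * widened first with xas_set_order(), as the listing shows.
             */
            xas_store(&xas, shadow);
            xas_init_marks(&xas);
    }
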
297 XA_STATE(xas, &mapping->i_pages, pvec->pages[0]->index); in page_cache_delete_batch()
302 mapping_set_update(&xas, mapping); in page_cache_delete_batch()
303 xas_for_each(&xas, page, ULONG_MAX) { in page_cache_delete_batch()
325 if (page->index == xas.xa_index) in page_cache_delete_batch()
334 if (page->index + compound_nr(page) - 1 == xas.xa_index) in page_cache_delete_batch()
336 xas_store(&xas, NULL); in page_cache_delete_batch()
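
page_cache_delete_batch() walks a range with a single cursor and erases entries in place by storing NULL. A hedged sketch of that batched-erase loop follows; the names are illustrative and the caller is assumed to hold the array lock, so no retry entries can appear.

    #include <linux/xarray.h>

    /*
     * Erase every entry in [start, ULONG_MAX] that matches 'victim',
     * reusing one cursor for the whole walk.  The caller is assumed to
     * hold the array's xa_lock, as the batched delete above runs under
     * the i_pages lock.
     */
    static unsigned int cache_delete_batch(struct xarray *xa,
                                           unsigned long start, void *victim)
    {
            XA_STATE(xas, xa, start);
            unsigned int erased = 0;
            void *entry;

            xas_for_each(&xas, entry, ULONG_MAX) {
                    if (xa_is_value(entry))         /* skip shadow entries */
                            continue;
                    if (entry != victim)
                            continue;
                    xas_store(&xas, NULL);          /* erase without moving the cursor */
                    erased++;
            }
            return erased;
    }
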
476 XA_STATE(xas, &mapping->i_pages, start_byte >> PAGE_SHIFT); in filemap_range_has_page()
484 page = xas_find(&xas, max); in filemap_range_has_page()
485 if (xas_retry(&xas, page)) in filemap_range_has_page()
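
filemap_range_has_page() only needs to know whether anything exists in a range, so it probes under RCU with xas_find() and restarts when it hits a retry entry. A sketch of that probe on a generic XArray; range_has_entry is a made-up name and the shadow-entry filtering of the original is omitted.

    #include <linux/xarray.h>

    /*
     * Return true if any entry exists in [first, last].  The walk runs
     * under RCU only; xas_retry() restarts the cursor if it raced with a
     * node being reused.
     */
    static bool range_has_entry(struct xarray *xa, unsigned long first,
                                unsigned long last)
    {
            XA_STATE(xas, xa, first);
            void *entry;

            if (last < first)
                    return false;

            rcu_read_lock();
            do {
                    entry = xas_find(&xas, last);
            } while (xas_retry(&xas, entry));
            rcu_read_unlock();

            return entry != NULL;
    }
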
816 XA_STATE(xas, &mapping->i_pages, offset); in replace_page_cache_page()
827 xas_lock_irqsave(&xas, flags); in replace_page_cache_page()
828 xas_store(&xas, new); in replace_page_cache_page()
840 xas_unlock_irqrestore(&xas, flags); in replace_page_cache_page()
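
replace_page_cache_page() swaps one entry for another under the array lock with interrupts saved. A minimal sketch of that lock/store/unlock sequence; cache_replace is an invented name.

    #include <linux/xarray.h>

    /*
     * Atomically swap the entry at 'index' for 'new' with respect to other
     * users of the array, taking the xa_lock with interrupts saved.
     * Returns the entry that was previously stored there.
     */
    static void *cache_replace(struct xarray *xa, unsigned long index, void *new)
    {
            XA_STATE(xas, xa, index);
            unsigned long flags;
            void *old;

            xas_lock_irqsave(&xas, flags);
            old = xas_store(&xas, new);
            xas_unlock_irqrestore(&xas, flags);

            return old;
    }
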
855 XA_STATE(xas, &mapping->i_pages, offset); in __add_to_page_cache_locked()
862 mapping_set_update(&xas, mapping); in __add_to_page_cache_locked()
877 unsigned int order = xa_get_order(xas.xa, xas.xa_index); in __add_to_page_cache_locked()
881 xas_split_alloc(&xas, xa_load(xas.xa, xas.xa_index), in __add_to_page_cache_locked()
883 xas_lock_irq(&xas); in __add_to_page_cache_locked()
884 xas_for_each_conflict(&xas, entry) { in __add_to_page_cache_locked()
887 xas_set_err(&xas, -EEXIST); in __add_to_page_cache_locked()
896 order = xa_get_order(xas.xa, xas.xa_index); in __add_to_page_cache_locked()
898 xas_split(&xas, old, order); in __add_to_page_cache_locked()
899 xas_reset(&xas); in __add_to_page_cache_locked()
903 xas_store(&xas, page); in __add_to_page_cache_locked()
904 if (xas_error(&xas)) in __add_to_page_cache_locked()
915 xas_unlock_irq(&xas); in __add_to_page_cache_locked()
916 } while (xas_nomem(&xas, gfp_mask)); in __add_to_page_cache_locked()
918 if (xas_error(&xas)) in __add_to_page_cache_locked()
931 return xas_error(&xas); in __add_to_page_cache_locked()
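
__add_to_page_cache_locked() is built around the xas_nomem() retry loop visible above: take the lock, refuse to overwrite a real entry, store, drop the lock, and let xas_nomem() allocate nodes outside the lock before retrying. A sketch of that insertion loop under the same assumptions; cache_insert and 'item' are invented names, shadow value entries are treated as replaceable as the listed code does, and the huge-page split handling (xas_split_alloc()/xas_split()) shown above is omitted.

    #include <linux/xarray.h>

    /*
     * Insert 'item' at 'index' unless a real entry is already present.
     * Node allocation happens outside the lock via xas_nomem(), after
     * which the whole locked section is retried.
     */
    static int cache_insert(struct xarray *xa, unsigned long index, void *item,
                            gfp_t gfp)
    {
            XA_STATE(xas, xa, index);
            void *entry;

            do {
                    xas_lock_irq(&xas);
                    xas_for_each_conflict(&xas, entry) {
                            if (!xa_is_value(entry)) {
                                    /* A real entry is already present. */
                                    xas_set_err(&xas, -EEXIST);
                                    goto unlock;
                            }
                            /* Shadow (value) entries are simply replaced. */
                    }
                    xas_store(&xas, item);
    unlock:
                    xas_unlock_irq(&xas);
            } while (xas_nomem(&xas, gfp));

            return xas_error(&xas);
    }
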
1620 XA_STATE(xas, &mapping->i_pages, index); in page_cache_next_miss()
1623 void *entry = xas_next(&xas); in page_cache_next_miss()
1626 if (xas.xa_index == 0) in page_cache_next_miss()
1630 return xas.xa_index; in page_cache_next_miss()
1656 XA_STATE(xas, &mapping->i_pages, index); in page_cache_prev_miss()
1659 void *entry = xas_prev(&xas); in page_cache_prev_miss()
1662 if (xas.xa_index == ULONG_MAX) in page_cache_prev_miss()
1666 return xas.xa_index; in page_cache_prev_miss()
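
page_cache_next_miss() and page_cache_prev_miss() step the cursor one index at a time with xas_next()/xas_prev() and stop at the first empty slot, using the wrap of xa_index (to 0 going forward, to ULONG_MAX going backward) as the end condition. A sketch of the forward walk; next_hole is a made-up name and taking rcu_read_lock() here is an assumption of the sketch.

    #include <linux/xarray.h>

    /*
     * Return the first index at or after 'index' holding no entry (or
     * only a shadow value entry), scanning at most 'max_scan' slots.
     * The xa_index == 0 test catches the cursor wrapping past ULONG_MAX;
     * the backwards variant would use xas_prev() and test for ULONG_MAX.
     */
    static unsigned long next_hole(struct xarray *xa, unsigned long index,
                                   unsigned long max_scan)
    {
            XA_STATE(xas, xa, index);

            rcu_read_lock();
            while (max_scan--) {
                    void *entry = xas_next(&xas);

                    if (!entry || xa_is_value(entry))
                            break;
                    if (xas.xa_index == 0)
                            break;
            }
            rcu_read_unlock();

            return xas.xa_index;
    }
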
1685 XA_STATE(xas, &mapping->i_pages, offset); in find_get_entry()
1690 xas_reset(&xas); in find_get_entry()
1691 page = xas_load(&xas); in find_get_entry()
1692 if (xas_retry(&xas, page)) in find_get_entry()
1709 if (unlikely(page != xas_reload(&xas))) { in find_get_entry()
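
find_get_entry() shows the RCU lookup dance: xas_load() the entry, take a speculative reference, then xas_reload() to confirm the slot still holds the same page, restarting from xas_reset() if anything moved. A trimmed sketch of that pattern; lookup_page is an invented name, page_cache_get_speculative() is the reference-grab helper of kernels from this era, and shadow entries are treated as misses here rather than returned.

    #include <linux/pagemap.h>
    #include <linux/xarray.h>

    /*
     * Look up one page under RCU and return it with a reference held,
     * or NULL if the slot is empty or holds a shadow entry.
     */
    static struct page *lookup_page(struct address_space *mapping, pgoff_t offset)
    {
            XA_STATE(xas, &mapping->i_pages, offset);
            struct page *page;

            rcu_read_lock();
    repeat:
            xas_reset(&xas);
            page = xas_load(&xas);
            if (xas_retry(&xas, page))
                    goto repeat;
            if (!page || xa_is_value(page)) {
                    page = NULL;                    /* hole or shadow entry */
                    goto out;
            }
            if (!page_cache_get_speculative(page))
                    goto repeat;                    /* page was being freed */

            /* The slot may have changed between the load and the get. */
            if (unlikely(page != xas_reload(&xas))) {
                    put_page(page);
                    goto repeat;
            }
    out:
            rcu_read_unlock();
            return page;
    }
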
1886 XA_STATE(xas, &mapping->i_pages, start); in find_get_entries()
1894 xas_for_each(&xas, page, ULONG_MAX) { in find_get_entries()
1895 if (xas_retry(&xas, page)) in find_get_entries()
1909 if (unlikely(page != xas_reload(&xas))) in find_get_entries()
1911 page = find_subpage(page, xas.xa_index); in find_get_entries()
1914 indices[ret] = xas.xa_index; in find_get_entries()
1922 xas_reset(&xas); in find_get_entries()
1953 XA_STATE(xas, &mapping->i_pages, *start); in find_get_pages_range()
1961 xas_for_each(&xas, page, end) { in find_get_pages_range()
1962 if (xas_retry(&xas, page)) in find_get_pages_range()
1972 if (unlikely(page != xas_reload(&xas))) in find_get_pages_range()
1975 pages[ret] = find_subpage(page, xas.xa_index); in find_get_pages_range()
1977 *start = xas.xa_index + 1; in find_get_pages_range()
1984 xas_reset(&xas); in find_get_pages_range()
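
find_get_entries() and find_get_pages_range() share one shape: xas_for_each() over a range, the same speculative-get/xas_reload() check per page, xas_reset() when a race is lost, and *start advanced past the last page returned. A sketch of that batched gather; gather_pages and its parameters are invented, page_cache_get_speculative() and find_subpage() are the helpers of kernels from this era.

    #include <linux/pagemap.h>
    #include <linux/xarray.h>

    /*
     * Gather up to 'nr' pages with indices in [*start, end] into 'pages',
     * taking a reference on each, and advance *start past the last page
     * returned.
     */
    static unsigned int gather_pages(struct address_space *mapping,
                                     pgoff_t *start, pgoff_t end,
                                     unsigned int nr, struct page **pages)
    {
            XA_STATE(xas, &mapping->i_pages, *start);
            unsigned int ret = 0;
            struct page *page;

            if (unlikely(!nr))
                    return 0;

            rcu_read_lock();
            xas_for_each(&xas, page, end) {
                    if (xas_retry(&xas, page))
                            continue;
                    if (xa_is_value(page))          /* shadow entry: not a page */
                            continue;
                    if (!page_cache_get_speculative(page))
                            goto retry;
                    if (unlikely(page != xas_reload(&xas)))
                            goto put_page;          /* lost a race; drop and retry */

                    pages[ret] = find_subpage(page, xas.xa_index);
                    if (++ret == nr) {
                            *start = xas.xa_index + 1;
                            goto out;
                    }
                    continue;
    put_page:
                    put_page(page);
    retry:
                    xas_reset(&xas);
            }
            /* No page beyond 'end'; avoid wrapping *start past ULONG_MAX. */
            *start = (end == (pgoff_t)-1) ? end : end + 1;
    out:
            rcu_read_unlock();
            return ret;
    }
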
2018 XA_STATE(xas, &mapping->i_pages, index); in find_get_pages_contig()
2026 for (page = xas_load(&xas); page; page = xas_next(&xas)) { in find_get_pages_contig()
2027 if (xas_retry(&xas, page)) in find_get_pages_contig()
2040 if (unlikely(page != xas_reload(&xas))) in find_get_pages_contig()
2043 pages[ret] = find_subpage(page, xas.xa_index); in find_get_pages_contig()
2050 xas_reset(&xas); in find_get_pages_contig()
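
find_get_pages_contig() differs only in the walk: xas_load() the first slot, then xas_next() one index at a time, stopping at the first gap. A reduced sketch showing just the contiguity walk; count_contig is a made-up name, and a page-cache user would also add the reference and xas_reload() check from the earlier sketch.

    #include <linux/xarray.h>

    /*
     * Count how many consecutive indices starting at 'index' are
     * populated, stopping at the first gap and after at most 'max'
     * entries.
     */
    static unsigned int count_contig(struct xarray *xa, unsigned long index,
                                     unsigned int max)
    {
            XA_STATE(xas, xa, index);
            unsigned int ret = 0;
            void *entry;

            if (!max)
                    return 0;

            rcu_read_lock();
            for (entry = xas_load(&xas); entry; entry = xas_next(&xas)) {
                    if (xas_retry(&xas, entry))
                            continue;
                    if (xa_is_value(entry))         /* a value entry ends the run */
                            break;
                    if (++ret == max)
                            break;
            }
            rcu_read_unlock();

            return ret;
    }
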
2075 XA_STATE(xas, &mapping->i_pages, *index); in find_get_pages_range_tag()
2083 xas_for_each_marked(&xas, page, end, tag) { in find_get_pages_range_tag()
2084 if (xas_retry(&xas, page)) in find_get_pages_range_tag()
2098 if (unlikely(page != xas_reload(&xas))) in find_get_pages_range_tag()
2101 pages[ret] = find_subpage(page, xas.xa_index); in find_get_pages_range_tag()
2103 *index = xas.xa_index + 1; in find_get_pages_range_tag()
2110 xas_reset(&xas); in find_get_pages_range_tag()
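
find_get_pages_range_tag() restricts the walk to entries carrying a mark (the page cache uses marks for its dirty and writeback tags) via xas_for_each_marked(), and records xa_index + 1 so the caller can resume. A sketch with invented names (gather_marked, results); for refcounted objects the get/xas_reload() step from the earlier sketches would be added per entry.

    #include <linux/xarray.h>

    /*
     * Collect up to 'nr' entries in [*index, end] that carry 'mark',
     * advancing *index so the caller can resume where this batch stopped.
     * Runs under RCU; under the array lock the xas_retry() check would
     * be unnecessary.
     */
    static unsigned int gather_marked(struct xarray *xa, unsigned long *index,
                                      unsigned long end, xa_mark_t mark,
                                      unsigned int nr, void **results)
    {
            XA_STATE(xas, xa, *index);
            unsigned int ret = 0;
            void *entry;

            rcu_read_lock();
            xas_for_each_marked(&xas, entry, end, mark) {
                    if (xas_retry(&xas, entry))
                            continue;
                    results[ret] = entry;
                    if (++ret == nr) {
                            *index = xas.xa_index + 1;  /* resume after the last hit */
                            break;
                    }
            }
            rcu_read_unlock();

            if (ret < nr)           /* range exhausted before the batch filled */
                    *index = (end == ULONG_MAX) ? end : end + 1;
            return ret;
    }
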
2767 XA_STATE(xas, &mapping->i_pages, start_pgoff); in filemap_map_pages()
2771 xas_for_each(&xas, page, end_pgoff) { in filemap_map_pages()
2772 if (xas_retry(&xas, page)) in filemap_map_pages()
2787 if (unlikely(page != xas_reload(&xas))) in filemap_map_pages()
2789 page = find_subpage(page, xas.xa_index); in filemap_map_pages()
2808 vmf->address += (xas.xa_index - last_pgoff) << PAGE_SHIFT; in filemap_map_pages()
2810 vmf->pte += xas.xa_index - last_pgoff; in filemap_map_pages()
2811 last_pgoff = xas.xa_index; in filemap_map_pages()
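
filemap_map_pages() never recomputes offsets from scratch: each time the cursor lands on a populated slot, the delta xas.xa_index - last_pgoff advances both the user address and the PTE pointer, so sparse ranges are skipped cheaply. A sketch of just that bookkeeping; map_range_addresses and base_addr are invented, and the real fault path also installs PTEs rather than merely reporting addresses.

    #include <linux/mm.h>
    #include <linux/xarray.h>

    /*
     * Walk [start_pgoff, end_pgoff] and, for every populated slot, derive
     * the user address it would map at from the distance the cursor
     * jumped since the last hit.  'base_addr' is the address that
     * corresponds to start_pgoff.
     */
    static void map_range_addresses(struct xarray *xa, unsigned long base_addr,
                                    unsigned long start_pgoff,
                                    unsigned long end_pgoff)
    {
            XA_STATE(xas, xa, start_pgoff);
            unsigned long last_pgoff = start_pgoff;
            unsigned long addr = base_addr;
            void *entry;

            rcu_read_lock();
            xas_for_each(&xas, entry, end_pgoff) {
                    if (xas_retry(&xas, entry))
                            continue;
                    if (xa_is_value(entry))         /* shadow entries can't be mapped */
                            continue;

                    addr += (xas.xa_index - last_pgoff) << PAGE_SHIFT;
                    last_pgoff = xas.xa_index;
                    pr_debug("index %lu maps at %#lx\n", xas.xa_index, addr);
            }
            rcu_read_unlock();
    }
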