Lines matching refs: xas (mm/filemap.c)
123 XA_STATE(xas, &mapping->i_pages, page->index); in page_cache_delete()
126 mapping_set_update(&xas, mapping); in page_cache_delete()
130 xas_set_order(&xas, page->index, compound_order(page)); in page_cache_delete()
138 xas_store(&xas, shadow); in page_cache_delete()
139 xas_init_marks(&xas); in page_cache_delete()
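
The page_cache_delete() matches above show the single-slot delete pattern: the XA_STATE is created at page->index, xas_set_order() widens it to cover a compound page, xas_store() puts a shadow entry (or NULL) in the slot, and xas_init_marks() clears any PAGECACHE_TAG_* marks. A minimal sketch of that store-plus-clear-marks step follows; the helper name is hypothetical, the caller is assumed to already hold the mapping's xa_lock, and this and all later sketches in this listing assume <linux/xarray.h>, <linux/pagemap.h> and <linux/rcupdate.h>.

#include <linux/xarray.h>
#include <linux/pagemap.h>

/*
 * Hypothetical helper: replace one page-cache slot with a shadow entry,
 * mirroring the xas_store()/xas_init_marks() pair in page_cache_delete().
 * Caller must hold the xa_lock of mapping->i_pages.
 */
static void cache_slot_to_shadow(struct address_space *mapping,
                                 pgoff_t index, void *shadow)
{
        XA_STATE(xas, &mapping->i_pages, index);

        xas_store(&xas, shadow);        /* shadow is NULL or an xa_mk_value() entry */
        xas_init_marks(&xas);           /* drop PAGECACHE_TAG_* marks on the slot */
}
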
298 XA_STATE(xas, &mapping->i_pages, pvec->pages[0]->index); in page_cache_delete_batch()
303 mapping_set_update(&xas, mapping); in page_cache_delete_batch()
304 xas_for_each(&xas, page, ULONG_MAX) { in page_cache_delete_batch()
326 if (page->index == xas.xa_index) in page_cache_delete_batch()
335 if (page->index + compound_nr(page) - 1 == xas.xa_index) in page_cache_delete_batch()
337 xas_store(&xas, NULL); in page_cache_delete_batch()
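
page_cache_delete_batch() walks the tree with xas_for_each(), skips retry entries, uses xas.xa_index to recognise the tail slots of a compound page, and clears each slot with xas_store(&xas, NULL). A reduced sketch of that locked batch clear, without the compound-page accounting (helper name is illustrative):

/*
 * Clear every slot in [first, last]; caller holds the xa_lock, as in
 * page_cache_delete_batch().
 */
static void cache_clear_range(struct address_space *mapping,
                              pgoff_t first, pgoff_t last)
{
        XA_STATE(xas, &mapping->i_pages, first);
        struct page *page;

        xas_for_each(&xas, page, last) {
                if (xas_retry(&xas, page))
                        continue;               /* transient retry entry */
                xas_store(&xas, NULL);          /* erase the slot we are on */
        }
}
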
477 XA_STATE(xas, &mapping->i_pages, start_byte >> PAGE_SHIFT); in filemap_range_has_page()
485 page = xas_find(&xas, max); in filemap_range_has_page()
486 if (xas_retry(&xas, page)) in filemap_range_has_page()
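
filemap_range_has_page() shows the lockless probe: xas_find() under rcu_read_lock(), restarting whenever xas_retry() reports a retry entry and skipping shadow (value) entries. A sketch of that existence check; the function name is invented and error/EIO details of the real caller are omitted:

/*
 * Lockless probe: does any real page exist in [index, max]?  Mirrors the
 * rcu_read_lock()/xas_find()/xas_retry() loop in filemap_range_has_page().
 */
static bool cache_range_has_page(struct address_space *mapping,
                                 pgoff_t index, pgoff_t max)
{
        XA_STATE(xas, &mapping->i_pages, index);
        struct page *page;

        rcu_read_lock();
        for (;;) {
                page = xas_find(&xas, max);
                if (xas_retry(&xas, page))
                        continue;               /* raced with a store/split: retry */
                if (xa_is_value(page))
                        continue;               /* shadow entries do not count */
                break;                          /* found a page, or hit NULL */
        }
        rcu_read_unlock();

        return page != NULL;
}
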
795 XA_STATE(xas, &mapping->i_pages, offset); in replace_page_cache_page()
808 xas_lock_irqsave(&xas, flags); in replace_page_cache_page()
809 xas_store(&xas, new); in replace_page_cache_page()
821 xas_unlock_irqrestore(&xas, flags); in replace_page_cache_page()
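
replace_page_cache_page() wraps a single xas_store() in xas_lock_irqsave()/xas_unlock_irqrestore(), overwriting the old page with the new one in place. The same pattern in isolation, as a hypothetical helper with no statistics or refcount handling:

/*
 * Swap whatever sits at @index for @new, with the xa_lock held and
 * interrupts disabled, as replace_page_cache_page() does.
 */
static void cache_replace_slot(struct address_space *mapping,
                               pgoff_t index, struct page *new)
{
        XA_STATE(xas, &mapping->i_pages, index);
        unsigned long flags;

        xas_lock_irqsave(&xas, flags);
        xas_store(&xas, new);           /* old entry is dropped from the tree */
        xas_unlock_irqrestore(&xas, flags);
}
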
835 XA_STATE(xas, &mapping->i_pages, offset); in __add_to_page_cache_locked()
841 mapping_set_update(&xas, mapping); in __add_to_page_cache_locked()
856 unsigned int order = xa_get_order(xas.xa, xas.xa_index); in __add_to_page_cache_locked()
860 xas_split_alloc(&xas, xa_load(xas.xa, xas.xa_index), in __add_to_page_cache_locked()
862 xas_lock_irq(&xas); in __add_to_page_cache_locked()
863 xas_for_each_conflict(&xas, entry) { in __add_to_page_cache_locked()
866 xas_set_err(&xas, -EEXIST); in __add_to_page_cache_locked()
875 order = xa_get_order(xas.xa, xas.xa_index); in __add_to_page_cache_locked()
877 xas_split(&xas, old, order); in __add_to_page_cache_locked()
878 xas_reset(&xas); in __add_to_page_cache_locked()
882 xas_store(&xas, page); in __add_to_page_cache_locked()
883 if (xas_error(&xas)) in __add_to_page_cache_locked()
894 xas_unlock_irq(&xas); in __add_to_page_cache_locked()
895 } while (xas_nomem(&xas, gfp)); in __add_to_page_cache_locked()
897 if (xas_error(&xas)) { in __add_to_page_cache_locked()
898 error = xas_error(&xas); in __add_to_page_cache_locked()
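
__add_to_page_cache_locked() is the canonical insertion loop: take the lock, look for conflicting entries with xas_for_each_conflict(), store, unlock, and let xas_nomem() allocate nodes and retry if the store failed with -ENOMEM; xas_error() reports the final outcome. A stripped-down sketch of that loop, without the THP xas_split()/shadow bookkeeping of the real function (helper name is invented):

/*
 * Insert @page at @index using the standard xas_nomem() retry loop, in the
 * style of __add_to_page_cache_locked().  Returns 0 or a negative errno.
 */
static int cache_insert(struct address_space *mapping, pgoff_t index,
                        struct page *page, gfp_t gfp)
{
        XA_STATE(xas, &mapping->i_pages, index);
        void *entry;

        do {
                xas_lock_irq(&xas);
                xas_for_each_conflict(&xas, entry) {
                        if (!xa_is_value(entry)) {
                                xas_set_err(&xas, -EEXIST);     /* a real page already occupies the slot */
                                break;
                        }
                }
                if (!xas_error(&xas))
                        xas_store(&xas, page);  /* may fail internally with -ENOMEM */
                xas_unlock_irq(&xas);
        } while (xas_nomem(&xas, gfp));         /* allocate XArray nodes outside the lock and retry */

        return xas_error(&xas);
}
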
1623 XA_STATE(xas, &mapping->i_pages, index); in page_cache_next_miss()
1626 void *entry = xas_next(&xas); in page_cache_next_miss()
1629 if (xas.xa_index == 0) in page_cache_next_miss()
1633 return xas.xa_index; in page_cache_next_miss()
1659 XA_STATE(xas, &mapping->i_pages, index); in page_cache_prev_miss()
1662 void *entry = xas_prev(&xas); in page_cache_prev_miss()
1665 if (xas.xa_index == ULONG_MAX) in page_cache_prev_miss()
1669 return xas.xa_index; in page_cache_prev_miss()
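
page_cache_next_miss() and page_cache_prev_miss() step with xas_next()/xas_prev() until they hit a hole or a shadow value, watching xas.xa_index for wrap-around (0 going forward, ULONG_MAX going backward). The forward variant, reconstructed as a sketch:

/*
 * Find the index of the first hole at or after @index, scanning at most
 * @max_scan slots; follows the xas_next()/wrap-around logic of
 * page_cache_next_miss().  The backward variant uses xas_prev() and checks
 * for xa_index == ULONG_MAX instead.
 */
static pgoff_t cache_next_miss(struct address_space *mapping,
                               pgoff_t index, unsigned long max_scan)
{
        XA_STATE(xas, &mapping->i_pages, index);

        rcu_read_lock();
        while (max_scan--) {
                void *entry = xas_next(&xas);

                if (!entry || xa_is_value(entry))
                        break;                  /* empty slot or shadow entry: a miss */
                if (xas.xa_index == 0)
                        break;                  /* index wrapped around */
        }
        rcu_read_unlock();

        return xas.xa_index;
}
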
1688 XA_STATE(xas, &mapping->i_pages, index); in find_get_entry()
1693 xas_reset(&xas); in find_get_entry()
1694 page = xas_load(&xas); in find_get_entry()
1695 if (xas_retry(&xas, page)) in find_get_entry()
1712 if (unlikely(page != xas_reload(&xas))) { in find_get_entry()
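
find_get_entry() is the template for every lockless lookup in this file: reset, load, skip retry entries, take a speculative reference on the page, then confirm with xas_reload() that the slot still points at the same page, dropping the reference and retrying if it does not. A simplified sketch of that dance, with compound-page head handling omitted and an invented helper name:

/*
 * RCU-safe single lookup in the retry/reload style of find_get_entry().
 * Returns the page (referenced), a shadow value entry, or NULL.
 */
static struct page *cache_lookup(struct address_space *mapping, pgoff_t index)
{
        XA_STATE(xas, &mapping->i_pages, index);
        struct page *page;

        rcu_read_lock();
repeat:
        xas_reset(&xas);
        page = xas_load(&xas);
        if (xas_retry(&xas, page))
                goto repeat;
        if (!page || xa_is_value(page))         /* empty slot or shadow entry */
                goto out;
        if (!page_cache_get_speculative(page))
                goto repeat;                    /* page was being freed */
        if (unlikely(page != xas_reload(&xas))) {
                put_page(page);                 /* slot changed under us */
                goto repeat;
        }
out:
        rcu_read_unlock();
        return page;
}
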
1898 XA_STATE(xas, &mapping->i_pages, start); in find_get_entries()
1906 xas_for_each(&xas, page, ULONG_MAX) { in find_get_entries()
1907 if (xas_retry(&xas, page)) in find_get_entries()
1921 if (unlikely(page != xas_reload(&xas))) in find_get_entries()
1929 page = find_subpage(page, xas.xa_index); in find_get_entries()
1933 indices[ret] = xas.xa_index; in find_get_entries()
1941 xas_reset(&xas); in find_get_entries()
1972 XA_STATE(xas, &mapping->i_pages, *start); in find_get_pages_range()
1980 xas_for_each(&xas, page, end) { in find_get_pages_range()
1981 if (xas_retry(&xas, page)) in find_get_pages_range()
1991 if (unlikely(page != xas_reload(&xas))) in find_get_pages_range()
1994 pages[ret] = find_subpage(page, xas.xa_index); in find_get_pages_range()
1996 *start = xas.xa_index + 1; in find_get_pages_range()
2003 xas_reset(&xas); in find_get_pages_range()
2037 XA_STATE(xas, &mapping->i_pages, index); in find_get_pages_contig()
2045 for (page = xas_load(&xas); page; page = xas_next(&xas)) { in find_get_pages_contig()
2046 if (xas_retry(&xas, page)) in find_get_pages_contig()
2059 if (unlikely(page != xas_reload(&xas))) in find_get_pages_contig()
2062 pages[ret] = find_subpage(page, xas.xa_index); in find_get_pages_contig()
2069 xas_reset(&xas); in find_get_pages_contig()
2094 XA_STATE(xas, &mapping->i_pages, *index); in find_get_pages_range_tag()
2102 xas_for_each_marked(&xas, page, end, tag) { in find_get_pages_range_tag()
2103 if (xas_retry(&xas, page)) in find_get_pages_range_tag()
2117 if (unlikely(page != xas_reload(&xas))) in find_get_pages_range_tag()
2120 pages[ret] = find_subpage(page, xas.xa_index); in find_get_pages_range_tag()
2122 *index = xas.xa_index + 1; in find_get_pages_range_tag()
2129 xas_reset(&xas); in find_get_pages_range_tag()
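
find_get_pages_range_tag() differs from the plain range walks above only in using xas_for_each_marked(), which visits just the slots carrying a given mark such as PAGECACHE_TAG_DIRTY. A sketch that counts marked pages in a range, leaving out the reference counting and xas_reload() revalidation shown earlier:

/*
 * Count pages in [start, end] that carry @mark, in the style of the
 * xas_for_each_marked() walk in find_get_pages_range_tag().
 */
static unsigned int cache_count_marked(struct address_space *mapping,
                                       pgoff_t start, pgoff_t end,
                                       xa_mark_t mark)
{
        XA_STATE(xas, &mapping->i_pages, start);
        struct page *page;
        unsigned int nr = 0;

        rcu_read_lock();
        xas_for_each_marked(&xas, page, end, mark) {
                if (xas_retry(&xas, page))
                        continue;
                if (xa_is_value(page))
                        continue;               /* skip shadow/swap entries */
                nr++;
        }
        rcu_read_unlock();

        return nr;
}
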
2841 XA_STATE(xas, &mapping->i_pages, start_pgoff); in filemap_map_pages()
2846 xas_for_each(&xas, head, end_pgoff) { in filemap_map_pages()
2847 if (xas_retry(&xas, head)) in filemap_map_pages()
2862 if (unlikely(head != xas_reload(&xas))) in filemap_map_pages()
2864 page = find_subpage(head, xas.xa_index); in filemap_map_pages()
2877 if (xas.xa_index >= max_idx) in filemap_map_pages()
2883 vmf->address += (xas.xa_index - last_pgoff) << PAGE_SHIFT; in filemap_map_pages()
2885 vmf->pte += xas.xa_index - last_pgoff; in filemap_map_pages()
2886 last_pgoff = xas.xa_index; in filemap_map_pages()
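
filemap_map_pages() leans on xas.xa_index while iterating: the gap between the current index and last_pgoff advances vmf->address and vmf->pte, and find_subpage() maps the index to the right tail page of a compound page. A sketch of that index-driven visit, with the fault plumbing replaced by a hypothetical callback and no reference counting:

/*
 * Visit every present page in [start, end], handing the callback the
 * subpage for the current xas.xa_index, in the spirit of the
 * xas_for_each()/find_subpage() walk in filemap_map_pages().
 */
static void cache_visit_range(struct address_space *mapping,
                              pgoff_t start, pgoff_t end,
                              void (*visit)(struct page *page, pgoff_t index))
{
        XA_STATE(xas, &mapping->i_pages, start);
        struct page *head;

        rcu_read_lock();
        xas_for_each(&xas, head, end) {
                if (xas_retry(&xas, head))
                        continue;
                if (xa_is_value(head))
                        continue;               /* shadow entry, nothing to map */
                /* xas.xa_index is the slot we are on; a THP head covers
                 * several indices, so pick the matching subpage. */
                visit(find_subpage(head, xas.xa_index), xas.xa_index);
        }
        rcu_read_unlock();
}
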