Lines matching refs: xa. Uses of the struct xarray pointer xa in the Linux kernel's lib/xarray.c; each entry gives the source line number, the code, the enclosing function, and whether xa is an argument or a local there.

31 static inline unsigned int xa_lock_type(const struct xarray *xa)  in xa_lock_type()  argument
33 return (__force unsigned int)xa->xa_flags & 3; in xa_lock_type()
56 static inline bool xa_track_free(const struct xarray *xa) in xa_track_free() argument
58 return xa->xa_flags & XA_FLAGS_TRACK_FREE; in xa_track_free()
61 static inline bool xa_zero_busy(const struct xarray *xa) in xa_zero_busy() argument
63 return xa->xa_flags & XA_FLAGS_ZERO_BUSY; in xa_zero_busy()
66 static inline void xa_mark_set(struct xarray *xa, xa_mark_t mark) in xa_mark_set() argument
68 if (!(xa->xa_flags & XA_FLAGS_MARK(mark))) in xa_mark_set()
69 xa->xa_flags |= XA_FLAGS_MARK(mark); in xa_mark_set()
72 static inline void xa_mark_clear(struct xarray *xa, xa_mark_t mark) in xa_mark_clear() argument
74 if (xa->xa_flags & XA_FLAGS_MARK(mark)) in xa_mark_clear()
75 xa->xa_flags &= ~(XA_FLAGS_MARK(mark)); in xa_mark_clear()
188 entry = xa_head(xas->xa); in xas_start()
204 void *entry = xa_entry(xas->xa, node, offset); in xas_descend()
209 entry = xa_entry(xas->xa, node, offset); in xas_descend()
306 if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT) in xas_nomem()
328 __must_hold(xas->xa->xa_lock) in __xas_nomem()
330 unsigned int lock_type = xa_lock_type(xas->xa); in __xas_nomem()
336 if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT) in __xas_nomem()
374 if (xas->xa->xa_flags & XA_FLAGS_ACCOUNT) in xas_alloc()
396 node->array = xas->xa; in xas_alloc()
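xas_nomem(), __xas_nomem() and xas_alloc() implement the allocation side of the advanced API: a store that runs out of memory under the lock records -ENOMEM in the xa_state, and the caller retries after allocating a node outside the lock. A minimal sketch of that retry loop, with the function and variable names invented for illustration:

#include <linux/xarray.h>

static int illustrative_store(struct xarray *xa, unsigned long index, void *item)
{
        XA_STATE(xas, xa, index);

        do {
                xas_lock(&xas);
                xas_store(&xas, item);
                xas_unlock(&xas);
        } while (xas_nomem(&xas, GFP_KERNEL)); /* preallocate a node, retry on -ENOMEM */

        return xas_error(&xas);                /* 0, or the error left in the xa_state */
}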
441 struct xarray *xa = xas->xa; in xas_shrink() local
450 entry = xa_entry_locked(xa, node, 0); in xas_shrink()
455 if (xa_is_zero(entry) && xa_zero_busy(xa)) in xas_shrink()
459 RCU_INIT_POINTER(xa->xa_head, entry); in xas_shrink()
460 if (xa_track_free(xa) && !node_get_mark(node, 0, XA_FREE_MARK)) in xas_shrink()
461 xa_mark_clear(xa, XA_FREE_MARK); in xas_shrink()
494 parent = xa_parent_locked(xas->xa, node); in xas_delete_node()
500 xas->xa->xa_head = NULL; in xas_delete_node()
531 void *entry = xa_entry_locked(xas->xa, node, offset); in xas_free_nodes()
544 parent = xa_parent_locked(xas->xa, node); in xas_free_nodes()
563 struct xarray *xa = xas->xa; in xas_expand() local
595 if (xa_track_free(xa) && mark == XA_FREE_MARK) { in xas_expand()
597 if (!xa_marked(xa, XA_FREE_MARK)) { in xas_expand()
599 xa_mark_set(xa, XA_FREE_MARK); in xas_expand()
601 } else if (xa_marked(xa, mark)) { in xas_expand()
618 rcu_assign_pointer(xa->xa_head, head); in xas_expand()
643 struct xarray *xa = xas->xa; in xas_create() local
651 entry = xa_head_locked(xa); in xas_create()
653 if (!entry && xa_zero_busy(xa)) in xas_create()
660 entry = xa_head_locked(xa); in xas_create()
661 slot = &xa->xa_head; in xas_create()
668 entry = xa_entry_locked(xa, node, offset); in xas_create()
672 entry = xa_head_locked(xa); in xas_create()
673 slot = &xa->xa_head; in xas_create()
682 if (xa_track_free(xa)) in xas_create()
730 xas->xa_node = xa_parent_locked(xas->xa, node); in xas_create_range()
780 void __rcu **slot = &xas->xa->xa_head; in xas_store()
837 next = xa_entry_locked(xas->xa, node, ++offset); in xas_store()
864 return xa_marked(xas->xa, mark); in xas_get_mark()
890 node = xa_parent_locked(xas->xa, node); in xas_set_mark()
893 if (!xa_marked(xas->xa, mark)) in xas_set_mark()
894 xa_mark_set(xas->xa, mark); in xas_set_mark()
922 node = xa_parent_locked(xas->xa, node); in xas_clear_mark()
925 if (xa_marked(xas->xa, mark)) in xas_clear_mark()
926 xa_mark_clear(xas->xa, mark); in xas_clear_mark()
946 if (xa_track_free(xas->xa) && mark == XA_FREE_MARK) in xas_init_marks()
1025 node->array = xas->xa; in xas_split_alloc()
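xas_set_mark() and xas_clear_mark() walk the mark up through the parent nodes and finally into xa_flags, which is what the xa_marked()/xa_mark_set() calls above do. A minimal sketch of marking an existing entry through the advanced API (names invented for illustration):

#include <linux/xarray.h>

static void illustrative_mark(struct xarray *xa, unsigned long index)
{
        XA_STATE(xas, xa, index);

        xas_lock(&xas);
        if (xas_load(&xas))                     /* marks only stick to present entries */
                xas_set_mark(&xas, XA_MARK_0);
        xas_unlock(&xas);
}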
1133 if (!xa_is_sibling(xa_entry(xas->xa, node, offset))) in xas_pause()
1168 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in __xas_prev()
1174 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in __xas_prev()
1207 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in __xas_next()
1213 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in __xas_next()
1265 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in xas_find()
1269 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in xas_find()
1324 entry = xa_head(xas->xa); in xas_find_marked()
1329 if (xa_marked(xas->xa, mark)) in xas_find_marked()
1341 xas->xa_node = xa_parent(xas->xa, xas->xa_node); in xas_find_marked()
1349 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in xas_find_marked()
1368 entry = xa_entry(xas->xa, xas->xa_node, xas->xa_offset); in xas_find_marked()
1369 if (!entry && !(xa_track_free(xas->xa) && mark == XA_FREE_MARK)) in xas_find_marked()
1427 xas->xa_node = xa_parent_locked(xas->xa, xas->xa_node); in xas_find_conflict()
1432 curr = xa_entry_locked(xas->xa, xas->xa_node, ++xas->xa_offset); in xas_find_conflict()
1438 curr = xa_entry_locked(xas->xa, xas->xa_node, 0); in xas_find_conflict()
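__xas_prev(), __xas_next(), xas_find(), xas_find_marked() and xas_find_conflict() are the walkers behind the iteration macros, and xas_pause() makes it safe to drop the lock mid-walk and pick up where the walk left off. A minimal sketch of that pattern (names invented for illustration):

#include <linux/xarray.h>
#include <linux/rcupdate.h>
#include <linux/sched.h>

static void illustrative_walk(struct xarray *xa)
{
        XA_STATE(xas, xa, 0);
        void *entry;

        rcu_read_lock();
        xas_for_each(&xas, entry, ULONG_MAX) {
                if (need_resched()) {
                        xas_pause(&xas);        /* leave the state safe to resume from */
                        rcu_read_unlock();
                        cond_resched();
                        rcu_read_lock();
                }
                /* process entry here */
        }
        rcu_read_unlock();
}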
1456 void *xa_load(struct xarray *xa, unsigned long index) in xa_load() argument
1458 XA_STATE(xas, xa, index); in xa_load()
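xa_load() is the normal-API lookup; it takes the RCU read lock internally and returns the entry stored at the index, or NULL. A minimal sketch (names invented for illustration):

#include <linux/xarray.h>

static void *illustrative_lookup(struct xarray *xa, unsigned long index)
{
        /* No locking needed from the caller; xa_load() takes the RCU read
         * lock itself.  The entry may be erased concurrently afterwards. */
        return xa_load(xa, index);
}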
1494 void *__xa_erase(struct xarray *xa, unsigned long index) in __xa_erase() argument
1496 XA_STATE(xas, xa, index); in __xa_erase()
1513 void *xa_erase(struct xarray *xa, unsigned long index) in xa_erase() argument
1517 xa_lock(xa); in xa_erase()
1518 entry = __xa_erase(xa, index); in xa_erase()
1519 xa_unlock(xa); in xa_erase()
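As the lines above show, xa_erase() is __xa_erase() bracketed by xa_lock()/xa_unlock(). A minimal sketch of a caller (names invented for illustration; assumes the entries are kmalloc'ed objects):

#include <linux/xarray.h>
#include <linux/slab.h>

static void illustrative_remove(struct xarray *xa, unsigned long index)
{
        void *old = xa_erase(xa, index);        /* the removed entry, or NULL */

        kfree(old);     /* the XArray never frees what its entries point to */
}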
1540 void *__xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp) in __xa_store() argument
1542 XA_STATE(xas, xa, index); in __xa_store()
1547 if (xa_track_free(xa) && !entry) in __xa_store()
1552 if (xa_track_free(xa)) in __xa_store()
1577 void *xa_store(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp) in xa_store() argument
1581 xa_lock(xa); in xa_store()
1582 curr = __xa_store(xa, index, entry, gfp); in xa_store()
1583 xa_unlock(xa); in xa_store()
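xa_store() wraps __xa_store() in the same lock/unlock pair; an allocation failure comes back encoded as an error entry rather than as a return code. A minimal sketch including that check (names invented for illustration):

#include <linux/xarray.h>

static int illustrative_put(struct xarray *xa, unsigned long index, void *item)
{
        void *old = xa_store(xa, index, item, GFP_KERNEL);

        if (xa_is_err(old))
                return xa_err(old);     /* e.g. -ENOMEM */
        /* 'old' is whatever was previously stored at index, or NULL */
        return 0;
}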
1605 void *__xa_cmpxchg(struct xarray *xa, unsigned long index, in __xa_cmpxchg() argument
1608 XA_STATE(xas, xa, index); in __xa_cmpxchg()
1618 if (xa_track_free(xa) && entry && !curr) in __xa_cmpxchg()
1643 int __xa_insert(struct xarray *xa, unsigned long index, void *entry, gfp_t gfp) in __xa_insert() argument
1645 XA_STATE(xas, xa, index); in __xa_insert()
1657 if (xa_track_free(xa)) in __xa_insert()
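__xa_cmpxchg() and __xa_insert() are the conditional stores; their unlocked wrappers xa_cmpxchg() and xa_insert() behave as sketched below (names invented for illustration):

#include <linux/xarray.h>

/* 0 on success, -EBUSY if the index is already occupied, -ENOMEM on
 * allocation failure. */
static int illustrative_insert_once(struct xarray *xa, unsigned long index, void *item)
{
        return xa_insert(xa, index, item, GFP_KERNEL);
}

/* true if 'expected' really was at the index and has been replaced by 'item' */
static bool illustrative_replace_if(struct xarray *xa, unsigned long index,
                                    void *expected, void *item)
{
        void *curr = xa_cmpxchg(xa, index, expected, item, GFP_KERNEL);

        return curr == expected && !xa_is_err(curr);
}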
1718 void *xa_store_range(struct xarray *xa, unsigned long first, in xa_store_range() argument
1721 XA_STATE(xas, xa, 0); in xa_store_range()
1761 int xa_get_order(struct xarray *xa, unsigned long index) in xa_get_order() argument
1763 XA_STATE(xas, xa, index); in xa_get_order()
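xa_store_range() stores a single entry across a span of indices (it depends on CONFIG_XARRAY_MULTI) and xa_get_order() reports the order of the entry occupying an index. A minimal sketch (names invented for illustration):

#include <linux/xarray.h>

static int illustrative_span(struct xarray *xa, unsigned long first,
                             unsigned long last, void *item)
{
        void *ret = xa_store_range(xa, first, last, item, GFP_KERNEL);

        if (xa_is_err(ret))
                return xa_err(ret);
        /* xa_load() now returns 'item' for every index in [first, last],
         * and xa_get_order(xa, first) reports the order of that entry. */
        return 0;
}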
1812 int __xa_alloc(struct xarray *xa, u32 *id, void *entry, in __xa_alloc() argument
1815 XA_STATE(xas, xa, 0); in __xa_alloc()
1819 if (WARN_ON_ONCE(!xa_track_free(xa))) in __xa_alloc()
1861 int __xa_alloc_cyclic(struct xarray *xa, u32 *id, void *entry, in __xa_alloc_cyclic() argument
1868 ret = __xa_alloc(xa, id, entry, limit, gfp); in __xa_alloc_cyclic()
1869 if ((xa->xa_flags & XA_FLAGS_ALLOC_WRAPPED) && ret == 0) { in __xa_alloc_cyclic()
1870 xa->xa_flags &= ~XA_FLAGS_ALLOC_WRAPPED; in __xa_alloc_cyclic()
1876 ret = __xa_alloc(xa, id, entry, limit, gfp); in __xa_alloc_cyclic()
1884 xa->xa_flags |= XA_FLAGS_ALLOC_WRAPPED; in __xa_alloc_cyclic()
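__xa_alloc() and __xa_alloc_cyclic() need an array that tracks free slots (see xa_track_free() above); the XA_FLAGS_ALLOC_WRAPPED handling in the lines above is how the cyclic variant reports a wrap-around as the return value 1. A minimal sketch using the unlocked wrappers (names invented for illustration):

#include <linux/xarray.h>

static DEFINE_XARRAY_ALLOC(illustrative_table);  /* IDs start at 0 */
static u32 illustrative_next;                    /* cursor for the cyclic variant */

/* Lowest free ID within the limit: 0 on success, -EBUSY if the limit is
 * exhausted, -ENOMEM if node allocation failed. */
static int illustrative_new_id(void *item, u32 *id)
{
        return xa_alloc(&illustrative_table, id, item, xa_limit_32b, GFP_KERNEL);
}

/* Next ID after the one handed out last time; returns 1 (not an error)
 * when the search wrapped back to the bottom of the limit. */
static int illustrative_new_cyclic_id(void *item, u32 *id)
{
        return xa_alloc_cyclic(&illustrative_table, id, item, xa_limit_32b,
                               &illustrative_next, GFP_KERNEL);
}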
1900 void __xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark) in __xa_set_mark() argument
1902 XA_STATE(xas, xa, index); in __xa_set_mark()
1918 void __xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark) in __xa_clear_mark() argument
1920 XA_STATE(xas, xa, index); in __xa_clear_mark()
1940 bool xa_get_mark(struct xarray *xa, unsigned long index, xa_mark_t mark) in xa_get_mark() argument
1942 XA_STATE(xas, xa, index); in xa_get_mark()
1970 void xa_set_mark(struct xarray *xa, unsigned long index, xa_mark_t mark) in xa_set_mark() argument
1972 xa_lock(xa); in xa_set_mark()
1973 __xa_set_mark(xa, index, mark); in xa_set_mark()
1974 xa_unlock(xa); in xa_set_mark()
1988 void xa_clear_mark(struct xarray *xa, unsigned long index, xa_mark_t mark) in xa_clear_mark() argument
1990 xa_lock(xa); in xa_clear_mark()
1991 __xa_clear_mark(xa, index, mark); in xa_clear_mark()
1992 xa_unlock(xa); in xa_clear_mark()
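xa_set_mark() and xa_clear_mark() are the locked wrappers over __xa_set_mark() and __xa_clear_mark(); xa_get_mark() reads under RCU. Each entry can carry up to three marks, XA_MARK_0 to XA_MARK_2. A minimal sketch using XA_MARK_0 as a dirty bit (names invented for illustration):

#include <linux/xarray.h>

static void illustrative_set_dirty(struct xarray *xa, unsigned long index)
{
        xa_set_mark(xa, index, XA_MARK_0);      /* has no effect if nothing is stored here */
}

static bool illustrative_test_dirty(struct xarray *xa, unsigned long index)
{
        return xa_get_mark(xa, index, XA_MARK_0);
}

static void illustrative_clear_dirty(struct xarray *xa, unsigned long index)
{
        xa_clear_mark(xa, index, XA_MARK_0);
}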
2013 void *xa_find(struct xarray *xa, unsigned long *indexp, in xa_find() argument
2016 XA_STATE(xas, xa, *indexp); in xa_find()
2063 void *xa_find_after(struct xarray *xa, unsigned long *indexp, in xa_find_after() argument
2066 XA_STATE(xas, xa, *indexp + 1); in xa_find_after()
2160 unsigned int xa_extract(struct xarray *xa, void **dst, unsigned long start, in xa_extract() argument
2163 XA_STATE(xas, xa, start); in xa_extract()
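xa_find() and xa_find_after() are what the xa_for_each*() macros expand to, and xa_extract() copies matching entries into an ordinary array. A minimal sketch of the two common uses (names invented for illustration):

#include <linux/xarray.h>

static unsigned long illustrative_count(struct xarray *xa)
{
        unsigned long index = 0, n = 0;
        void *entry;

        /* Visits every present entry; safe to call without the lock,
         * though entries may be added or removed concurrently. */
        xa_for_each(xa, index, entry)
                n++;

        return n;
}

static void *illustrative_first_marked(struct xarray *xa)
{
        unsigned long index = 0;

        /* Same walk, restricted to entries carrying XA_MARK_0 */
        return xa_find(xa, &index, ULONG_MAX, XA_MARK_0);
}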
2184 .xa = node->array, in xa_delete_node()
2207 void xa_destroy(struct xarray *xa) in xa_destroy() argument
2209 XA_STATE(xas, xa, 0); in xa_destroy()
2215 entry = xa_head_locked(xa); in xa_destroy()
2216 RCU_INIT_POINTER(xa->xa_head, NULL); in xa_destroy()
2218 if (xa_zero_busy(xa)) in xa_destroy()
2219 xa_mark_clear(xa, XA_FREE_MARK); in xa_destroy()
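xa_destroy() frees every node and resets xa_head, as the lines above show, but it does not free whatever the entries point to. A minimal teardown sketch (names invented for illustration; assumes kmalloc'ed entries):

#include <linux/xarray.h>
#include <linux/slab.h>

static void illustrative_teardown(struct xarray *xa)
{
        unsigned long index = 0;
        void *entry;

        /* Free the objects first; xa_destroy() only releases the tree nodes. */
        xa_for_each(xa, index, entry)
                kfree(entry);

        xa_destroy(xa);
}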
2293 void xa_dump(const struct xarray *xa) in xa_dump() argument
2295 void *entry = xa->xa_head; in xa_dump()
2298 pr_info("xarray: %px head %px flags %x marks %d %d %d\n", xa, entry, in xa_dump()
2299 xa->xa_flags, xa_marked(xa, XA_MARK_0), in xa_dump()
2300 xa_marked(xa, XA_MARK_1), xa_marked(xa, XA_MARK_2)); in xa_dump()