Lines matching refs: xa — excerpts from the Linux kernel's include/linux/xarray.h

/* Search for present entries, used by the iteration macros below: */
void *xa_find(struct xarray *xa, unsigned long *index,
		unsigned long max, xa_mark_t filter);
void *xa_find_after(struct xarray *xa, unsigned long *index,
		unsigned long max, xa_mark_t filter);

static inline void xa_init_flags(struct xarray *xa, gfp_t flags)
{
	spin_lock_init(&xa->xa_lock);
	xa->xa_flags = flags;
	xa->xa_head = NULL;
}

static inline void xa_init(struct xarray *xa)
{
	xa_init_flags(xa, 0);
}

static inline bool xa_empty(const struct xarray *xa)
{
	return xa->xa_head == NULL;
}

static inline bool xa_marked(const struct xarray *xa, xa_mark_t mark)
{
	return xa->xa_flags & XA_FLAGS_MARK(mark);
}
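The helpers above cover setup and cheap state checks. A minimal usage sketch, not part of the header itself (the array name is hypothetical):

	#include <linux/xarray.h>

	static DEFINE_XARRAY(example_xa);	/* hypothetical array */

	static void example_setup_check(void)
	{
		/* A freshly initialised array is empty and has no marks set. */
		WARN_ON(!xa_empty(&example_xa));
		WARN_ON(xa_marked(&example_xa, XA_MARK_0));
	}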
#define xa_for_each_range(xa, index, entry, start, last)		\
	for (index = start,						\
	     entry = xa_find(xa, &index, last, XA_PRESENT);		\
	     entry;							\
	     entry = xa_find_after(xa, &index, last, XA_PRESENT))

#define xa_for_each_start(xa, index, entry, start) \
	xa_for_each_range(xa, index, entry, start, ULONG_MAX)

#define xa_for_each(xa, index, entry) \
	xa_for_each_start(xa, index, entry, 0)

#define xa_for_each_marked(xa, index, entry, filter)			\
	for (index = 0, entry = xa_find(xa, &index, ULONG_MAX, filter);	\
	     entry; entry = xa_find_after(xa, &index, ULONG_MAX, filter))
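A sketch of how the iteration macros are typically used; the function is hypothetical and the entries are whatever non-NULL pointers were stored earlier:

	static void example_iterate(struct xarray *xa)
	{
		unsigned long index;
		void *entry;

		/* Visit every present entry in index order. */
		xa_for_each(xa, index, entry)
			pr_info("index %lu -> %p\n", index, entry);

		/* Visit only entries with XA_MARK_0 set. */
		xa_for_each_marked(xa, index, entry, XA_MARK_0)
			pr_info("marked index %lu\n", index);
	}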
/* Lock wrappers around the array's internal spinlock: */
#define xa_trylock(xa)		spin_trylock(&(xa)->xa_lock)
#define xa_lock(xa)		spin_lock(&(xa)->xa_lock)
#define xa_unlock(xa)		spin_unlock(&(xa)->xa_lock)
#define xa_lock_bh(xa)		spin_lock_bh(&(xa)->xa_lock)
#define xa_unlock_bh(xa)	spin_unlock_bh(&(xa)->xa_lock)
#define xa_lock_irq(xa)		spin_lock_irq(&(xa)->xa_lock)
#define xa_unlock_irq(xa)	spin_unlock_irq(&(xa)->xa_lock)
#define xa_lock_irqsave(xa, flags) \
				spin_lock_irqsave(&(xa)->xa_lock, flags)
#define xa_unlock_irqrestore(xa, flags) \
				spin_unlock_irqrestore(&(xa)->xa_lock, flags)
#define xa_lock_nested(xa, subclass) \
				spin_lock_nested(&(xa)->xa_lock, subclass)
#define xa_lock_bh_nested(xa, subclass) \
				spin_lock_bh_nested(&(xa)->xa_lock, subclass)
#define xa_lock_irq_nested(xa, subclass) \
				spin_lock_irq_nested(&(xa)->xa_lock, subclass)
#define xa_lock_irqsave_nested(xa, flags, subclass) \
		spin_lock_irqsave_nested(&(xa)->xa_lock, flags, subclass)
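These wrappers pair with the __xa_*() functions of the advanced API, which expect xa_lock to be held on entry. A hypothetical sketch; per the kernel documentation, __xa_store() may drop and retake the lock if the gfp mask allows sleeping, so GFP_ATOMIC keeps the whole operation under the lock:

	static int example_store_locked(struct xarray *xa, unsigned long index,
					void *entry)
	{
		void *curr;

		xa_lock(xa);
		curr = __xa_store(xa, index, entry, GFP_ATOMIC);
		xa_unlock(xa);
		return xa_err(curr);	/* 0 on success, negative errno on failure */
	}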
static inline void *xa_store_bh(struct xarray *xa, unsigned long index,
		void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock_bh(xa);
	curr = __xa_store(xa, index, entry, gfp);
	xa_unlock_bh(xa);
	return curr;
}

static inline void *xa_store_irq(struct xarray *xa, unsigned long index,
		void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock_irq(xa);
	curr = __xa_store(xa, index, entry, gfp);
	xa_unlock_irq(xa);
	return curr;
}

static inline void *xa_erase_bh(struct xarray *xa, unsigned long index)
{
	void *entry;

	xa_lock_bh(xa);
	entry = __xa_erase(xa, index);
	xa_unlock_bh(xa);
	return entry;
}

static inline void *xa_erase_irq(struct xarray *xa, unsigned long index)
{
	void *entry;

	xa_lock_irq(xa);
	entry = __xa_erase(xa, index);
	xa_unlock_irq(xa);
	return entry;
}
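The _bh and _irq variants exist for arrays that are also modified from softirq or hardirq context; disabling bottom halves or interrupts around the lock prevents self-deadlock. A hypothetical sketch:

	static void example_softirq_safe(struct xarray *xa, unsigned long index)
	{
		void *old;

		/* Safe even if a timer callback also takes this array's lock. */
		old = xa_store_bh(xa, index, xa_mk_value(42), GFP_KERNEL);
		old = xa_erase_bh(xa, index);
	}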
static inline void *xa_cmpxchg(struct xarray *xa, unsigned long index,
		void *old, void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock(xa);
	curr = __xa_cmpxchg(xa, index, old, entry, gfp);
	xa_unlock(xa);
	return curr;
}

static inline void *xa_cmpxchg_bh(struct xarray *xa, unsigned long index,
		void *old, void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock_bh(xa);
	curr = __xa_cmpxchg(xa, index, old, entry, gfp);
	xa_unlock_bh(xa);
	return curr;
}

static inline void *xa_cmpxchg_irq(struct xarray *xa, unsigned long index,
		void *old, void *entry, gfp_t gfp)
{
	void *curr;

	xa_lock_irq(xa);
	curr = __xa_cmpxchg(xa, index, old, entry, gfp);
	xa_unlock_irq(xa);
	return curr;
}
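xa_cmpxchg() follows cmpxchg() semantics: it returns the entry that was actually in the slot, so comparing the result with the expected old value tells you whether the exchange happened. A hypothetical sketch:

	static bool example_replace_if_unchanged(struct xarray *xa,
			unsigned long index, void *old, void *new)
	{
		void *curr = xa_cmpxchg(xa, index, old, new, GFP_KERNEL);

		return !xa_is_err(curr) && curr == old;
	}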
static inline int __must_check xa_insert(struct xarray *xa,
		unsigned long index, void *entry, gfp_t gfp)
{
	int err;

	xa_lock(xa);
	err = __xa_insert(xa, index, entry, gfp);
	xa_unlock(xa);
	return err;
}

static inline int __must_check xa_insert_bh(struct xarray *xa,
		unsigned long index, void *entry, gfp_t gfp)
{
	int err;

	xa_lock_bh(xa);
	err = __xa_insert(xa, index, entry, gfp);
	xa_unlock_bh(xa);
	return err;
}

static inline int __must_check xa_insert_irq(struct xarray *xa,
		unsigned long index, void *entry, gfp_t gfp)
{
	int err;

	xa_lock_irq(xa);
	err = __xa_insert(xa, index, entry, gfp);
	xa_unlock_irq(xa);
	return err;
}
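Unlike xa_store(), xa_insert() refuses to overwrite: it fails with -EBUSY if the slot is already occupied, which is why the result is __must_check. A hypothetical sketch:

	static int example_insert_once(struct xarray *xa, unsigned long index,
					void *entry)
	{
		int err = xa_insert(xa, index, entry, GFP_KERNEL);

		if (err == -EBUSY)
			pr_debug("index %lu already populated\n", index);
		return err;
	}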
static inline __must_check int xa_alloc(struct xarray *xa, u32 *id,
		void *entry, struct xa_limit limit, gfp_t gfp)
{
	int err;

	xa_lock(xa);
	err = __xa_alloc(xa, id, entry, limit, gfp);
	xa_unlock(xa);
	return err;
}

static inline int __must_check xa_alloc_bh(struct xarray *xa, u32 *id,
		void *entry, struct xa_limit limit, gfp_t gfp)
{
	int err;

	xa_lock_bh(xa);
	err = __xa_alloc(xa, id, entry, limit, gfp);
	xa_unlock_bh(xa);
	return err;
}

static inline int __must_check xa_alloc_irq(struct xarray *xa, u32 *id,
		void *entry, struct xa_limit limit, gfp_t gfp)
{
	int err;

	xa_lock_irq(xa);
	err = __xa_alloc(xa, id, entry, limit, gfp);
	xa_unlock_irq(xa);
	return err;
}
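xa_alloc() has the array choose a free index within the given limit; it requires an array initialised with XA_FLAGS_ALLOC (e.g. via DEFINE_XARRAY_ALLOC()). A hypothetical sketch:

	static int example_alloc_id(struct xarray *xa, void *entry, u32 *id)
	{
		/* On success, *id holds the newly allocated index. */
		return xa_alloc(xa, id, entry, xa_limit_32b, GFP_KERNEL);
	}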
static inline int xa_alloc_cyclic(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	int err;

	xa_lock(xa);
	err = __xa_alloc_cyclic(xa, id, entry, limit, next, gfp);
	xa_unlock(xa);
	return err;
}

static inline int xa_alloc_cyclic_bh(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	int err;

	xa_lock_bh(xa);
	err = __xa_alloc_cyclic(xa, id, entry, limit, next, gfp);
	xa_unlock_bh(xa);
	return err;
}

static inline int xa_alloc_cyclic_irq(struct xarray *xa, u32 *id, void *entry,
		struct xa_limit limit, u32 *next, gfp_t gfp)
{
	int err;

	xa_lock_irq(xa);
	err = __xa_alloc_cyclic(xa, id, entry, limit, next, gfp);
	xa_unlock_irq(xa);
	return err;
}
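The cyclic variants additionally track the most recently allocated ID through the next cursor so IDs are not reused immediately, and they return 1 (not an error) when the search wraps around. A hypothetical sketch with a static cursor:

	static u32 example_next_id;	/* hypothetical cursor, one per array */

	static int example_alloc_cyclic_id(struct xarray *xa, void *entry, u32 *id)
	{
		int ret = xa_alloc_cyclic(xa, id, entry, xa_limit_32b,
					  &example_next_id, GFP_KERNEL);

		return ret < 0 ? ret : 0;	/* treat wraparound (1) as success */
	}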
static inline __must_check
int xa_reserve(struct xarray *xa, unsigned long index, gfp_t gfp)
{
	return xa_err(xa_cmpxchg(xa, index, NULL, XA_ZERO_ENTRY, gfp));
}

static inline __must_check
int xa_reserve_bh(struct xarray *xa, unsigned long index, gfp_t gfp)
{
	return xa_err(xa_cmpxchg_bh(xa, index, NULL, XA_ZERO_ENTRY, gfp));
}

static inline __must_check
int xa_reserve_irq(struct xarray *xa, unsigned long index, gfp_t gfp)
{
	return xa_err(xa_cmpxchg_irq(xa, index, NULL, XA_ZERO_ENTRY, gfp));
}

static inline void xa_release(struct xarray *xa, unsigned long index)
{
	xa_cmpxchg(xa, index, XA_ZERO_ENTRY, NULL, 0);
}
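Reservation pre-allocates the slot (storing XA_ZERO_ENTRY, which reads back as NULL) so a later store there cannot fail for lack of memory. A hypothetical sketch:

	static int example_reserve_then_fill(struct xarray *xa, unsigned long index,
					     void *entry)
	{
		int err = xa_reserve(xa, index, GFP_KERNEL);

		if (err)
			return err;
		if (entry)
			err = xa_err(xa_store(xa, index, entry, GFP_ATOMIC));
		else
			xa_release(xa, index);	/* only frees a still-reserved slot */
		return err;
	}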
#ifdef XA_DEBUG
#define XA_BUG_ON(xa, x) do {					\
		if (x) {					\
			xa_dump(xa);				\
			BUG();					\
		}						\
	} while (0)
#else
#define XA_BUG_ON(xa, x)	do { } while (0)
#endif
/* Private */
static inline void *xa_head(const struct xarray *xa)
{
	return rcu_dereference_check(xa->xa_head,
						lockdep_is_held(&xa->xa_lock));
}

/* Private */
static inline void *xa_head_locked(const struct xarray *xa)
{
	return rcu_dereference_protected(xa->xa_head,
						lockdep_is_held(&xa->xa_lock));
}

/* Private */
static inline void *xa_entry(const struct xarray *xa,
				const struct xa_node *node, unsigned int offset)
{
	XA_NODE_BUG_ON(node, offset >= XA_CHUNK_SIZE);
	return rcu_dereference_check(node->slots[offset],
						lockdep_is_held(&xa->xa_lock));
}

/* Private */
static inline void *xa_entry_locked(const struct xarray *xa,
				const struct xa_node *node, unsigned int offset)
{
	XA_NODE_BUG_ON(node, offset >= XA_CHUNK_SIZE);
	return rcu_dereference_protected(node->slots[offset],
						lockdep_is_held(&xa->xa_lock));
}

/* Private */
static inline struct xa_node *xa_parent(const struct xarray *xa,
					const struct xa_node *node)
{
	return rcu_dereference_check(node->parent,
						lockdep_is_held(&xa->xa_lock));
}

/* Private */
static inline struct xa_node *xa_parent_locked(const struct xarray *xa,
					const struct xa_node *node)
{
	return rcu_dereference_protected(node->parent,
						lockdep_is_held(&xa->xa_lock));
}
/* From struct xa_state, the advanced API's cursor: */
struct xa_state {
	struct xarray *xa;
	/* ... remaining fields elided ... */
};

/* From the XA_STATE() initialiser: */
	.xa = array,
#define xas_marked(xas, mark)	xa_marked((xas)->xa, (mark))
#define xas_trylock(xas)	xa_trylock((xas)->xa)
#define xas_lock(xas)		xa_lock((xas)->xa)
#define xas_unlock(xas)		xa_unlock((xas)->xa)
#define xas_lock_bh(xas)	xa_lock_bh((xas)->xa)
#define xas_unlock_bh(xas)	xa_unlock_bh((xas)->xa)
#define xas_lock_irq(xas)	xa_lock_irq((xas)->xa)
#define xas_unlock_irq(xas)	xa_unlock_irq((xas)->xa)
#define xas_lock_irqsave(xas, flags) \
				xa_lock_irqsave((xas)->xa, flags)
#define xas_unlock_irqrestore(xas, flags) \
				xa_unlock_irqrestore((xas)->xa, flags)
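In the advanced API, struct xa_state (declared with XA_STATE()) caches a position in the tree, and the xas_lock() family above simply resolves to the owning array's spinlock. A hypothetical sketch:

	static void *example_xas_load(struct xarray *xa, unsigned long index)
	{
		XA_STATE(xas, xa, index);
		void *entry;

		xas_lock(&xas);
		entry = xas_load(&xas);
		xas_unlock(&xas);
		return entry;
	}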
/* Stub used when CONFIG_XARRAY_MULTI is not enabled: */
static inline int xa_get_order(struct xarray *xa, unsigned long index)
{
	return 0;
}
/* Matched lines inside the advanced-API iteration helpers
 * (surrounding code elided): */

/* xas_reload(): */
	if (!node)
		return xa_head(xas->xa);
	/* ... */
	entry = xa_entry(xas->xa, node, offset);
	/* ... */
	return xa_entry(xas->xa, node, offset);

/* xas_next_entry(): */
	entry = xa_entry(xas->xa, node, xas->xa_offset + 1);

/* xas_next_marked(): */
	entry = xa_entry(xas->xa, node, offset);

/* xas_prev(): */
	return xa_entry(xas->xa, node, xas->xa_offset);

/* xas_next(): */
	return xa_entry(xas->xa, node, xas->xa_offset);
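Those helpers are what make lockless read-side walks possible: under rcu_read_lock() a walker may observe retry entries, which xas_retry() restarts cleanly. A hypothetical sketch:

	static void example_rcu_walk(struct xarray *xa)
	{
		XA_STATE(xas, xa, 0);
		void *entry;

		rcu_read_lock();
		xas_for_each(&xas, entry, ULONG_MAX) {
			if (xas_retry(&xas, entry))
				continue;
			pr_info("index %lu -> %p\n", xas.xa_index, entry);
		}
		rcu_read_unlock();
	}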