
Searched refs:arch_spin_is_locked (Results 1 – 23 of 23) sorted by relevance

/arch/parisc/include/asm/
spinlock.h:9 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
17 do { cpu_relax(); } while (arch_spin_is_locked(x))
111 while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0) in arch_read_trylock()
/arch/hexagon/include/asm/
spinlock.h:180 do {while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
181 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
/arch/xtensa/include/asm/
spinlock.h:31 #define arch_spin_is_locked(x) ((x)->slock != 0) macro
33 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
/arch/sh/include/asm/
spinlock.h:26 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
29 do { while (arch_spin_is_locked(x)) cpu_relax(); } while (0)
/arch/blackfin/include/asm/
spinlock.h:27 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
51 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
/arch/mn10300/include/asm/
spinlock.h:25 #define arch_spin_is_locked(x) (*(volatile signed char *)(&(x)->slock) != 0) macro
26 #define arch_spin_unlock_wait(x) do { barrier(); } while (arch_spin_is_locked(x))
/arch/sparc/include/asm/
spinlock_32.h:14 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) macro
17 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock_64.h:24 #define arch_spin_is_locked(lp) ((lp)->lock != 0) macro
/arch/m32r/include/asm/
spinlock.h:28 #define arch_spin_is_locked(x) (*(volatile int *)(&(x)->slock) <= 0) macro
31 do { cpu_relax(); } while (arch_spin_is_locked(x))
/arch/metag/include/asm/
spinlock.h:11 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock_lnkget.h:10 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
spinlock_lock1.h:7 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/arch/powerpc/include/asm/
spinlock.h:60 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
169 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
/arch/arm/include/asm/
spinlock.h:54 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
121 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/arch/tile/include/asm/
spinlock_32.h:36 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
spinlock_64.h:47 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/arch/mips/include/asm/
spinlock.h:38 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
52 while (arch_spin_is_locked(x)) { cpu_relax(); }
/arch/s390/include/asm/
spinlock.h:54 static inline int arch_spin_is_locked(arch_spinlock_t *lp) in arch_spin_is_locked() function
98 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
/arch/alpha/include/asm/
spinlock.h:15 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
/arch/arm64/include/asm/
spinlock.h:153 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/arch/arc/include/asm/
spinlock.h:16 #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__) macro
19 do { while (arch_spin_is_locked(x)) cpu_relax(); } while (0)
/arch/x86/include/asm/
spinlock.h:164 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/arch/ia64/include/asm/
spinlock.h:110 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
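
Taken together, the hits above all follow the same two-piece idiom: each architecture's arch_spin_is_locked() inspects the lock word and reports whether it looks held, and arch_spin_unlock_wait() spins on that test with cpu_relax() until the lock is released. Below is a minimal userspace C sketch of that idiom only, assuming a simplified stand-in arch_spinlock_t with a plain atomic lock word and a no-op cpu_relax(); it is not the kernel implementation of any architecture listed above.

/*
 * Illustrative stand-ins only: a nonzero lock word means "held",
 * mirroring e.g. the hexagon/xtensa "((x)->lock != 0)" results.
 */
#include <stdatomic.h>
#include <stdio.h>

typedef struct {
	atomic_int lock;		/* 0 = unlocked, nonzero = held */
} arch_spinlock_t;

static inline void cpu_relax(void)
{
	/* real ports emit a pause/yield hint here; no-op in this sketch */
}

static inline int arch_spin_is_locked(arch_spinlock_t *x)
{
	return atomic_load_explicit(&x->lock, memory_order_acquire) != 0;
}

static inline void arch_spin_unlock_wait(arch_spinlock_t *x)
{
	/* the "do { while (...) cpu_relax(); } while (0)" pattern above */
	while (arch_spin_is_locked(x))
		cpu_relax();
}

int main(void)
{
	arch_spinlock_t l = { .lock = 0 };

	printf("locked? %d\n", arch_spin_is_locked(&l));	/* prints 0 */
	atomic_store(&l.lock, 1);
	printf("locked? %d\n", arch_spin_is_locked(&l));	/* prints 1 */
	atomic_store(&l.lock, 0);
	arch_spin_unlock_wait(&l);				/* returns immediately */
	return 0;
}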