Searched refs:arch_spin_is_locked (Results 1 – 23 of 23) sorted by relevance

/arch/hexagon/include/asm/
spinlock.h 180 do {while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
181 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
/arch/parisc/include/asm/
spinlock.h 9 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
17 do { cpu_relax(); } while (arch_spin_is_locked(x))
115 while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0) in arch_read_trylock()
/arch/s390/lib/
spinlock.c 38 if (arch_spin_is_locked(lp)) in arch_spin_lock_wait()
67 if (arch_spin_is_locked(lp)) in arch_spin_lock_wait_flags()
95 if (arch_spin_is_locked(lp)) in arch_spin_trylock_retry()
/arch/sh/include/asm/
spinlock.h 26 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
29 do { while (arch_spin_is_locked(x)) cpu_relax(); } while (0)
/arch/blackfin/include/asm/
spinlock.h 27 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
51 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
/arch/mn10300/include/asm/
spinlock.h 25 #define arch_spin_is_locked(x) (*(volatile signed char *)(&(x)->slock) != 0) macro
26 #define arch_spin_unlock_wait(x) do { barrier(); } while (arch_spin_is_locked(x))
/arch/cris/include/arch-v32/arch/
spinlock.h 12 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
27 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
/arch/m32r/include/asm/
spinlock.h 28 #define arch_spin_is_locked(x) (*(volatile int *)(&(x)->slock) <= 0) macro
31 do { cpu_relax(); } while (arch_spin_is_locked(x))
/arch/sparc/include/asm/
spinlock_32.h 14 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) macro
17 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock_64.h 24 #define arch_spin_is_locked(lp) ((lp)->lock != 0) macro
/arch/arm/include/asm/
spinlock.h 73 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
75 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
/arch/s390/include/asm/
spinlock.h 39 #define arch_spin_is_locked(x) ((x)->owner_cpu != 0) macro
41 do { while (arch_spin_is_locked(lock)) \
/arch/powerpc/include/asm/
spinlock.h 31 #define arch_spin_is_locked(x) ((x)->slock != 0) macro
156 do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
/arch/tile/include/asm/
spinlock_32.h 36 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
spinlock_64.h 45 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/arch/x86/include/asm/
spinlock.h 103 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
139 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
paravirt.h 757 static inline int arch_spin_is_locked(struct arch_spinlock *lock) in arch_spin_is_locked() function
/arch/mips/include/asm/
spinlock.h 37 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
46 while (arch_spin_is_locked(x)) { cpu_relax(); }
/arch/tile/lib/
spinlock_64.c 68 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
spinlock_32.c 68 while (arch_spin_is_locked(lock)) in arch_spin_unlock_wait()
/arch/alpha/include/asm/
spinlock.h 15 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
/arch/mips/pmc-sierra/yosemite/
smp.c 23 while (arch_spin_is_locked(&launch_lock)); in prom_smp_bootstrap()
/arch/ia64/include/asm/
spinlock.h 105 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
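
Taken together, the results show the same two idioms on every architecture: arch_spin_is_locked() is either a macro that tests the lock word directly or a small inline function (the entries tagged "in arch_spin_is_locked() function"), and its main consumer is arch_spin_unlock_wait(), a busy-wait loop around cpu_relax(). A minimal generic sketch of that pattern, assuming a simple lock word that is non-zero while held (illustrative only, not any one architecture's actual layout):

	/* Illustrative sketch -- real per-arch lock layouts differ, see the results above. */
	typedef struct {
		volatile unsigned int lock;	/* 0 = unlocked, non-zero = held */
	} arch_spinlock_t;

	/* True while some CPU holds the lock. */
	#define arch_spin_is_locked(x)	((x)->lock != 0)

	/* Spin until the current holder releases the lock; cpu_relax() is the
	 * kernel's per-arch busy-wait hint, as used in the snippets above. */
	#define arch_spin_unlock_wait(x) \
		do { cpu_relax(); } while (arch_spin_is_locked(x))

Architectures whose entries read "in arch_spin_is_locked() function" rather than "macro" implement the same check as an inline function, typically because their lock layout holds more than a single flag word (for instance x86's ticket lock).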