Lines matching refs:lock — every reference to `lock` in the generic, spinlock-based 64-bit atomic implementation (a sketch of the address-hashed locking scheme follows the listing).
28 arch_spinlock_t lock; member
32 .lock = __ARCH_SPIN_LOCK_UNLOCKED,
42 return &atomic64_lock[addr & (NR_LOCKS - 1)].lock; in lock_addr()
48 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_read() local
52 arch_spin_lock(lock); in generic_atomic64_read()
54 arch_spin_unlock(lock); in generic_atomic64_read()
63 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_set() local
66 arch_spin_lock(lock); in generic_atomic64_set()
68 arch_spin_unlock(lock); in generic_atomic64_set()
77 arch_spinlock_t *lock = lock_addr(v); \
80 arch_spin_lock(lock); \
82 arch_spin_unlock(lock); \
91 arch_spinlock_t *lock = lock_addr(v); \
95 arch_spin_lock(lock); \
97 arch_spin_unlock(lock); \
107 arch_spinlock_t *lock = lock_addr(v); \
111 arch_spin_lock(lock); \
114 arch_spin_unlock(lock); \
144 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_dec_if_positive() local
148 arch_spin_lock(lock); in generic_atomic64_dec_if_positive()
152 arch_spin_unlock(lock); in generic_atomic64_dec_if_positive()
161 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_cmpxchg() local
165 arch_spin_lock(lock); in generic_atomic64_cmpxchg()
169 arch_spin_unlock(lock); in generic_atomic64_cmpxchg()
178 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_xchg() local
182 arch_spin_lock(lock); in generic_atomic64_xchg()
185 arch_spin_unlock(lock); in generic_atomic64_xchg()
194 arch_spinlock_t *lock = lock_addr(v); in generic_atomic64_fetch_add_unless() local
198 arch_spin_lock(lock); in generic_atomic64_fetch_add_unless()
202 arch_spin_unlock(lock); in generic_atomic64_fetch_add_unless()
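
The reference pattern above matches lib/atomic64.c in the Linux kernel: the generic 64-bit atomics keep a small array of spinlocks (atomic64_lock[NR_LOCKS]), hash the address of each atomic64_t to pick a lock (lock_addr()), and bracket every read, set, xchg, cmpxchg and arithmetic operation with arch_spin_lock()/arch_spin_unlock() on that lock. Below is a minimal userspace sketch of the same scheme, offered only as an illustration: pthread spinlocks stand in for arch_spinlock_t, the address hash is simplified, and names such as my_atomic64_t and my_atomic64_add_return are invented for the example; the kernel version additionally disables interrupts around the critical section, which this sketch does not model.

	/*
	 * Userspace sketch of the address-hashed spinlock scheme.
	 * Illustrative names; pthread spinlocks stand in for arch_spinlock_t.
	 * This is not the kernel API.
	 */
	#include <pthread.h>
	#include <stdint.h>
	#include <stdio.h>

	#define NR_LOCKS 16			/* power of two, so the mask below works */

	typedef struct { int64_t counter; } my_atomic64_t;

	/* One lock per bucket; the union pads each lock to its own cache line. */
	static union {
		pthread_spinlock_t lock;
		char pad[64];
	} atomic64_lock[NR_LOCKS];

	/* Hash the variable's address to pick the lock that protects it. */
	static pthread_spinlock_t *lock_addr(const my_atomic64_t *v)
	{
		uintptr_t addr = (uintptr_t)v;

		addr >>= 6;			/* simplified; the kernel also folds in higher bits */
		return &atomic64_lock[addr & (NR_LOCKS - 1)].lock;
	}

	static void my_atomic64_global_init(void)
	{
		for (int i = 0; i < NR_LOCKS; i++)
			pthread_spin_init(&atomic64_lock[i].lock, PTHREAD_PROCESS_PRIVATE);
	}

	/* Same lock/operate/unlock pattern as the generic_atomic64_* helpers. */
	static int64_t my_atomic64_add_return(int64_t a, my_atomic64_t *v)
	{
		pthread_spinlock_t *lock = lock_addr(v);
		int64_t val;

		pthread_spin_lock(lock);
		val = v->counter += a;
		pthread_spin_unlock(lock);
		return val;
	}

	static int64_t my_atomic64_read(const my_atomic64_t *v)
	{
		pthread_spinlock_t *lock = lock_addr(v);
		int64_t val;

		pthread_spin_lock(lock);
		val = v->counter;
		pthread_spin_unlock(lock);
		return val;
	}

	int main(void)
	{
		my_atomic64_t x = { .counter = 40 };

		my_atomic64_global_init();
		my_atomic64_add_return(2, &x);
		printf("%lld\n", (long long)my_atomic64_read(&x));	/* prints 42 */
		return 0;
	}

The point of hashing on the address is that a fixed set of NR_LOCKS locks can cover any number of 64-bit variables without storing a lock per variable; two variables only contend when their addresses hash to the same bucket, and padding each lock to a cache line keeps the buckets from false-sharing with one another.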