
Lines matching refs:lock in kernel/locking/qspinlock.c (file line numbers at left)

148  static __always_inline void clear_pending(struct qspinlock *lock)
150          WRITE_ONCE(lock->pending, 0);

161  static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
163          WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL);

176  static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
182          return (u32)xchg_relaxed(&lock->tail,
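
The three helpers above (lines 148-182) are the _Q_PENDING_BITS == 8 build, used when NR_CPUS < 16K: the pending bit owns a whole byte and the tail fits in the upper 16 bits, so clearing pending is a plain byte store and swapping the tail is a halfword exchange, with no read-modify-write on the full word. A minimal userspace sketch of that layout, assuming a little-endian target (the kernel's real definition lives in include/asm-generic/qspinlock_types.h and also covers big-endian):

    #include <stdint.h>
    #include <stdio.h>

    /* Little-endian mirror of the qspinlock word with _Q_PENDING_BITS == 8.
     * The overlays are what make the byte/halfword stores above safe. */
    union qsl {
            uint32_t val;                    /* kernel: atomic_t val */
            struct {
                    uint8_t  locked;         /* bits  0-7  */
                    uint8_t  pending;        /* bits  8-15 */
                    uint16_t tail;           /* bits 16-31 */
            };
            struct {
                    uint16_t locked_pending; /* bits  0-15 */
                    uint16_t tail_hw;        /* bits 16-31 */
            } hw;
    };

    int main(void)
    {
            union qsl l = { .val = 0 };

            l.tail = 2;              /* some queued CPU's tail encoding */
            l.pending = 1;
            l.locked = 1;            /* _Q_LOCKED_VAL */

            l.pending = 0;           /* clear_pending(): one byte store */
            printf("val=%#010x\n", l.val);   /* prints val=0x00020001 */
            return 0;
    }

clear_pending_set_locked() is the same trick one level up: a single 16-bit store of _Q_LOCKED_VAL to locked_pending clears pending and sets locked at once.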
194  static __always_inline void clear_pending(struct qspinlock *lock)
196          atomic_andnot(_Q_PENDING_VAL, &lock->val);

205  static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
207          atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val);

220  static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
222          u32 old, new, val = atomic_read(&lock->val);
231          old = atomic_cmpxchg_relaxed(&lock->val, val, new);
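
Lines 194-231 are the other build (_Q_PENDING_BITS == 1, NR_CPUS >= 16K): pending shrinks to a single bit and the tail is no longer halfword-aligned, so every update becomes an atomic RMW on the whole 32-bit word; in particular xchg_tail() turns into a cmpxchg loop that preserves the locked and pending bits. A C11 sketch of that loop (the mask value and bit-9 tail offset follow from the 1-bit-pending layout; the kernel uses atomic_cmpxchg_relaxed()):

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    #define Q_LOCKED_PENDING_MASK 0x1ffu   /* locked byte + pending bit */

    /* xchg_tail() for the 1-bit-pending layout: swap in the new tail
     * while keeping locked+pending, retrying until no one races us. */
    static uint32_t xchg_tail(_Atomic uint32_t *val, uint32_t tail)
    {
            uint32_t old = atomic_load_explicit(val, memory_order_relaxed);
            uint32_t new;

            do {
                    new = (old & Q_LOCKED_PENDING_MASK) | tail;
                    /* on failure, old is refreshed with the current word */
            } while (!atomic_compare_exchange_weak_explicit(val, &old, new,
                            memory_order_relaxed, memory_order_relaxed));
            return old;
    }

    int main(void)
    {
            _Atomic uint32_t v = 0x101;             /* locked + pending */
            uint32_t prev = xchg_tail(&v, 4u << 9); /* tail starts at bit 9 */

            printf("prev=%#x now=%#x\n", prev,
                   atomic_load_explicit(&v, memory_order_relaxed));
            return 0;
    }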
249  static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
251          return atomic_fetch_or_acquire(_Q_PENDING_VAL, &lock->val);
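
queued_fetch_set_pending_acquire() (line 249) is the slowpath's bid to become the one pending waiter: a single fetch-or that both sets the bit and returns the old word, so the caller can tell whether it raced with another pending or queued CPU, as at lines 352-365 below where a losing bidder undoes the bit and queues instead. In C11 terms, roughly:

    #include <stdatomic.h>
    #include <stdint.h>

    #define Q_PENDING_VAL 0x100u

    /* Set pending; acquire ordering so that, on the uncontended path,
     * taking the lock right after synchronizes with the last release. */
    static uint32_t fetch_set_pending_acquire(_Atomic uint32_t *val)
    {
            return atomic_fetch_or_explicit(val, Q_PENDING_VAL,
                                            memory_order_acquire);
    }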
261  static __always_inline void set_locked(struct qspinlock *lock)
263          WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);
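
set_locked() (line 261) grants the lock to the MCS queue head with a plain byte store. That is only safe because by this point pending is clear and the head is the sole writer of the locked byte; reusing the union qsl sketch from above:

    /* Queue head takes the lock: no RMW needed, nobody else can write
     * the locked byte once locked and pending have both drained. */
    static void set_locked(union qsl *lock)
    {
            lock->locked = 1;        /* _Q_LOCKED_VAL */
    }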
275  static __always_inline void __pv_kick_node(struct qspinlock *lock,
277  static __always_inline u32 __pv_wait_head_or_lock(struct qspinlock *lock,
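
Lines 275-277 are the native (non-paravirt) stubs: with CONFIG_PARAVIRT_SPINLOCKS off, the pv hooks are empty __always_inline functions the compiler erases, so the slowpath below pays nothing for them. Their shape, reconstructed from the truncated signatures above (the second parameter is an assumption based on the callers at lines 508 and 554):

    /* Native build: paravirt hooks compile to nothing. */
    static __always_inline void __pv_kick_node(struct qspinlock *lock,
                                               struct mcs_spinlock *node) { }
    static __always_inline u32 __pv_wait_head_or_lock(struct qspinlock *lock,
                                                      struct mcs_spinlock *node)
    {
            return 0;
    }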
315  void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
326          if (virt_spin_lock(lock))
337          val = atomic_cond_read_relaxed(&lock->val,
352          val = queued_fetch_set_pending_acquire(lock);
365          clear_pending(lock);
382          atomic_cond_read_acquire(&lock->val, !(VAL & _Q_LOCKED_MASK));
389          clear_pending_set_locked(lock);
415          while (!queued_spin_trylock(lock))
443          if (queued_spin_trylock(lock))
460          old = xchg_tail(lock, tail);
508          if ((val = pv_wait_head_or_lock(lock, node)))
511          val = atomic_cond_read_acquire(&lock->val, !(VAL & _Q_LOCKED_PENDING_MASK));
536          if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL))
545          set_locked(lock);
554          pv_kick_node(lock, next);
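
Taken together, the slowpath matches trace three phases: wait out a transient pending-to-locked handoff (337); try to become the single pending waiter and spin for the owner to release (352-389); otherwise join the MCS queue, publish the new tail with xchg_tail() (460), wait to become queue head (511), then take the lock either with a cmpxchg that also clears the tail when no one is queued behind (536) or with set_locked() plus a kick of the successor (545-554). Below is a runnable miniature of the pending-bit phase only, in C11, with the MCS queue replaced by a plain spin; the names and constants are this sketch's, not the kernel's:

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    #define LOCKED  0x001u           /* cf. _Q_LOCKED_VAL  */
    #define PENDING 0x100u           /* cf. _Q_PENDING_VAL */

    static _Atomic uint32_t lockword;
    static long counter;

    static void lock(void)
    {
            uint32_t val = 0;

            /* fast path: 0 -> LOCKED */
            if (atomic_compare_exchange_strong_explicit(&lockword, &val,
                            LOCKED, memory_order_acquire,
                            memory_order_relaxed))
                    return;

            for (;;) {
                    /* bid for the single pending slot */
                    val = atomic_fetch_or_explicit(&lockword, PENDING,
                                                   memory_order_acquire);
                    if (!(val & PENDING)) {
                            /* we are pending: wait for the owner ... */
                            while (atomic_load_explicit(&lockword,
                                            memory_order_acquire) & LOCKED)
                                    ;
                            /* ... then pending -> locked in one store,
                             * like clear_pending_set_locked() */
                            atomic_store_explicit(&lockword, LOCKED,
                                                  memory_order_relaxed);
                            return;
                    }
                    /* lost the bid: the kernel queues on the MCS list
                     * here; this demo just waits and retries */
                    while (atomic_load_explicit(&lockword,
                                    memory_order_relaxed) & PENDING)
                            ;
            }
    }

    static void unlock(void)
    {
            /* drop only the locked bit; pending may stay set */
            atomic_fetch_and_explicit(&lockword, ~LOCKED,
                                      memory_order_release);
    }

    static void *worker(void *arg)
    {
            (void)arg;
            for (int i = 0; i < 100000; i++) {
                    lock();
                    counter++;
                    unlock();
            }
            return NULL;
    }

    int main(void)
    {
            pthread_t t[4];

            for (int i = 0; i < 4; i++)
                    pthread_create(&t[i], NULL, worker, NULL);
            for (int i = 0; i < 4; i++)
                    pthread_join(t[i], NULL);
            printf("counter=%ld (expect 400000)\n", counter);
            return 0;
    }

The pending bit is what lets one waiter spin on the lock word itself instead of allocating a queue node; only when a second waiter shows up does the kernel fall back to the MCS queue, which this demo replaces with a retry loop.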