Lines matching refs:locked
91 (cmpxchg_acquire(&lock->locked, 0, _Q_LOCKED_VAL) == 0)) { in pv_hybrid_queued_unfair_trylock()
121 return !READ_ONCE(lock->locked) && in trylock_clear_pending()
302 if (READ_ONCE(node->locked)) in pv_wait_node()
322 if (!READ_ONCE(node->locked)) { in pv_wait_node()
343 !READ_ONCE(node->locked)); in pv_wait_node()
391 WRITE_ONCE(lock->locked, _Q_SLOW_VAL); in pv_kick_node()
456 if (xchg(&lock->locked, _Q_SLOW_VAL) == 0) { in pv_wait_head_or_lock()
462 WRITE_ONCE(lock->locked, _Q_LOCKED_VAL); in pv_wait_head_or_lock()
470 pv_wait(&lock->locked, _Q_SLOW_VAL); in pv_wait_head_or_lock()
493 __pv_queued_spin_unlock_slowpath(struct qspinlock *lock, u8 locked) in __pv_queued_spin_unlock_slowpath() argument
497 if (unlikely(locked != _Q_SLOW_VAL)) { in __pv_queued_spin_unlock_slowpath()
523 smp_store_release(&lock->locked, 0); in __pv_queued_spin_unlock_slowpath()
549 u8 locked; in __pv_queued_spin_unlock() local
556 locked = cmpxchg_release(&lock->locked, _Q_LOCKED_VAL, 0); in __pv_queued_spin_unlock()
557 if (likely(locked == _Q_LOCKED_VAL)) in __pv_queued_spin_unlock()
560 __pv_queued_spin_unlock_slowpath(lock, locked); in __pv_queued_spin_unlock()
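Taken together, the matches above trace the paravirt unlock handoff: pv_wait_head_or_lock() swaps _Q_SLOW_VAL into lock->locked before sleeping in pv_wait(), and the unlocker detects that value and kicks the parked vCPU. Below is a condensed sketch of the two unlock functions, assembled around the matched lines; the code between the matches is filled in from the upstream kernel/locking/qspinlock_paravirt.h from memory and lightly trimmed, so treat it as a sketch rather than a verbatim excerpt.

__visible __lockfunc void
__pv_queued_spin_unlock_slowpath(struct qspinlock *lock, u8 locked)
{
	struct pv_node *node;

	if (unlikely(locked != _Q_SLOW_VAL)) {
		WARN(!debug_locks_silent,
		     "pvqspinlock: lock 0x%lx has corrupted value 0x%x!\n",
		     (unsigned long)lock, atomic_read(&lock->val));
		return;
	}

	/*
	 * A failed cmpxchg gives no ordering guarantee, so order the read
	 * of the hashed node after observing lock->locked == _Q_SLOW_VAL;
	 * this pairs with the xchg() in pv_wait_head_or_lock() above.
	 */
	smp_rmb();

	/* Look up and unhash the blocked node, then release the lock. */
	node = pv_unhash(lock);
	smp_store_release(&lock->locked, 0);

	/*
	 * The lock word may now be freed or reused, but the pv_node is
	 * still valid; kick the (likely halted) vCPU out of pv_wait().
	 */
	pv_kick(node->cpu);
}

__visible __lockfunc void __pv_queued_spin_unlock(struct qspinlock *lock)
{
	u8 locked;

	/*
	 * Fast path: clear the lock byte only if it still holds the plain
	 * _Q_LOCKED_VAL; cmpxchg_release() orders the critical section
	 * before the store that hands the lock off.
	 */
	locked = cmpxchg_release(&lock->locked, _Q_LOCKED_VAL, 0);
	if (likely(locked == _Q_LOCKED_VAL))
		return;

	/* A waiter stored _Q_SLOW_VAL: it is parked and must be kicked. */
	__pv_queued_spin_unlock_slowpath(lock, locked);
}

The split keeps the common case (no sleeping waiter) down to a single cmpxchg_release() on the lock byte, while the hypervisor kick, with its hash lookup and extra ordering barrier, stays out of line in the slowpath.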