Lines matching refs: val (kernel/locking/qspinlock.c)
Each entry shows the file line number, the matching source line, and the enclosing function; "local" and "argument" mark the lines where val is declared.

197 atomic_andnot(_Q_PENDING_VAL, &lock->val); in clear_pending()
208 atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val); in clear_pending_set_locked()
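
The two fragments above are the pending-bit helpers. _Q_PENDING_VAL is bit 8 of lock->val and _Q_LOCKED_VAL is bit 0, so clear_pending() can drop the pending bit with an atomic AND-NOT, while clear_pending_set_locked() clears pending and sets locked in a single atomic add: the two fields live in disjoint bytes, so no carry can leak from one into the other. A minimal user-space sketch of the same trick with C11 <stdatomic.h> (the constants are redefined locally; this is not the kernel's atomic_t API):

    #include <stdatomic.h>
    #include <assert.h>

    #define Q_LOCKED_VAL   (1U << 0)   /* locked byte, bits 0-7 */
    #define Q_PENDING_VAL  (1U << 8)   /* pending bit, bit 8 */

    int main(void)
    {
        atomic_uint val = Q_PENDING_VAL;    /* *,1,0: pending set, unlocked */

        /* clear_pending(): drop the pending bit, leave the rest alone */
        atomic_fetch_and(&val, ~Q_PENDING_VAL);
        assert(atomic_load(&val) == 0);

        /* clear_pending_set_locked(): *,1,0 -> *,0,1 in one atomic add;
         * unsigned wraparound makes -Q_PENDING_VAL act as a subtraction */
        atomic_store(&val, Q_PENDING_VAL);
        atomic_fetch_add(&val, -Q_PENDING_VAL + Q_LOCKED_VAL);
        assert(atomic_load(&val) == Q_LOCKED_VAL);
        return 0;
    }
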
223 u32 old, new, val = atomic_read(&lock->val); in xchg_tail() local
226 new = (val & _Q_LOCKED_PENDING_MASK) | tail; in xchg_tail()
232 old = atomic_cmpxchg_relaxed(&lock->val, val, new); in xchg_tail()
233 if (old == val) in xchg_tail()
236 val = old; in xchg_tail()
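
Fragments 223-236 are the cmpxchg fallback of xchg_tail(): read lock->val, build a new word that keeps the locked and pending bytes but swaps in the new tail, and retry with the freshly observed value whenever the compare-and-exchange loses a race. A sketch of the same retry loop in portable C11 (names and constants are local stand-ins); note that C11's compare_exchange already writes the observed value back into its expected argument on failure, which stands in for the explicit old/val shuffle at fragments 232-236:

    #include <stdatomic.h>

    #define Q_LOCKED_PENDING_MASK  0x0000ffffU   /* locked byte + pending byte */

    /* Atomically replace the tail bits (31:16) of *lock_val, preserving the
     * locked/pending bytes, and return the full previous value. Relaxed
     * ordering mirrors atomic_cmpxchg_relaxed() in the listing. */
    static unsigned xchg_tail_sketch(atomic_uint *lock_val, unsigned tail)
    {
        unsigned val = atomic_load_explicit(lock_val, memory_order_relaxed);
        unsigned new;

        do {
            new = (val & Q_LOCKED_PENDING_MASK) | tail;
            /* on failure, val is refreshed with the current contents */
        } while (!atomic_compare_exchange_weak_explicit(lock_val, &val, new,
                                                        memory_order_relaxed,
                                                        memory_order_relaxed));
        return val;   /* the value the swap replaced */
    }
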
252 return atomic_fetch_or_acquire(_Q_PENDING_VAL, &lock->val); in queued_fetch_set_pending_acquire()
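
Fragment 252 sets the pending bit with a single fetch-OR carrying acquire ordering and hands back the pre-OR value, so the caller at fragment 353 below can tell whether the lock or the wait queue was already claimed at the moment the bit went in. The C11 equivalent is direct (names are local stand-ins):

    #include <stdatomic.h>

    #define Q_PENDING_VAL  (1U << 8)

    /* Set the pending bit; return lock->val as it was just beforehand. */
    static unsigned fetch_set_pending_acquire_sketch(atomic_uint *lock_val)
    {
        return atomic_fetch_or_explicit(lock_val, Q_PENDING_VAL,
                                        memory_order_acquire);
    }
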
316 void __lockfunc queued_spin_lock_slowpath(struct qspinlock *lock, u32 val) in queued_spin_lock_slowpath() argument
336 if (val == _Q_PENDING_VAL) { in queued_spin_lock_slowpath()
338 val = atomic_cond_read_relaxed(&lock->val, in queued_spin_lock_slowpath()
345 if (val & ~_Q_LOCKED_MASK) in queued_spin_lock_slowpath()
353 val = queued_fetch_set_pending_acquire(lock); in queued_spin_lock_slowpath()
362 if (unlikely(val & ~_Q_LOCKED_MASK)) { in queued_spin_lock_slowpath()
365 if (!(val & _Q_PENDING_MASK)) in queued_spin_lock_slowpath()
382 if (val & _Q_LOCKED_MASK) in queued_spin_lock_slowpath()
383 atomic_cond_read_acquire(&lock->val, !(VAL & _Q_LOCKED_MASK)); in queued_spin_lock_slowpath()
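
In the fragments above, VAL is not a stray identifier: atomic_cond_read_relaxed()/atomic_cond_read_acquire() expand to the kernel's smp_cond_load macros, which bind each freshly loaded value to the name VAL inside the condition expression. Fragments 336-338 wait out the transient pending-but-unlocked state, and fragments 382-383 are the pending owner spinning until the locked byte clears. A hedged C11 approximation of the latter loop, without the kernel's architecture hooks (some architectures idle in an event-wait instruction instead of pure spinning):

    #include <stdatomic.h>

    #define Q_LOCKED_MASK  0x000000ffU

    /* Spin until the locked byte clears; the acquire load pairs with the
     * previous owner's release store, as at fragment 383. */
    static unsigned cond_read_unlocked_acquire(atomic_uint *lock_val)
    {
        unsigned val;

        while ((val = atomic_load_explicit(lock_val,
                                           memory_order_acquire)) & Q_LOCKED_MASK)
            ;   /* the kernel can relax the CPU here, e.g. cpu_relax() */
        return val;
    }
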
511 if ((val = pv_wait_head_or_lock(lock, node))) in queued_spin_lock_slowpath()
514 val = atomic_cond_read_acquire(&lock->val, !(VAL & _Q_LOCKED_PENDING_MASK)); in queued_spin_lock_slowpath()
538 if ((val & _Q_TAIL_MASK) == tail) { in queued_spin_lock_slowpath()
539 if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL)) in queued_spin_lock_slowpath()
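
Fragments 511-539 are the queue head's exit path: it spins (or, under paravirt, sleeps in pv_wait_head_or_lock()) until both the locked byte and the pending bit clear, then, if the tail still points at its own MCS node, claims the lock and empties the queue in one compare-and-exchange from (tail, 0, 0) to (0, 0, 1). atomic_try_cmpxchg_relaxed() behaves like C11 compare_exchange: it returns a boolean and refreshes val on failure, so a lost race (a new waiter appended behind us) leaves val ready for the fallback handoff. A sketch of that final step (constants and names are local stand-ins):

    #include <stdatomic.h>
    #include <stdbool.h>

    #define Q_LOCKED_VAL  1U
    #define Q_TAIL_MASK   0xffff0000U

    /* If *val still names us as the last queued waiter, swing the whole
     * word to just the locked byte, clearing the tail in the same atomic
     * step. On failure, *val is refreshed with the current contents. */
    static bool try_lock_and_clear_tail(atomic_uint *lock_val,
                                        unsigned *val, unsigned tail)
    {
        if ((*val & Q_TAIL_MASK) != tail)
            return false;   /* a waiter queued behind us; hand off instead */
        return atomic_compare_exchange_strong_explicit(lock_val, val,
                                                       Q_LOCKED_VAL,
                                                       memory_order_relaxed,
                                                       memory_order_relaxed);
    }
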