Lines matching references to node:
270 static __always_inline void __pv_init_node(struct mcs_spinlock *node) { } in __pv_init_node() argument
271 static __always_inline void __pv_wait_node(struct mcs_spinlock *node, in __pv_wait_node() argument
274 struct mcs_spinlock *node) { } in __pv_kick_node() argument
276 struct mcs_spinlock *node) in __pv_wait_head_or_lock() argument
315 struct mcs_spinlock *prev, *next, *node; in queued_spin_lock_slowpath() local
398 node = this_cpu_ptr(&qnodes[0].mcs); in queued_spin_lock_slowpath()
399 idx = node->count++; in queued_spin_lock_slowpath()
420 node = grab_mcs_node(node, idx); in queued_spin_lock_slowpath()
434 node->locked = 0; in queued_spin_lock_slowpath()
435 node->next = NULL; in queued_spin_lock_slowpath()
436 pv_init_node(node); in queued_spin_lock_slowpath()
471 WRITE_ONCE(prev->next, node); in queued_spin_lock_slowpath()
473 pv_wait_node(node, prev); in queued_spin_lock_slowpath()
474 arch_mcs_spin_lock_contended(&node->locked); in queued_spin_lock_slowpath()
482 next = READ_ONCE(node->next); in queued_spin_lock_slowpath()
508 if ((val = pv_wait_head_or_lock(lock, node))) in queued_spin_lock_slowpath()
551 next = smp_cond_load_relaxed(&node->next, (VAL)); in queued_spin_lock_slowpath()
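Taken together, these references trace the lifecycle of an MCS queue node in the qspinlock slow path: the CPU picks its per-CPU node (line 398), bumps the nesting count to select a slot (399, 420), initializes node->locked and node->next (434-436), publishes itself to the previous tail via WRITE_ONCE(prev->next, node) (471), spins on its own node->locked (474), and, once it holds the lock, waits for a successor to appear with smp_cond_load_relaxed(&node->next, (VAL)) (551). The sketch below is a simplified, self-contained illustration of that node lifecycle, not the kernel code: it drops the qspinlock word encoding, the paravirt hooks (pv_init_node(), pv_wait_node(), pv_wait_head_or_lock()), and the per-CPU qnodes[]/idx nesting, and it uses C11 atomics in place of READ_ONCE()/WRITE_ONCE()/smp_cond_load_relaxed(). The mcs_node/mcs_lock types and function names are invented for the sketch.

    /* Simplified MCS-style queue lock illustrating the node handling above. */
    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stddef.h>

    struct mcs_node {
            _Atomic(struct mcs_node *) next;   /* successor in the wait queue */
            atomic_bool locked;                /* set by predecessor when we may proceed */
    };

    struct mcs_lock {
            _Atomic(struct mcs_node *) tail;   /* last waiter, NULL if queue is empty */
    };

    static void mcs_lock_acquire(struct mcs_lock *lock, struct mcs_node *node)
    {
            struct mcs_node *prev;

            /* Mirrors node->locked = 0; node->next = NULL; (lines 434-435). */
            atomic_store_explicit(&node->locked, false, memory_order_relaxed);
            atomic_store_explicit(&node->next, NULL, memory_order_relaxed);

            /* Become the new tail (the kernel encodes the tail into the lock word). */
            prev = atomic_exchange_explicit(&lock->tail, node, memory_order_acq_rel);
            if (prev) {
                    /* Mirrors WRITE_ONCE(prev->next, node) (line 471) ... */
                    atomic_store_explicit(&prev->next, node, memory_order_release);
                    /* ... and the spin in arch_mcs_spin_lock_contended(&node->locked) (line 474). */
                    while (!atomic_load_explicit(&node->locked, memory_order_acquire))
                            ;
            }
            /* We are now at the head of the queue and own the lock. */
    }

    static void mcs_lock_release(struct mcs_lock *lock, struct mcs_node *node)
    {
            struct mcs_node *next =
                    atomic_load_explicit(&node->next, memory_order_acquire);

            if (!next) {
                    /* No visible successor: try to reset the tail to empty. */
                    struct mcs_node *expected = node;

                    if (atomic_compare_exchange_strong_explicit(&lock->tail, &expected, NULL,
                                                                memory_order_acq_rel,
                                                                memory_order_acquire))
                            return;
                    /* A successor is still linking in; wait for node->next to be set,
                     * like smp_cond_load_relaxed(&node->next, (VAL)) (line 551). */
                    while (!(next = atomic_load_explicit(&node->next, memory_order_acquire)))
                            ;
            }
            /* Hand ownership to the successor, the counterpart of the spin above. */
            atomic_store_explicit(&next->locked, true, memory_order_release);
    }

In the kernel itself, node is not caller-supplied: lines 398, 399 and 420 show it being taken from the per-CPU qnodes array, where idx = node->count++ selects one of a small number of slots so that task, softirq, hardirq and NMI context can each nest their own node. The pv_*() calls at lines 270-276, 436, 473 and 508 are the paravirt hooks, which compile to empty stubs on bare metal.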