Lines matching refs:node (uses of the MCS queue node in the paravirt stubs and in queued_spin_lock_slowpath(), kernel/locking/qspinlock.c)
273 static __always_inline void __pv_init_node(struct mcs_spinlock *node) { } in __pv_init_node() argument
274 static __always_inline void __pv_wait_node(struct mcs_spinlock *node, in __pv_wait_node() argument
277 struct mcs_spinlock *node) { } in __pv_kick_node() argument
279 struct mcs_spinlock *node) in __pv_wait_head_or_lock() argument
318 struct mcs_spinlock *prev, *next, *node; in queued_spin_lock_slowpath() local
401 node = this_cpu_ptr(&qnodes[0].mcs); in queued_spin_lock_slowpath()
402 idx = node->count++; in queued_spin_lock_slowpath()
423 node = grab_mcs_node(node, idx); in queued_spin_lock_slowpath()
437 node->locked = 0; in queued_spin_lock_slowpath()
438 node->next = NULL; in queued_spin_lock_slowpath()
439 pv_init_node(node); in queued_spin_lock_slowpath()
474 WRITE_ONCE(prev->next, node); in queued_spin_lock_slowpath()
476 pv_wait_node(node, prev); in queued_spin_lock_slowpath()
477 arch_mcs_spin_lock_contended(&node->locked); in queued_spin_lock_slowpath()
485 next = READ_ONCE(node->next); in queued_spin_lock_slowpath()
511 if ((val = pv_wait_head_or_lock(lock, node))) in queued_spin_lock_slowpath()
554 next = smp_cond_load_relaxed(&node->next, (VAL)); in queued_spin_lock_slowpath()
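The lines above trace the life of an MCS queue node through the slowpath: pick a per-CPU node and nesting index, clear node->locked and node->next, publish the node on the tail, set prev->next so the predecessor can find us, spin until the lock is handed over, and finally pass it on via node->next. Below is a minimal userspace sketch of that same pattern, under stated assumptions: mcs_node, mcs_lock_acquire() and mcs_lock_release() are illustrative names, not kernel APIs; C11 atomics stand in for WRITE_ONCE()/READ_ONCE(), xchg_tail() and smp_cond_load_relaxed(); the per-CPU qnodes[] array, the tail encoding in the lock word and the pv_*() hooks are deliberately left out.

/*
 * Sketch of the MCS handoff pattern referenced in the listing above.
 * Not the kernel implementation: the kernel packs (cpu, idx) into the
 * lock word instead of storing a tail pointer, and layers the pv_*()
 * paravirt hooks (empty stubs at lines 273-279) on top of this scheme.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stddef.h>

struct mcs_node {
	_Atomic(struct mcs_node *) next;
	atomic_bool locked;		/* set to true when the lock is handed to us */
};

struct mcs_lock {
	_Atomic(struct mcs_node *) tail;	/* must start out NULL */
};

void mcs_lock_acquire(struct mcs_lock *lock, struct mcs_node *node)
{
	struct mcs_node *prev;

	/* mirrors node->locked = 0; node->next = NULL; pv_init_node(node); */
	atomic_store_explicit(&node->next, NULL, memory_order_relaxed);
	atomic_store_explicit(&node->locked, false, memory_order_relaxed);

	/* publish ourselves as the new queue tail (xchg_tail() in the kernel) */
	prev = atomic_exchange_explicit(&lock->tail, node, memory_order_acq_rel);
	if (!prev)
		return;		/* queue was empty: we hold the lock */

	/* mirrors WRITE_ONCE(prev->next, node) ... */
	atomic_store_explicit(&prev->next, node, memory_order_release);

	/* ... and arch_mcs_spin_lock_contended(&node->locked) */
	while (!atomic_load_explicit(&node->locked, memory_order_acquire))
		;
}

void mcs_lock_release(struct mcs_lock *lock, struct mcs_node *node)
{
	struct mcs_node *next =
		atomic_load_explicit(&node->next, memory_order_acquire);

	if (!next) {
		struct mcs_node *expected = node;

		/* no visible successor: try to reset the queue to empty */
		if (atomic_compare_exchange_strong_explicit(&lock->tail,
							    &expected, NULL,
							    memory_order_acq_rel,
							    memory_order_acquire))
			return;

		/*
		 * A successor is mid-enqueue: wait for it to set our ->next,
		 * like smp_cond_load_relaxed(&node->next, (VAL)) at line 554.
		 */
		do {
			next = atomic_load_explicit(&node->next,
						    memory_order_acquire);
		} while (!next);
	}

	/* hand the lock to the successor */
	atomic_store_explicit(&next->locked, true, memory_order_release);
}

In the kernel, the node is not caller-supplied as it is here: lines 401-423 pick it from a small per-CPU qnodes[] array, using node->count as a nesting index so that task, softirq, hardirq and NMI context each get their own slot, which is why the slowpath grabs qnodes[0].mcs first and then re-selects with grab_mcs_node(node, idx).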