Lines matching refs:node (identifier references to "node" in the kernel's optimistic spin queue implementation, kernel/locking/osq_lock.c)
32 static inline int node_cpu(struct optimistic_spin_node *node) in node_cpu() argument
34 return node->cpu - 1; in node_cpu()
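For orientation: node_cpu() above undoes a +1 bias on the stored CPU number, because the OSQ code keeps the queue tail as "CPU id + 1" so that 0 can stand for "unlocked". Below is a minimal sketch of the node layout and the encode/decode pair this implies; it follows the naming of the upstream file (struct optimistic_spin_node, encode_cpu()) but is a reconstruction for illustration, not a verbatim copy.

	/*
	 * Sketch: per-CPU queue node plus the CPU-number encoding implied by
	 * "return node->cpu - 1" above.  The tail stores cpu + 1 so that 0
	 * can double as OSQ_UNLOCKED_VAL ("nobody queued").
	 */
	struct optimistic_spin_node {
		struct optimistic_spin_node *next, *prev;
		int locked;	/* 1 once the lock has been handed to this node */
		int cpu;	/* encoded CPU number, i.e. real CPU id + 1 */
	};

	static inline int encode_cpu(int cpu_nr)
	{
		return cpu_nr + 1;	/* reserve 0 for "unlocked" */
	}

	static inline int node_cpu(struct optimistic_spin_node *node)
	{
		return node->cpu - 1;	/* undo the bias, e.g. for vcpu_is_preempted() */
	}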
55 struct optimistic_spin_node *node, in osq_wait_next() argument
81 if (node->next) { in osq_wait_next()
84 next = xchg(&node->next, NULL); in osq_wait_next()
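The two node->next hits above sit inside osq_wait_next(), the helper the slow paths use to obtain a stable successor while backing out of the queue. A condensed sketch of its loop follows; it assumes the encode_cpu() helper above and the upstream optimistic_spin_queue type with an atomic_t tail, and leaves out the upstream memory-ordering commentary.

	/*
	 * Condensed sketch of osq_wait_next(): either we are still the queue
	 * tail and can retire it to @old_cpu, or a successor has published
	 * itself in node->next, in which case we claim it with xchg() so
	 * that exactly one path ever operates on it.
	 */
	static struct optimistic_spin_node *
	osq_wait_next(struct optimistic_spin_queue *lock,
		      struct optimistic_spin_node *node, int old_cpu)
	{
		struct optimistic_spin_node *next = NULL;
		int curr = encode_cpu(smp_processor_id());

		for (;;) {
			/* Still the tail: hand the tail back and stop waiting. */
			if (atomic_read(&lock->tail) == curr &&
			    atomic_cmpxchg_acquire(&lock->tail, curr, old_cpu) == curr)
				break;

			/* A successor linked in behind us: take it for ourselves. */
			if (node->next) {
				next = xchg(&node->next, NULL);
				if (next)
					break;
			}

			cpu_relax();
		}

		return next;
	}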
95 struct optimistic_spin_node *node = this_cpu_ptr(&osq_node); in osq_lock() local
100 node->locked = 0; in osq_lock()
101 node->next = NULL; in osq_lock()
102 node->cpu = curr; in osq_lock()
115 node->prev = prev; in osq_lock()
129 WRITE_ONCE(prev->next, node); in osq_lock()
146 if (smp_cond_load_relaxed(&node->locked, VAL || need_resched() || in osq_lock()
147 vcpu_is_preempted(node_cpu(node->prev)))) in osq_lock()
164 if (data_race(prev->next) == node && in osq_lock()
165 cmpxchg(&prev->next, node, NULL) == node) in osq_lock()
173 if (smp_load_acquire(&node->locked)) in osq_lock()
182 prev = READ_ONCE(node->prev); in osq_lock()
192 next = osq_wait_next(lock, node, prev->cpu); in osq_lock()
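Most of the hits above come from osq_lock(): the per-CPU node is initialised, published as the new tail, linked behind its predecessor, and then spun on until the predecessor hands the lock over; the later hits belong to the unqueue path taken when need_resched() or vCPU preemption makes further spinning pointless. The sketch below strings those referenced lines together; it is trimmed (corner-case comments omitted) and assumes the upstream decode_cpu() helper and per-CPU osq_node variable along with the usual kernel headers.

	/*
	 * Trimmed sketch of osq_lock(): queue our node, spin on node->locked,
	 * and unqueue (unlink from prev and next) if we must stop spinning.
	 */
	bool osq_lock(struct optimistic_spin_queue *lock)
	{
		struct optimistic_spin_node *node = this_cpu_ptr(&osq_node);
		struct optimistic_spin_node *prev, *next;
		int curr = encode_cpu(smp_processor_id());
		int old;

		node->locked = 0;
		node->next = NULL;
		node->cpu = curr;

		/* Publish ourselves as the new tail; old == 0 means it was free. */
		old = atomic_xchg(&lock->tail, curr);
		if (old == OSQ_UNLOCKED_VAL)
			return true;

		prev = decode_cpu(old);		/* per_cpu_ptr() lookup of the old tail */
		node->prev = prev;
		smp_wmb();			/* node fields visible before we are linked */
		WRITE_ONCE(prev->next, node);

		/* Spin until our predecessor unlocks us, unless we should back off. */
		if (smp_cond_load_relaxed(&node->locked, VAL || need_resched() ||
					  vcpu_is_preempted(node_cpu(node->prev))))
			return true;

		/* Unqueue step A: unlink ourselves from @prev. */
		for (;;) {
			if (data_race(prev->next) == node &&
			    cmpxchg(&prev->next, node, NULL) == node)
				break;

			/* The lock may still arrive while we are trying to leave. */
			if (smp_load_acquire(&node->locked))
				return true;

			cpu_relax();
			prev = READ_ONCE(node->prev);
		}

		/* Unqueue steps B/C: detach from @next (or retire the tail). */
		next = osq_wait_next(lock, node, prev->cpu);
		if (next) {
			WRITE_ONCE(next->prev, prev);
			WRITE_ONCE(prev->next, next);
		}

		return false;
	}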
213 struct optimistic_spin_node *node, *next; in osq_unlock() local
226 node = this_cpu_ptr(&osq_node); in osq_unlock()
227 next = xchg(&node->next, NULL); in osq_unlock()
233 next = osq_wait_next(lock, node, OSQ_UNLOCKED_VAL); in osq_unlock()
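Finally, the osq_unlock() hits mirror the lock path: release an uncontended lock through lock->tail, otherwise pull the successor out of node->next (or wait for it via osq_wait_next()) and pass the lock on by setting its locked flag. A short sketch, again reconstructed rather than copied verbatim from the upstream file:

	/*
	 * Sketch of osq_unlock(): fast path for the uncontended case, then
	 * hand-off to the successor by writing next->locked = 1.
	 */
	void osq_unlock(struct optimistic_spin_queue *lock)
	{
		struct optimistic_spin_node *node, *next;
		int curr = encode_cpu(smp_processor_id());

		/* Fast path: nobody queued behind us, just clear the tail. */
		if (likely(atomic_cmpxchg_release(&lock->tail, curr,
						  OSQ_UNLOCKED_VAL) == curr))
			return;

		/* Second-fastest path: the successor is already linked in. */
		node = this_cpu_ptr(&osq_node);
		next = xchg(&node->next, NULL);
		if (next) {
			WRITE_ONCE(next->locked, 1);
			return;
		}

		/* Otherwise wait for a successor, or for the queue to drain. */
		next = osq_wait_next(lock, node, OSQ_UNLOCKED_VAL);
		if (next)
			WRITE_ONCE(next->locked, 1);
	}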