Searched refs:atomic_read (Results 1 – 25 of 70) sorted by relevance
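Every hit below is the same primitive: atomic_read() performs a plain, tear-free load of an atomic_t with no implied memory ordering, so callers get a lockless snapshot that may already be stale by the time it is used. A minimal sketch of the idiom, with a hypothetical counter name:

    #include <linux/atomic.h>

    static atomic_t example_users = ATOMIC_INIT(0);

    /* Lockless snapshot; correctness must not depend on the value
     * staying current after the read. */
    static bool example_in_use(void)
    {
            return atomic_read(&example_users) != 0;
    }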
31 return atomic_read(&rwb->readers) != READER_BIAS; in rw_base_is_locked()
36 return atomic_read(&rwb->readers) == WRITER_BIAS; in rw_base_is_write_locked()
41 return atomic_read(&rwb->readers) > 0; in rw_base_is_contended()
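These rwbase_rt predicates work because the readers field is compared against bias constants rather than treated as a plain count, so a single atomic_read() classifies the lock without taking it. A hedged sketch of the encoding, constants assumed to match include/linux/rwbase_rt.h:

    #define READER_BIAS     (1U << 31)
    #define WRITER_BIAS     (1U << 30)

    /* One lockless load answers each question:
     *   readers != READER_BIAS  ->  the lock is held by someone
     *   readers == WRITER_BIAS  ->  a writer holds it
     *   readers > 0             ->  the lock is contended
     */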
44 return atomic_read(&work->node.a_flags) & IRQ_WORK_PENDING; in irq_work_is_pending()
49 return atomic_read(&work->node.a_flags) & IRQ_WORK_BUSY; in irq_work_is_busy()
54 return atomic_read(&work->node.a_flags) & IRQ_WORK_HARD_IRQ; in irq_work_is_hard()
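Each irq_work helper masks one snapshot of the flags word. A hypothetical caller that needs to wait for an item could poll the same bits, though real code would use irq_work_sync(); a sketch only:

    #include <linux/irq_work.h>

    static void my_wait_for_irq_work(struct irq_work *work)     /* hypothetical */
    {
            /* Each iteration takes a fresh lockless snapshot of the flags. */
            while (atomic_read(&work->node.a_flags) & IRQ_WORK_BUSY)
                    cpu_relax();
    }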
630 atomic_read(&maple_tree_tests_passed), \
631 atomic_read(&maple_tree_tests_run)); \
646 atomic_read(&maple_tree_tests_passed), \
647 atomic_read(&maple_tree_tests_run)); \
663 atomic_read(&maple_tree_tests_passed), \
664 atomic_read(&maple_tree_tests_run)); \
679 atomic_read(&maple_tree_tests_passed), \
680 atomic_read(&maple_tree_tests_run)); \
697 atomic_read(&maple_tree_tests_passed), \
698 atomic_read(&maple_tree_tests_run)); \
[all …]
33 return atomic_read(&lock->tail) != OSQ_UNLOCKED_VAL; in osq_is_locked()
61 return atomic_read(this_cpu_ptr(&context_tracking.state)) & CT_RCU_WATCHING_MASK; in ct_rcu_watching()
68 return atomic_read(&ct->state) & CT_RCU_WATCHING_MASK; in ct_rcu_watching_cpu()
317 v = atomic_read(&key->enabled); in static_key_fast_inc_not_disabled()
347 if (atomic_read(&key->enabled) != 0) { in static_key_enable()
348 WARN_ON_ONCE(atomic_read(&key->enabled) != 1); in static_key_enable()
358 if (atomic_read(&key->enabled) != 1) { in static_key_disable()
359 WARN_ON_ONCE(atomic_read(&key->enabled) != 0); in static_key_disable()
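The static_key_enable()/static_key_disable() hits show a double-checked pattern: a lock-free atomic_read() filters out the already-in-state case before the expensive jump-label patching path, which re-checks under its own lock. A caller-side sketch; my_enable_once() is hypothetical:

    #include <linux/jump_label.h>

    static void my_enable_once(struct static_key *key)
    {
            /* Fast path: nothing to do if the key is already on. */
            if (atomic_read(&key->enabled) != 0)
                    return;

            static_key_enable(key);     /* slow path re-checks under lock */
    }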
53 state = atomic_read(&once->state); in call_once()
37 unsigned int c = atomic_read(&ref->refcnt); in rcuref_read()
127 if (likely(atomic_read(&kfence_allocation_gate) > 0)) in kfence_alloc()
179 return atomic_read(&cl->remaining) & CLOSURE_REMAINING_MASK; in closure_nr_remaining()
293 unsigned old = atomic_read(&cl->remaining); in closure_get_not_zero()
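In closure_get_not_zero() the atomic_read() seeds a cmpxchg retry loop: a reference is taken only while the count is still non-zero. A generic sketch of that idiom with hypothetical names:

    #include <linux/atomic.h>

    static bool my_get_not_zero(atomic_t *refs)
    {
            int old = atomic_read(refs);

            do {
                    if (old == 0)
                            return false;       /* object is already dying */
            } while (!atomic_try_cmpxchg(refs, &old, old + 1));

            return true;
    }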
138 return atomic_read(&sem->block); in percpu_is_write_locked()
557 int old = atomic_read(index); in sbq_index_atomic_inc()
573 ws = &sbq->ws[atomic_read(wait_index)]; in sbq_wait_ptr()
55 u32 old = atomic_read(lock); in arch_spin_trylock()
66 u32 val = atomic_read(lock); in arch_spin_unlock()
87 u32 val = atomic_read(lock); in arch_spin_is_contended()
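These arch_spin_* hits share a read-before-cmpxchg shape: a cheap atomic_read() rejects a held lock before the more expensive atomic is attempted. A hedged sketch, assuming 0 encodes unlocked:

    static bool my_trylock(atomic_t *lock)      /* hypothetical */
    {
            int old = atomic_read(lock);

            if (old != 0)
                    return false;               /* held; don't even try */

            /* Acquire ordering keeps the critical section from floating
             * above the lock acquisition. */
            return atomic_try_cmpxchg_acquire(lock, &old, 1);
    }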
57 return atomic_read(&lock->val); in queued_spin_is_locked()
83 return atomic_read(&lock->val) & ~_Q_LOCKED_MASK; in queued_spin_is_contended()
92 int val = atomic_read(&lock->val); in queued_spin_trylock()
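queued_spin_is_contended() works because the qspinlock word packs the owner byte together with the pending bit and MCS tail; masking off _Q_LOCKED_MASK leaves only waiter state. A sketch of the decode, constants assumed from include/asm-generic/qspinlock_types.h:

    #include <asm-generic/qspinlock_types.h>

    static void my_decode(struct qspinlock *lock,
                          bool *locked, bool *contended)        /* hypothetical */
    {
            int val = atomic_read(&lock->val);

            *locked    = val & _Q_LOCKED_MASK;  /* owner byte set */
            *contended = val & ~_Q_LOCKED_MASK; /* pending and/or tail set */
    }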
48 cnts = atomic_read(&lock->cnts); in queued_read_trylock()
67 cnts = atomic_read(&lock->cnts); in queued_write_trylock()
22 return (atomic_read(&cxl_use_count) != 0); in cxl_ctx_in_use()
42 if (atomic_read(&net->ct.labels_used) == 0) in nf_ct_labels_ext_add()
38 __entry->usage_count = atomic_read(
44 __entry->child_count = atomic_read(
33 __entry->mapcount = atomic_read(&page->_mapcount);
82 __entry->mapcount = atomic_read(&page->_mapcount);
29 atomic_read(&q->owned_by_drv_count);
190 __entry->wcount = atomic_read(&inode->i_writecount);
191 __entry->rcount = atomic_read(&inode->i_readcount);
192 __entry->icount = atomic_read(&inode->i_count);
84 __entry->refcnt = atomic_read(&mod->refcnt);
247 return atomic_read(&queue->qlen); in reqsk_queue_len()
252 return atomic_read(&queue->young); in reqsk_queue_len_young()
522 return atomic_read(&net->ipv4.rt_genid); in rt_genid_ipv4()
528 return atomic_read(&net->ipv6.fib6_sernum); in rt_genid_ipv6()
561 return atomic_read(&net->fnhe_genid); in fnhe_genid()
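The *_genid() hits are generation counters: cached objects record the counter value at creation time, and the whole cache is invalidated at once by bumping it. A consumer-side sketch; my_cache_entry is hypothetical:

    #include <net/net_namespace.h>

    struct my_cache_entry {
            int genid;              /* rt_genid_ipv4(net) at creation time */
            /* ... cached data ... */
    };

    static bool my_entry_is_stale(struct net *net, struct my_cache_entry *e)
    {
            return atomic_read(&net->ipv4.rt_genid) != e->genid;
    }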
62 return atomic_read(&queue->job_count); in spsc_queue_count()