/arch/mips/kernel/

  sync-r4k.c
    49   while (atomic_read(&count_count_start) != 1)  in synchronise_count_master()
    70   while (atomic_read(&count_count_stop) != 1)  in synchronise_count_master()
    100  while (atomic_read(&count_count_start) != 2)  in synchronise_count_slave()
    110  while (atomic_read(&count_count_stop) != 2)  in synchronise_count_slave()

  spinlock_test.c
    66   while (atomic_read(&s->enter_wait))  in multi_other()
    73   while (atomic_read(&s->start_wait))  in multi_other()
    85   while (atomic_read(&s->exit_wait))  in multi_other()

/arch/x86/kernel/

  tsc_sync.c
    146  while (atomic_read(&start_count) != cpus-1)  in check_tsc_sync_source()
    155  while (atomic_read(&stop_count) != cpus-1)  in check_tsc_sync_source()
    199  while (atomic_read(&start_count) != cpus)  in check_tsc_sync_target()
    212  while (atomic_read(&stop_count) != cpus)  in check_tsc_sync_target()

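The sync-r4k.c and tsc_sync.c hits above are all the same two-counter rendezvous idiom: each side bumps a shared atomic counter, then spins on atomic_read() until every participant has checked in, once before and once after the timed region. Below is a minimal user-space sketch of that idiom, not the kernel code itself; C11 atomics stand in for atomic_t, and the names (rendezvous, peer) plus the pthread harness are purely illustrative.

/* Two-counter rendezvous sketch modelled on sync-r4k.c / tsc_sync.c. */
#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

static atomic_int start_count;	/* participants that reached the start barrier */
static atomic_int stop_count;	/* participants that reached the stop barrier  */

static void rendezvous(int cpus)
{
	atomic_fetch_add(&start_count, 1);
	while (atomic_load(&start_count) != cpus)	/* ~ while (atomic_read(...) != cpus) */
		;

	/* ...both sides would sample their clocks/counters here... */

	atomic_fetch_add(&stop_count, 1);
	while (atomic_load(&stop_count) != cpus)
		;
}

static void *peer(void *arg)
{
	(void)arg;
	rendezvous(2);
	return NULL;
}

int main(void)
{
	pthread_t t;

	pthread_create(&t, NULL, peer, NULL);
	rendezvous(2);		/* the "master" side of the handshake */
	pthread_join(t, NULL);
	puts("both sides passed the rendezvous");
	return 0;
}

Build with cc -pthread; the program exits only after both threads have passed both barriers, which is exactly what the kernel loops listed above wait for.
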
/arch/x86/platform/uv/

  uv_nmi.c
    222  nmi = atomic_read(&hub_nmi->in_nmi);  in uv_check_nmi()
    244  nmi = atomic_read(&hub_nmi->in_nmi);  in uv_check_nmi()
    251  nmi = atomic_read(&uv_in_nmi);  in uv_check_nmi()
    269  if (cpu == atomic_read(&hub_nmi->cpu_owner)) {  in uv_clear_nmi()
    377  atomic_read(&uv_nmi_cpus_in_nmi), num_online_cpus());  in uv_nmi_wait()
    445  while (atomic_read(&uv_nmi_cpus_in_nmi) > 0)  in uv_nmi_sync_exit()
    449  while (atomic_read(&uv_nmi_slave_continue))  in uv_nmi_sync_exit()
    464  atomic_read(&uv_nmi_cpus_in_nmi), cpu);  in uv_nmi_dump_state()
    482  while (!atomic_read(&uv_nmi_slave_continue))  in uv_nmi_dump_state()
    519  while (atomic_read(&uv_nmi_kexec_failed) == 0)  in uv_nmi_kdump()
    [all …]

/arch/m68k/include/asm/

  atomic.h
    20   #define atomic_read(v) READ_ONCE((v)->counter)  macro
    52   : "g" (i), "2" (atomic_read(v))); \
    67   : "g" (i), "2" (atomic_read(v))); \
    171  prev = atomic_read(v);  in atomic_cmpxchg()
    184  prev = atomic_read(v);  in atomic_xchg()
    216  c = atomic_read(v);  in __atomic_add_unless()

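The __atomic_add_unless() hits here (and in the sh, x86, xtensa and m32r atomic.h entries below) all land in the same generic shape: read the counter once, then retry an atomic_cmpxchg() until it either succeeds or the excluded value is observed. The sketch below shows that loop in a self-contained, user-space form; the atomic_t/atomic_read/atomic_cmpxchg stand-ins and the name atomic_add_unless_sketch are illustrative, not the kernel's definitions.

/* User-space stand-ins so the sketch compiles on its own; the kernel's real
 * definitions live in the atomic.h files listed in this section. */
#include <stdatomic.h>

typedef struct { atomic_int counter; } atomic_t;
#define atomic_read(v)	atomic_load(&(v)->counter)

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	atomic_compare_exchange_strong(&v->counter, &old, new);
	return old;	/* on failure, "old" now holds the value actually seen */
}

/* Add "a" to *v unless it currently equals "u"; return the value seen.
 * This mirrors the read-then-retry-cmpxchg loop the hits above point at
 * (simplified: no likely()/unlikely() hints). */
static inline int atomic_add_unless_sketch(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (c == u)
			break;		/* excluded value seen: do not modify */
		old = atomic_cmpxchg(v, c, c + a);
		if (old == c)
			break;		/* cmpxchg succeeded */
		c = old;		/* raced with another update: retry */
	}
	return c;
}

int main(void)
{
	atomic_t refs = { 1 };

	/* 1 != 0, so the add goes through and the old value (1) comes back. */
	return atomic_add_unless_sketch(&refs, 1, 0) == 1 ? 0 : 1;
}
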
/arch/powerpc/platforms/powermac/

  backlight.c
    102  if (atomic_read(&kernel_backlight_disabled))  in pmac_backlight_key_worker()
    130  if (atomic_read(&kernel_backlight_disabled))  in pmac_backlight_key()
    169  if (atomic_read(&kernel_backlight_disabled))  in pmac_backlight_set_legacy_worker()
    177  if (atomic_read(&kernel_backlight_disabled))  in pmac_backlight_set_legacy_brightness_pmu()

/arch/sh/kernel/

  ftrace.c
    108  atomic_read(&nmi_update_count));  in ftrace_arch_read_dyn_info()
    114  int old = atomic_read(&nmi_running);  in clear_mod_flag()
    174  if (!atomic_read(&nmi_running))  in wait_for_nmi()
    179  } while (atomic_read(&nmi_running));  in wait_for_nmi()
    348  if (unlikely(atomic_read(&current->tracing_graph_pause)))  in prepare_ftrace_return()

/arch/arm/mach-exynos/

  pm.c
    217  if (atomic_read(&cpu1_wakeup))  in exynos_cpu0_enter_aftr()
    260  !atomic_read(&cpu1_wakeup))  in exynos_cpu0_enter_aftr()
    263  if (!atomic_read(&cpu1_wakeup))  in exynos_cpu0_enter_aftr()
    267  while (!atomic_read(&cpu1_wakeup)) {  in exynos_cpu0_enter_aftr()

/arch/powerpc/kernel/

  crash.c
    124  while ((atomic_read(&cpus_in_crash) < ncpus) && (--msecs > 0))  in crash_kexec_prepare_cpus()
    129  if (atomic_read(&cpus_in_crash) >= ncpus) {  in crash_kexec_prepare_cpus()
    135  ncpus - atomic_read(&cpus_in_crash));  in crash_kexec_prepare_cpus()
    166  while (atomic_read(&cpus_in_crash) < ncpus)  in crash_kexec_prepare_cpus()

  rtas.c
    756  while (rc == H_MULTI_THREADS_ACTIVE && !atomic_read(&data->done) &&  in __rtas_suspend_last_cpu()
    757  !atomic_read(&data->error))  in __rtas_suspend_last_cpu()
    760  if (rc || atomic_read(&data->error)) {  in __rtas_suspend_last_cpu()
    765  if (atomic_read(&data->error))  in __rtas_suspend_last_cpu()
    766  rc = atomic_read(&data->error);  in __rtas_suspend_last_cpu()
    802  while (rc == H_SUCCESS && !atomic_read(&data->done) && !atomic_read(&data->error))  in __rtas_suspend_cpu()
    993  if (atomic_read(&data.error) != 0)  in rtas_ibm_suspend_me()
    1006 return atomic_read(&data.error);  in rtas_ibm_suspend_me()

/arch/sh/include/asm/

  atomic.h
    23   #define atomic_read(v) READ_ONCE((v)->counter)  macro
    59   c = atomic_read(v);  in __atomic_add_unless()

/arch/x86/include/asm/

  atomic.h
    24   static __always_inline int atomic_read(const atomic_t *v)  in atomic_read()  function
    206  int old, val = atomic_read(v); \
    240  c = atomic_read(v);  in __atomic_add_unless()

  mutex_32.h
    104  if (likely(atomic_read(count) == 1 && atomic_cmpxchg(count, 1, 0) == 1))  in __mutex_fastpath_trylock()

  qspinlock.h
    56   while (atomic_read(&lock->val) != 0)  in virt_spin_lock()

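The mutex_32.h hit above (and the ia64 mutex.h hit further down) is the classic trylock fastpath: a cheap atomic_read() first, and only if the lock looks free pay for the compare-and-exchange. A small user-space sketch of that read-then-cmpxchg idiom follows; the counter convention (1 = unlocked, 0 = locked) matches the snippet above, while mutex_trylock_sketch and the C11 atomics are illustrative stand-ins, not the kernel API.

#include <stdatomic.h>
#include <stdbool.h>

static atomic_int count = 1;	/* 1 = unlocked, 0 = locked */

static bool mutex_trylock_sketch(void)
{
	int expected = 1;

	/* Cheap load first: if the lock is visibly taken, skip the more
	 * expensive compare-and-exchange entirely. */
	if (atomic_load(&count) != 1)
		return false;
	return atomic_compare_exchange_strong(&count, &expected, 0);
}

int main(void)
{
	return mutex_trylock_sketch() ? 0 : 1;	/* the first attempt should succeed */
}
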
/arch/sparc/kernel/

  nmi.c
    56   if (atomic_read(&nmi_active)) {  in touch_nmi_watchdog()
    166  if (!atomic_read(&nmi_active))  in check_nmi_watchdog()
    191  if (!atomic_read(&nmi_active)) {  in check_nmi_watchdog()

/arch/m68k/kernel/

  irq.c
    37   seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count));  in arch_show_interrupts()

/arch/arm/kernel/

  machine_kexec.c
    126  while ((atomic_read(&waiting_for_crash_ipi) > 0) && msecs) {  in machine_crash_shutdown()
    130  if (atomic_read(&waiting_for_crash_ipi) > 0)  in machine_crash_shutdown()

/arch/mn10300/include/asm/

  atomic.h
    37   #define atomic_read(v) READ_ONCE((v)->counter)  macro
    150  c = atomic_read(v); \

/arch/ia64/include/asm/

  mutex.h
    85   if (atomic_read(count) == 1 && cmpxchg_acq(count, 1, 0) == 1)  in __mutex_fastpath_trylock()

/arch/powerpc/mm/

  icswx.c
    113  if (atomic_read(&mm->mm_users) > 1)  in use_cop()
    153  if (atomic_read(&mm->mm_users) > 1)  in drop_cop()

/arch/sparc/oprofile/

  init.c
    61   if (atomic_read(&nmi_active) <= 0)  in op_nmi_timer_init()

/arch/blackfin/kernel/

  nmi.c
    151  if (!atomic_read(&nmi_touched[this_cpu]))  in check_nmi_wdt_touched()
    160  if (!atomic_read(&nmi_touched[cpu]))  in check_nmi_wdt_touched()

/arch/xtensa/include/asm/

  atomic.h
    50   #define atomic_read(v) READ_ONCE((v)->counter)  macro
    289  c = atomic_read(v);  in ATOMIC_OPS()

/arch/m32r/include/asm/

  atomic.h
    31   #define atomic_read(v) READ_ONCE((v)->counter)  macro
    262  c = atomic_read(v);  in __atomic_add_unless()

/arch/parisc/kernel/

  ftrace.c
    38   if (unlikely(atomic_read(&current->tracing_graph_pause)))  in prepare_ftrace_return()
