Searched refs:__this_cpu_read (Results 1 – 25 of 67) sorted by relevance

/arch/sparc/kernel/
nmi.c
102 if (__this_cpu_read(nmi_touch)) { in perfctr_irq()
106 if (!touched && __this_cpu_read(last_irq_sum) == sum) { in perfctr_irq()
108 if (__this_cpu_read(alert_counter) == 30 * nmi_hz) in perfctr_irq()
115 if (__this_cpu_read(wd_enabled)) { in perfctr_irq()
155 if (!__this_cpu_read(wd_enabled)) in stop_nmi_watchdog()
212 if (__this_cpu_read(wd_enabled)) in start_nmi_watchdog()
226 if (!__this_cpu_read(wd_enabled)) in nmi_adjust_hz_one()
/arch/x86/include/asm/
irq_stack.h
12 return __this_cpu_read(irq_count) != -1; in irqstack_active()
23 void *tos = __this_cpu_read(hardirq_stack_ptr); in __run_on_irqstack()
34 void *tos = __this_cpu_read(hardirq_stack_ptr); in __run_sysvec_on_irqstack()
45 void *tos = __this_cpu_read(hardirq_stack_ptr); in __run_irq_on_irqstack()
cpu_entry_area.h
152 CEA_ESTACK_TOP(__this_cpu_read(cea_exception_stacks), name)
155 CEA_ESTACK_BOT(__this_cpu_read(cea_exception_stacks), name)
hardirq.h
77 return __this_cpu_read(irq_stat.kvm_cpu_l1tf_flush_l1d); in kvm_get_cpu_l1tf_flush_l1d()
debugreg.h
112 return __this_cpu_read(cpu_dr7) & DR_GLOBAL_ENABLE_MASK; in hw_breakpoint_active()
smp.h
159 #define __smp_processor_id() __this_cpu_read(cpu_number)
/arch/mips/kernel/
mips-r2-to-r6-emul.c
2243 (unsigned long)__this_cpu_read(mipsr2emustats.movs), in mipsr2_emul_show()
2244 (unsigned long)__this_cpu_read(mipsr2bdemustats.movs)); in mipsr2_emul_show()
2246 (unsigned long)__this_cpu_read(mipsr2emustats.hilo), in mipsr2_emul_show()
2247 (unsigned long)__this_cpu_read(mipsr2bdemustats.hilo)); in mipsr2_emul_show()
2249 (unsigned long)__this_cpu_read(mipsr2emustats.muls), in mipsr2_emul_show()
2250 (unsigned long)__this_cpu_read(mipsr2bdemustats.muls)); in mipsr2_emul_show()
2252 (unsigned long)__this_cpu_read(mipsr2emustats.divs), in mipsr2_emul_show()
2253 (unsigned long)__this_cpu_read(mipsr2bdemustats.divs)); in mipsr2_emul_show()
2255 (unsigned long)__this_cpu_read(mipsr2emustats.dsps), in mipsr2_emul_show()
2256 (unsigned long)__this_cpu_read(mipsr2bdemustats.dsps)); in mipsr2_emul_show()
[all …]
/arch/x86/kernel/cpu/mce/
intel.c
132 if (__this_cpu_read(cmci_storm_state) == CMCI_STORM_NONE) in mce_intel_cmci_poll()
179 (__this_cpu_read(cmci_storm_state) == CMCI_STORM_ACTIVE)) { in cmci_intel_adjust_timer()
184 switch (__this_cpu_read(cmci_storm_state)) { in cmci_intel_adjust_timer()
218 unsigned int cnt = __this_cpu_read(cmci_storm_cnt); in cmci_storm_detect()
219 unsigned long ts = __this_cpu_read(cmci_time_stamp); in cmci_storm_detect()
223 if (__this_cpu_read(cmci_storm_state) != CMCI_STORM_NONE) in cmci_storm_detect()
/arch/x86/kernel/
hw_breakpoint.c
485 set_debugreg(__this_cpu_read(cpu_debugreg[0]), 0); in hw_breakpoint_restore()
486 set_debugreg(__this_cpu_read(cpu_debugreg[1]), 1); in hw_breakpoint_restore()
487 set_debugreg(__this_cpu_read(cpu_debugreg[2]), 2); in hw_breakpoint_restore()
488 set_debugreg(__this_cpu_read(cpu_debugreg[3]), 3); in hw_breakpoint_restore()
490 set_debugreg(__this_cpu_read(cpu_dr7), 7); in hw_breakpoint_restore()
irq_32.c
79 irqstk = __this_cpu_read(hardirq_stack_ptr); in execute_on_irq_stack()
139 irqstk = __this_cpu_read(softirq_stack_ptr); in do_softirq_own_stack()
irq.c
247 desc = __this_cpu_read(vector_irq[vector]); in DEFINE_IDTENTRY_IRQ()
358 if (IS_ERR_OR_NULL(__this_cpu_read(vector_irq[vector]))) in fixup_irqs()
363 desc = __this_cpu_read(vector_irq[vector]); in fixup_irqs()
374 if (__this_cpu_read(vector_irq[vector]) != VECTOR_RETRIGGERED) in fixup_irqs()
kvm.c
245 if (__this_cpu_read(apf_reason.enabled)) { in kvm_read_and_reset_apf_flags()
246 flags = __this_cpu_read(apf_reason.flags); in kvm_read_and_reset_apf_flags()
296 if (__this_cpu_read(apf_reason.enabled)) { in DEFINE_IDTENTRY_SYSVEC()
297 token = __this_cpu_read(apf_reason.token); in DEFINE_IDTENTRY_SYSVEC()
384 if (!__this_cpu_read(apf_reason.enabled)) in kvm_pv_disable_apf()
nmi.c
325 if (regs->ip == __this_cpu_read(last_nmi_rip)) in default_do_nmi()
413 if (b2b && __this_cpu_read(swallow_nmi)) in default_do_nmi()
dumpstack_64.c
97 begin = (unsigned long)__this_cpu_read(cea_exception_stacks); in in_exception_stack()
/arch/arm64/kvm/hyp/vhe/
switch.c
70 write_sysreg(__this_cpu_read(kvm_hyp_vector), vbar_el1); in __activate_traps()
92 host_vectors = __this_cpu_read(this_cpu_vector); in __deactivate_traps()
/arch/arm64/include/asm/
arch_timer.h
27 __wa = __this_cpu_read(timer_unstable_counter_workaround); \
34 __wa = __this_cpu_read(timer_unstable_counter_workaround); \
percpu.h
255 #undef __this_cpu_read
256 #define __this_cpu_read raw_cpu_read macro
/arch/powerpc/kernel/
mce.c
194 int index = __this_cpu_read(mce_nest_count) - 1; in get_mce_event()
292 while (__this_cpu_read(mce_ue_count) > 0) { in machine_process_ue_event()
293 index = __this_cpu_read(mce_ue_count) - 1; in machine_process_ue_event()
341 while (__this_cpu_read(mce_queue_count) > 0) { in machine_check_process_queued_event()
342 index = __this_cpu_read(mce_queue_count) - 1; in machine_check_process_queued_event()
/arch/x86/oprofile/
op_model_ppro.c
87 __this_cpu_read(cpu_info.x86) == 6 && in ppro_setup_ctrs()
88 __this_cpu_read(cpu_info.x86_model) == 15)) { in ppro_setup_ctrs()
/arch/ia64/include/asm/
hw_irq.h
165 return __this_cpu_read(vector_irq[vec]); in local_vector_to_irq()
/arch/x86/xen/
time.c
52 src = &__this_cpu_read(xen_vcpu)->time; in xen_clocksource_read()
512 pvti = &__this_cpu_read(xen_vcpu)->time; in xen_time_init()
586 if (!__this_cpu_read(xen_vcpu)) { in xen_hvm_init_time_ops()
spinlock.c
39 int irq = __this_cpu_read(lock_kicker_irq); in xen_qlock_wait()
/arch/x86/kernel/acpi/
sleep.c
92 if (__this_cpu_read(cpu_info.cpuid_level) >= 0) { in x86_acpi_suspend_lowlevel()
/arch/powerpc/sysdev/xive/
common.c
310 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_get_irq()
415 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_irq_eoi()
1111 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_ipi_eoi()
1373 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_setup_cpu()
1466 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_smp_disable_cpu()
1486 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_flush_interrupt()
1499 struct xive_cpu *xc = __this_cpu_read(xive_cpu); in xive_teardown_cpu()
/arch/powerpc/perf/
hv-24x7.c
1469 txn_flags = __this_cpu_read(hv_24x7_txn_flags); in h_24x7_event_read()
1482 if (__this_cpu_read(hv_24x7_txn_err)) in h_24x7_event_read()
1540 WARN_ON_ONCE(__this_cpu_read(hv_24x7_txn_flags)); in h_24x7_event_start_txn()
1585 txn_flags = __this_cpu_read(hv_24x7_txn_flags); in h_24x7_event_commit_txn()
1592 ret = __this_cpu_read(hv_24x7_txn_err); in h_24x7_event_commit_txn()
1636 WARN_ON_ONCE(!__this_cpu_read(hv_24x7_txn_flags)); in h_24x7_event_cancel_txn()
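Note: the arch/arm64/include/asm/percpu.h hits above show that arm64 simply remaps __this_cpu_read onto raw_cpu_read. As general orientation for these results, below is a minimal sketch (not taken from any file listed here; the variable demo_irq_count and function demo_read_local_count are hypothetical) of how a per-CPU variable is declared and read with __this_cpu_read. The macro assumes the caller already cannot migrate between CPUs, which is why most hits above sit in IRQ, NMI, or otherwise preemption-disabled paths.

#include <linux/percpu.h>
#include <linux/preempt.h>

/* Hypothetical per-CPU counter, for illustration only. */
static DEFINE_PER_CPU(unsigned long, demo_irq_count);

static unsigned long demo_read_local_count(void)
{
	unsigned long val;

	/*
	 * __this_cpu_read() does not disable preemption itself; it
	 * expects the caller to already be pinned to one CPU. Here we
	 * disable preemption explicitly; this_cpu_read() would do the
	 * equivalent internally.
	 */
	preempt_disable();
	val = __this_cpu_read(demo_irq_count);
	preempt_enable();

	return val;
}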
