Searched refs: this_cpu_ptr (Results 1 – 25 of 350), sorted by relevance
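Every hit below follows the same per-CPU access pattern, so here is a minimal, hedged sketch of it for orientation. It is not taken from any of the files listed; hit_count and record_hit are invented names, and the sketch assumes only the standard <linux/percpu.h> helpers plus preemption being disabled around the access so the CPU cannot change underneath the caller.

#include <linux/percpu.h>
#include <linux/preempt.h>

/* One instance of this counter exists for each possible CPU. */
static DEFINE_PER_CPU(unsigned long, hit_count);

static void record_hit(void)
{
	unsigned long *cnt;

	preempt_disable();			/* pin the task to this CPU */
	cnt = this_cpu_ptr(&hit_count);		/* pointer to this CPU's hit_count */
	(*cnt)++;
	preempt_enable();
}

Most of the hits that follow are exactly this shape: resolve the current CPU's instance with this_cpu_ptr() from a context that already cannot migrate (hard IRQ, timer callback, or an explicit preempt_disable() section), then operate on it directly.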


/kernel/linux/linux-5.10/kernel/
irq_work.c
57 if (llist_add(&work->llnode, this_cpu_ptr(&lazy_list)) && in __irq_work_queue_local()
61 if (llist_add(&work->llnode, this_cpu_ptr(&raised_list))) in __irq_work_queue_local()
120 raised = this_cpu_ptr(&raised_list); in irq_work_needs_cpu()
121 lazy = this_cpu_ptr(&lazy_list); in irq_work_needs_cpu()
179 irq_work_run_list(this_cpu_ptr(&raised_list)); in irq_work_run()
180 irq_work_run_list(this_cpu_ptr(&lazy_list)); in irq_work_run()
186 struct llist_head *raised = this_cpu_ptr(&raised_list); in irq_work_tick()
190 irq_work_run_list(this_cpu_ptr(&lazy_list)); in irq_work_tick()
watchdog.c
336 complete(this_cpu_ptr(&softlockup_completion)); in softlockup_fn()
356 if (completion_done(this_cpu_ptr(&softlockup_completion))) { in watchdog_timer_fn()
357 reinit_completion(this_cpu_ptr(&softlockup_completion)); in watchdog_timer_fn()
360 this_cpu_ptr(&softlockup_stop_work)); in watchdog_timer_fn()
435 struct hrtimer *hrtimer = this_cpu_ptr(&watchdog_hrtimer); in watchdog_enable()
436 struct completion *done = this_cpu_ptr(&softlockup_completion); in watchdog_enable()
437 unsigned int *enabled = this_cpu_ptr(&watchdog_en); in watchdog_enable()
user-return-notifier.c
18 hlist_add_head(&urn->link, this_cpu_ptr(&return_notifier_list)); in user_return_notifier_register()
29 if (hlist_empty(this_cpu_ptr(&return_notifier_list))) in user_return_notifier_unregister()
/kernel/linux/linux-5.10/arch/powerpc/include/asm/
idle.h
14 *this_cpu_ptr(&idle_entry_purr_snap) = mfspr(SPRN_PURR); in snapshot_purr_idle_entry()
19 *this_cpu_ptr(&idle_entry_spurr_snap) = mfspr(SPRN_SPURR); in snapshot_spurr_idle_entry()
25 u64 in_purr = *this_cpu_ptr(&idle_entry_purr_snap); in update_idle_purr_accounting()
34 u64 *idle_spurr_cycles_ptr = this_cpu_ptr(&idle_spurr_cycles); in update_idle_spurr_accounting()
35 u64 in_spurr = *this_cpu_ptr(&idle_entry_spurr_snap); in update_idle_spurr_accounting()
89 return *this_cpu_ptr(&idle_spurr_cycles); in read_this_idle_spurr()
xics.h
110 struct xics_cppr *os_cppr = this_cpu_ptr(&xics_cppr); in xics_push_cppr()
123 struct xics_cppr *os_cppr = this_cpu_ptr(&xics_cppr); in xics_pop_cppr()
133 struct xics_cppr *os_cppr = this_cpu_ptr(&xics_cppr); in xics_set_base_cppr()
145 struct xics_cppr *os_cppr = this_cpu_ptr(&xics_cppr); in xics_cppr_top()
/kernel/linux/linux-5.10/include/linux/
local_lock_internal.h
66 local_lock_acquire(this_cpu_ptr(lock)); \
72 local_lock_acquire(this_cpu_ptr(lock)); \
78 local_lock_acquire(this_cpu_ptr(lock)); \
83 local_lock_release(this_cpu_ptr(lock)); \
89 local_lock_release(this_cpu_ptr(lock)); \
95 local_lock_release(this_cpu_ptr(lock)); \
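The local_lock_internal.h hits above are the macro bodies behind local_lock()/local_unlock(): the lock and unlock paths disable preemption or interrupts and then hand this_cpu_ptr(lock) to the acquire/release annotations. A hedged caller-side sketch, assuming the v5.10 <linux/local_lock.h> API; frob_data and frob_set are invented names, not from the tree:

#include <linux/local_lock.h>
#include <linux/percpu.h>

struct frob_data {
	local_lock_t lock;
	unsigned long value;
};

static DEFINE_PER_CPU(struct frob_data, frob_data) = {
	.lock = INIT_LOCAL_LOCK(lock),
};

static void frob_set(unsigned long v)
{
	local_lock(&frob_data.lock);		/* ends up in the acquire lines above */
	this_cpu_ptr(&frob_data)->value = v;	/* safe: CPU cannot change while held */
	local_unlock(&frob_data.lock);
}

On non-PREEMPT_RT builds local_lock() is essentially preempt_disable() plus the lockdep annotation shown in the hits; on PREEMPT_RT it can map to a real per-CPU lock, which is why callers name the lock instead of open-coding preempt_disable().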
/kernel/linux/linux-5.10/arch/x86/kernel/cpu/mce/
intel.c
139 if (machine_check_poll(0, this_cpu_ptr(&mce_banks_owned))) in mce_intel_cmci_poll()
162 owned = this_cpu_ptr(mce_banks_owned); in cmci_toggle_interrupt_mode()
259 machine_check_poll(MCP_TIMESTAMP, this_cpu_ptr(&mce_banks_owned)); in intel_threshold_interrupt()
269 unsigned long *owned = (void *)this_cpu_ptr(&mce_banks_owned); in cmci_discover()
291 __clear_bit(i, this_cpu_ptr(mce_poll_banks)); in cmci_discover()
315 __clear_bit(i, this_cpu_ptr(mce_poll_banks)); in cmci_discover()
326 WARN_ON(!test_bit(i, this_cpu_ptr(mce_poll_banks))); in cmci_discover()
351 machine_check_poll(0, this_cpu_ptr(&mce_banks_owned)); in cmci_recheck()
360 if (!test_bit(bank, this_cpu_ptr(mce_banks_owned))) in __cmci_disable_bank()
365 __clear_bit(bank, this_cpu_ptr(mce_banks_owned)); in __cmci_disable_bank()
core.c
392 ret = *(u64 *)((char *)this_cpu_ptr(&injectm) + offset); in mce_rdmsrl()
443 *(u64 *)((char *)this_cpu_ptr(&injectm) + offset) = v; in mce_wrmsrl()
723 struct mce_bank *mce_banks = this_cpu_ptr(mce_banks_array); in machine_check_poll()
1169 struct mce_bank *mce_banks = this_cpu_ptr(mce_banks_array); in __mc_scan_banks()
1355 final = this_cpu_ptr(&mces_seen); in do_machine_check()
1523 struct timer_list *cpu_t = this_cpu_ptr(&mce_timer); in mce_timer_fn()
1530 if (mce_available(this_cpu_ptr(&cpu_info))) { in mce_timer_fn()
1531 machine_check_poll(0, this_cpu_ptr(&mce_poll_banks)); in mce_timer_fn()
1558 struct timer_list *t = this_cpu_ptr(&mce_timer); in mce_timer_kick()
1600 struct mce_bank *mce_banks = this_cpu_ptr(mce_banks_array); in __mcheck_cpu_mce_banks_init()
[all …]
/kernel/linux/linux-5.10/net/core/
dst_cache.c
68 return dst_cache_per_cpu_get(dst_cache, this_cpu_ptr(dst_cache->cache)); in dst_cache_get()
80 idst = this_cpu_ptr(dst_cache->cache); in dst_cache_get_ip4()
98 idst = this_cpu_ptr(dst_cache->cache); in dst_cache_set_ip4()
113 idst = this_cpu_ptr(dst_cache->cache); in dst_cache_set_ip6()
114 dst_cache_per_cpu_dst_set(this_cpu_ptr(dst_cache->cache), dst, in dst_cache_set_ip6()
129 idst = this_cpu_ptr(dst_cache->cache); in dst_cache_get_ip6()
/kernel/linux/linux-5.10/kernel/time/
tick-sched.c
262 irq_work_queue(this_cpu_ptr(&nohz_full_kick_work)); in tick_nohz_full_kick()
410 ts = this_cpu_ptr(&tick_cpu_sched); in __tick_nohz_task_switch()
504 struct tick_sched *ts = this_cpu_ptr(&tick_cpu_sched); in tick_nohz_tick_stopped()
994 __tick_nohz_idle_stop_tick(this_cpu_ptr(&tick_cpu_sched)); in tick_nohz_idle_stop_tick()
999 tick_nohz_retain_tick(this_cpu_ptr(&tick_cpu_sched)); in tick_nohz_idle_retain_tick()
1020 ts = this_cpu_ptr(&tick_cpu_sched); in tick_nohz_idle_enter()
1040 struct tick_sched *ts = this_cpu_ptr(&tick_cpu_sched); in tick_nohz_irq_exit()
1053 struct tick_sched *ts = this_cpu_ptr(&tick_cpu_sched); in tick_nohz_idle_got_tick()
1083 struct tick_sched *ts = this_cpu_ptr(&tick_cpu_sched); in tick_nohz_get_sleep_length()
1133 struct tick_sched *ts = this_cpu_ptr(&tick_cpu_sched); in tick_nohz_get_idle_calls()
[all …]
/kernel/linux/linux-5.10/arch/xtensa/kernel/
hw_breakpoint.c
177 i = alloc_slot(this_cpu_ptr(bp_on_reg), XCHAL_NUM_IBREAK, bp); in arch_install_hw_breakpoint()
184 i = alloc_slot(this_cpu_ptr(wp_on_reg), XCHAL_NUM_DBREAK, bp); in arch_install_hw_breakpoint()
215 i = free_slot(this_cpu_ptr(bp_on_reg), XCHAL_NUM_IBREAK, bp); in arch_uninstall_hw_breakpoint()
223 i = free_slot(this_cpu_ptr(wp_on_reg), XCHAL_NUM_DBREAK, bp); in arch_uninstall_hw_breakpoint()
268 struct perf_event *bp = this_cpu_ptr(wp_on_reg)[i]; in restore_dbreak()
280 struct perf_event **bp = this_cpu_ptr(bp_on_reg); in check_hw_breakpoint()
289 struct perf_event **bp = this_cpu_ptr(wp_on_reg); in check_hw_breakpoint()
/kernel/linux/linux-5.10/arch/x86/events/intel/
bts.c
228 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in __bts_event_start()
256 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_start()
257 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in bts_event_start()
287 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in __bts_event_stop()
301 struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events); in bts_event_stop()
302 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in bts_event_stop()
335 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in intel_bts_enable_local()
355 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in intel_bts_disable_local()
446 struct debug_store *ds = this_cpu_ptr(&cpu_hw_events)->ds; in intel_bts_interrupt()
447 struct bts_ctx *bts = this_cpu_ptr(&bts_ctx); in intel_bts_interrupt()
[all …]
/kernel/linux/linux-5.10/arch/s390/kernel/
perf_cpum_cf.c
123 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_enable()
146 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_disable()
384 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_start()
415 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_stop()
436 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_add()
460 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_del()
488 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_start_txn()
508 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_cancel_txn()
529 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_pmu_commit_txn()
perf_cpum_cf_diag.c
109 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_trailer()
132 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_enable()
158 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_disable()
371 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_getctrset()
523 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_start()
524 struct cf_diag_csd *csd = this_cpu_ptr(&cf_diag_csd); in cf_diag_start()
546 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_stop()
547 struct cf_diag_csd *csd = this_cpu_ptr(&cf_diag_csd); in cf_diag_stop()
566 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_add()
590 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cf_diag_del()
perf_cpum_cf_common.c
48 cpuhw = this_cpu_ptr(&cpu_cf_events); in cpumf_measurement_alert()
76 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in cpum_cf_setup_cpu()
130 struct cpu_cf_events *cpuhw = this_cpu_ptr(&cpu_cf_events); in kernel_cpumcf_alert()
/kernel/linux/linux-5.10/arch/arm/kernel/
perf_event_xscale.c
149 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_handle_irq()
209 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_enable_event()
245 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_disable_event()
304 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_start()
316 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale1pmu_stop()
501 struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_handle_irq()
555 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_enable_event()
601 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_disable_event()
667 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_start()
679 struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events); in xscale2pmu_stop()
/kernel/linux/linux-5.10/arch/arm64/kernel/
hw_breakpoint.c
236 slots = this_cpu_ptr(bp_on_reg); in hw_breakpoint_control()
243 slots = this_cpu_ptr(wp_on_reg); in hw_breakpoint_control()
588 slots = this_cpu_ptr(bp_on_reg); in toggle_bp_registers()
592 slots = this_cpu_ptr(wp_on_reg); in toggle_bp_registers()
630 slots = this_cpu_ptr(bp_on_reg); in breakpoint_handler()
680 kernel_step = this_cpu_ptr(&stepping_kernel_bp); in breakpoint_handler()
765 slots = this_cpu_ptr(wp_on_reg); in watchpoint_handler()
831 kernel_step = this_cpu_ptr(&stepping_kernel_bp); in watchpoint_handler()
856 kernel_step = this_cpu_ptr(&stepping_kernel_bp); in reinstall_suspended_bps()
955 for (slots = this_cpu_ptr(bp_on_reg), i = 0; i < core_num_brps; ++i) { in hw_breakpoint_reset()
[all …]
fpsimd.c
303 this_cpu_ptr(&fpsimd_last_state); in fpsimd_save()
1107 this_cpu_ptr(&fpsimd_last_state); in fpsimd_bind_task_to_cpu()
1130 this_cpu_ptr(&fpsimd_last_state); in fpsimd_bind_state_to_cpu()
1341 char *sve_state = this_cpu_ptr(efi_sve_state); in __efi_fpsimd_begin()
1346 &this_cpu_ptr(&efi_fpsimd_state)->fpsr); in __efi_fpsimd_begin()
1348 fpsimd_save_state(this_cpu_ptr(&efi_fpsimd_state)); in __efi_fpsimd_begin()
1368 char const *sve_state = this_cpu_ptr(efi_sve_state); in __efi_fpsimd_end()
1371 &this_cpu_ptr(&efi_fpsimd_state)->fpsr, in __efi_fpsimd_end()
1376 fpsimd_load_state(this_cpu_ptr(&efi_fpsimd_state)); in __efi_fpsimd_end()
/kernel/linux/linux-5.10/net/netfilter/
nft_counter.c
36 this_cpu = this_cpu_ptr(priv->counter); in nft_counter_do_eval()
37 myseq = this_cpu_ptr(&nft_counter_seq); in nft_counter_do_eval()
68 this_cpu = this_cpu_ptr(cpu_stats); in nft_counter_do_init()
110 this_cpu = this_cpu_ptr(priv->counter); in nft_counter_reset()
242 this_cpu = this_cpu_ptr(cpu_stats); in nft_counter_clone()
/kernel/linux/linux-5.10/arch/x86/xen/
multicalls.c
58 struct mc_buffer *b = this_cpu_ptr(&mc_buffer); in xen_mc_flush()
140 struct mc_buffer *b = this_cpu_ptr(&mc_buffer); in __xen_mc_entry()
171 struct mc_buffer *b = this_cpu_ptr(&mc_buffer); in xen_mc_extend_args()
201 struct mc_buffer *b = this_cpu_ptr(&mc_buffer); in xen_mc_callback()
/kernel/linux/linux-5.10/arch/powerpc/platforms/powernv/
opal-tracepoints.c
55 depth = this_cpu_ptr(&opal_trace_depth); in __trace_opal_entry()
76 depth = this_cpu_ptr(&opal_trace_depth); in __trace_opal_exit()
/kernel/linux/linux-5.10/arch/sh/kernel/
kprobes.c
102 struct kprobe *saved = this_cpu_ptr(&saved_next_opcode); in arch_remove_kprobe()
111 saved = this_cpu_ptr(&saved_next_opcode2); in arch_remove_kprobe()
153 op1 = this_cpu_ptr(&saved_next_opcode); in prepare_singlestep()
154 op2 = this_cpu_ptr(&saved_next_opcode2); in prepare_singlestep()
326 p = this_cpu_ptr(&saved_next_opcode); in post_kprobe_handler()
338 p = this_cpu_ptr(&saved_next_opcode2); in post_kprobe_handler()
/kernel/linux/linux-5.10/lib/
irq_poll.c
37 list_add_tail(&iop->list, this_cpu_ptr(&blk_cpu_iopoll)); in irq_poll_sched()
80 struct list_head *list = this_cpu_ptr(&blk_cpu_iopoll); in irq_poll_softirq()
196 this_cpu_ptr(&blk_cpu_iopoll)); in irq_poll_cpu_dead()
/kernel/linux/linux-5.10/arch/arm64/include/asm/
hardirq.h
39 ___ctx = this_cpu_ptr(&nmi_contexts); \
72 ___ctx = this_cpu_ptr(&nmi_contexts); \
/kernel/linux/linux-5.10/kernel/irq/
matrix.c
80 struct cpumap *cm = this_cpu_ptr(m->maps); in irq_matrix_online()
101 struct cpumap *cm = this_cpu_ptr(m->maps); in irq_matrix_offline()
184 struct cpumap *cm = this_cpu_ptr(m->maps); in irq_matrix_assign_system()
323 struct cpumap *cm = this_cpu_ptr(m->maps); in irq_matrix_assign()
452 struct cpumap *cm = this_cpu_ptr(m->maps); in irq_matrix_available()
476 struct cpumap *cm = this_cpu_ptr(m->maps); in irq_matrix_allocated()
