
Searched refs:cpu (Results 1 – 25 of 160) sorted by relevance


/include/linux/
topology.h
94 static inline int cpu_to_node(int cpu) in cpu_to_node() argument
96 return per_cpu(numa_node, cpu); in cpu_to_node()
108 static inline void set_cpu_numa_node(int cpu, int node) in set_cpu_numa_node() argument
110 per_cpu(numa_node, cpu) = node; in set_cpu_numa_node()
151 static inline int cpu_to_mem(int cpu) in cpu_to_mem() argument
153 return per_cpu(_numa_mem_, cpu); in cpu_to_mem()
158 static inline void set_cpu_numa_mem(int cpu, int node) in set_cpu_numa_mem() argument
160 per_cpu(_numa_mem_, cpu) = node; in set_cpu_numa_mem()
175 static inline int cpu_to_mem(int cpu) in cpu_to_mem() argument
177 return cpu_to_node(cpu); in cpu_to_mem()
[all …]
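
A minimal usage sketch (not from the indexed sources): cpu_to_node() supplies the node hint for kmalloc_node() so an allocation lands on the CPU's local NUMA node. The helper name alloc_local_buf() is hypothetical.

    #include <linux/slab.h>
    #include <linux/topology.h>

    /* Allocate 'size' bytes on the NUMA node that owns 'cpu'; on non-NUMA
     * builds cpu_to_node() simply reports node 0. */
    static void *alloc_local_buf(int cpu, size_t size)
    {
            return kmalloc_node(size, GFP_KERNEL, cpu_to_node(cpu));
    }
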
arch_topology.h
15 bool topology_parse_cpu_capacity(struct device_node *cpu_node, int cpu);
19 static inline unsigned long topology_get_cpu_scale(int cpu) in topology_get_cpu_scale() argument
21 return per_cpu(cpu_scale, cpu); in topology_get_cpu_scale()
24 void topology_set_cpu_scale(unsigned int cpu, unsigned long capacity);
28 static inline unsigned long topology_get_freq_ref(int cpu) in topology_get_freq_ref() argument
30 return per_cpu(capacity_freq_ref, cpu); in topology_get_freq_ref()
35 static inline unsigned long topology_get_freq_scale(int cpu) in topology_get_freq_scale() argument
37 return per_cpu(arch_freq_scale, cpu); in topology_get_freq_scale()
62 static inline unsigned long topology_get_hw_pressure(int cpu) in topology_get_hw_pressure() argument
64 return per_cpu(hw_pressure, cpu); in topology_get_hw_pressure()
[all …]
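
A small sketch of querying a CPU's capacity with topology_get_cpu_scale(); the logging helper is hypothetical, and the note about 1024 assumes the usual SCHED_CAPACITY_SCALE normalisation.

    #include <linux/arch_topology.h>
    #include <linux/printk.h>

    /* Print the relative compute capacity of a CPU; the biggest CPU in the
     * system is normalised to SCHED_CAPACITY_SCALE (1024). */
    static void log_cpu_capacity(int cpu)
    {
            pr_info("cpu %d capacity=%lu\n", cpu, topology_get_cpu_scale(cpu));
    }
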
cpumask.h
132 static __always_inline void cpu_max_bits_warn(unsigned int cpu, unsigned int bits) in cpu_max_bits_warn() argument
135 WARN_ON_ONCE(cpu >= bits); in cpu_max_bits_warn()
140 static __always_inline unsigned int cpumask_check(unsigned int cpu) in cpumask_check() argument
142 cpu_max_bits_warn(cpu, small_cpumask_bits); in cpumask_check()
143 return cpu; in cpumask_check()
294 #define for_each_cpu(cpu, mask) \ argument
295 for_each_set_bit(cpu, cpumask_bits(mask), small_cpumask_bits)
328 #define for_each_cpu_wrap(cpu, mask, start) \ argument
329 for_each_set_bit_wrap(cpu, cpumask_bits(mask), small_cpumask_bits, start)
345 #define for_each_cpu_and(cpu, mask1, mask2) \ argument
[all …]
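
A short sketch of the for_each_cpu() iterator shown above; dump_online_cpus() is a hypothetical helper walking cpu_online_mask.

    #include <linux/cpumask.h>
    #include <linux/printk.h>

    /* Walk every set bit in the online-CPU mask. */
    static void dump_online_cpus(void)
    {
            unsigned int cpu;

            for_each_cpu(cpu, cpu_online_mask)
                    pr_info("cpu %u is online\n", cpu);
    }
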
kernel_stat.h
51 #define kstat_cpu(cpu) per_cpu(kstat, cpu) argument
52 #define kcpustat_cpu(cpu) per_cpu(kernel_cpustat, cpu) argument
54 extern unsigned long long nr_context_switches_cpu(int cpu);
57 extern unsigned int kstat_irqs_cpu(unsigned int irq, int cpu);
65 static inline unsigned int kstat_softirqs_cpu(unsigned int irq, int cpu) in kstat_softirqs_cpu() argument
67 return kstat_cpu(cpu).softirqs[irq]; in kstat_softirqs_cpu()
70 static inline unsigned int kstat_cpu_softirqs_sum(int cpu) in kstat_cpu_softirqs_sum() argument
76 sum += kstat_softirqs_cpu(i, cpu); in kstat_cpu_softirqs_sum()
97 static inline unsigned long kstat_cpu_irqs_sum(unsigned int cpu) in kstat_cpu_irqs_sum() argument
99 return kstat_cpu(cpu).irqs_sum; in kstat_cpu_irqs_sum()
[all …]
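
A sketch combining the per-CPU accessors above: summing one softirq counter over all online CPUs. TIMER_SOFTIRQ comes from linux/interrupt.h; the helper name is hypothetical.

    #include <linux/cpumask.h>
    #include <linux/interrupt.h>
    #include <linux/kernel_stat.h>

    /* Total TIMER_SOFTIRQ invocations across all online CPUs. */
    static unsigned long total_timer_softirqs(void)
    {
            unsigned long sum = 0;
            int cpu;

            for_each_online_cpu(cpu)
                    sum += kstat_softirqs_cpu(TIMER_SOFTIRQ, cpu);

            return sum;
    }
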
cacheinfo.h
84 struct cpu_cacheinfo *get_cpu_cacheinfo(unsigned int cpu);
85 int early_cache_level(unsigned int cpu);
86 int init_cache_level(unsigned int cpu);
87 int init_of_cache_level(unsigned int cpu);
88 int populate_cache_leaves(unsigned int cpu);
89 int cache_setup_acpi(unsigned int cpu);
90 bool last_level_cache_is_valid(unsigned int cpu);
92 int fetch_cache_info(unsigned int cpu);
93 int detect_cache_attributes(unsigned int cpu);
104 int acpi_get_cache_info(unsigned int cpu, in acpi_get_cache_info() argument
[all …]
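
A sketch of walking the cache leaves reported by get_cpu_cacheinfo(); the num_leaves/info_list, level and size fields are assumed from the cpu_cacheinfo and cacheinfo structures defined in the same header, and the helper is hypothetical.

    #include <linux/cacheinfo.h>
    #include <linux/printk.h>

    /* Dump one line per detected cache leaf of a CPU. */
    static void dump_cache_leaves(unsigned int cpu)
    {
            struct cpu_cacheinfo *ci = get_cpu_cacheinfo(cpu);
            unsigned int i;

            for (i = 0; i < ci->num_leaves; i++)
                    pr_info("cpu%u leaf %u: level %u, %u bytes\n", cpu, i,
                            ci->info_list[i].level, ci->info_list[i].size);
    }
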
smpboot.h
34 int (*thread_should_run)(unsigned int cpu);
35 void (*thread_fn)(unsigned int cpu);
36 void (*create)(unsigned int cpu);
37 void (*setup)(unsigned int cpu);
38 void (*cleanup)(unsigned int cpu, bool online);
39 void (*park)(unsigned int cpu);
40 void (*unpark)(unsigned int cpu);
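
A sketch of filling in the smp_hotplug_thread callbacks listed above; the .store and .thread_comm fields and smpboot_register_percpu_thread() are not in the excerpt and are assumed from the same header, and all demo_* names are hypothetical.

    #include <linux/smpboot.h>
    #include <linux/percpu.h>
    #include <linux/sched.h>

    static DEFINE_PER_CPU(struct task_struct *, demo_task);

    static int demo_should_run(unsigned int cpu)
    {
            return 0;               /* nothing pending in this sketch */
    }

    static void demo_fn(unsigned int cpu)
    {
            /* per-CPU work would run here, in a parked/unparked kthread */
    }

    static struct smp_hotplug_thread demo_threads = {
            .store             = &demo_task,
            .thread_should_run = demo_should_run,
            .thread_fn         = demo_fn,
            .thread_comm       = "demo/%u",
    };

    /* smpboot_register_percpu_thread(&demo_threads) would spawn one such
     * thread per CPU and park/unpark it across hotplug transitions. */
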
tick.h
23 extern void tick_cleanup_dead_cpu(int cpu);
28 static inline void tick_cleanup_dead_cpu(int cpu) { } in tick_cleanup_dead_cpu() argument
32 extern int tick_cpu_dying(unsigned int cpu);
132 extern bool tick_nohz_tick_stopped_cpu(int cpu);
142 extern unsigned long tick_nohz_get_idle_calls_cpu(int cpu);
143 extern u64 get_cpu_idle_time_us(int cpu, u64 *last_update_time);
144 extern u64 get_cpu_iowait_time_us(int cpu, u64 *last_update_time);
148 static inline int tick_nohz_tick_stopped_cpu(int cpu) { return 0; } in tick_nohz_tick_stopped_cpu() argument
165 static inline u64 get_cpu_idle_time_us(int cpu, u64 *unused) { return -1; } in get_cpu_idle_time_us() argument
166 static inline u64 get_cpu_iowait_time_us(int cpu, u64 *unused) { return -1; } in get_cpu_iowait_time_us() argument
[all …]
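
A sketch of querying nohz idle accounting with get_cpu_idle_time_us(); the helper is hypothetical, and the -1 check mirrors the stub shown above for kernels without this accounting.

    #include <linux/tick.h>
    #include <linux/printk.h>

    /* Report how long a CPU has been idle, if nohz accounting is available. */
    static void report_idle(int cpu)
    {
            u64 now;
            u64 idle_us = get_cpu_idle_time_us(cpu, &now);

            if (idle_us != (u64)-1)
                    pr_info("cpu %d idle for %llu us\n", cpu,
                            (unsigned long long)idle_us);
    }
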
ring_buffer.h
170 int ring_buffer_wait(struct trace_buffer *buffer, int cpu, int full,
172 __poll_t ring_buffer_poll_wait(struct trace_buffer *buffer, int cpu,
174 void ring_buffer_wake_waiters(struct trace_buffer *buffer, int cpu);
180 int ring_buffer_resize(struct trace_buffer *buffer, unsigned long size, int cpu);
194 ring_buffer_peek(struct trace_buffer *buffer, int cpu, u64 *ts,
197 ring_buffer_consume(struct trace_buffer *buffer, int cpu, u64 *ts,
201 ring_buffer_read_start(struct trace_buffer *buffer, int cpu, gfp_t flags);
211 unsigned long ring_buffer_size(struct trace_buffer *buffer, int cpu);
214 void ring_buffer_reset_cpu(struct trace_buffer *buffer, int cpu);
220 struct trace_buffer *buffer_b, int cpu);
[all …]
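
A sketch built around ring_buffer_consume() from the list above: draining whatever events are pending on one CPU of a trace buffer. The drain helper is hypothetical; a NULL return is assumed to mean the per-CPU buffer is empty.

    #include <linux/ring_buffer.h>

    /* Consume (and discard) all pending events on one CPU of a trace buffer. */
    static void drain_cpu_events(struct trace_buffer *buffer, int cpu)
    {
            struct ring_buffer_event *event;
            u64 ts;

            while ((event = ring_buffer_consume(buffer, cpu, &ts, NULL)))
                    ;       /* ring_buffer_event_data(event) would expose the payload */
    }
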
cpu.h
27 struct cpu { struct
38 extern int register_cpu(struct cpu *cpu, int num); argument
39 extern struct device *get_cpu_device(unsigned cpu);
40 extern bool cpu_is_hotpluggable(unsigned cpu);
41 extern bool arch_match_cpu_phys_id(int cpu, u64 phys_id);
43 int cpu, unsigned int *thread);
89 extern bool arch_cpu_is_hotpluggable(int cpu);
90 extern int arch_register_cpu(int cpu);
91 extern void arch_unregister_cpu(int cpu);
93 extern void unregister_cpu(struct cpu *cpu);
[all …]
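
A sketch using get_cpu_device() and cpu_is_hotpluggable() from the list above; the logging helper is hypothetical.

    #include <linux/cpu.h>
    #include <linux/device.h>

    /* Log whether a CPU's device object reports it as hot-unpluggable. */
    static void log_hotpluggable(unsigned int cpu)
    {
            struct device *dev = get_cpu_device(cpu);

            if (dev)
                    dev_info(dev, "hotpluggable: %d\n", cpu_is_hotpluggable(cpu));
    }
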
context_tracking_state.h
64 static __always_inline int ct_rcu_watching_cpu(int cpu) in ct_rcu_watching_cpu() argument
66 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_rcu_watching_cpu()
71 static __always_inline int ct_rcu_watching_cpu_acquire(int cpu) in ct_rcu_watching_cpu_acquire() argument
73 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_rcu_watching_cpu_acquire()
83 static __always_inline long ct_nesting_cpu(int cpu) in ct_nesting_cpu() argument
85 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_nesting_cpu()
95 static __always_inline long ct_nmi_nesting_cpu(int cpu) in ct_nmi_nesting_cpu() argument
97 struct context_tracking *ct = per_cpu_ptr(&context_tracking, cpu); in ct_nmi_nesting_cpu()
111 static __always_inline bool context_tracking_enabled_cpu(int cpu) in context_tracking_enabled_cpu() argument
113 return context_tracking_enabled() && per_cpu(context_tracking.active, cpu); in context_tracking_enabled_cpu()
[all …]
intel_tcc.h
13 int intel_tcc_get_tjmax(int cpu);
14 int intel_tcc_get_offset(int cpu);
15 int intel_tcc_set_offset(int cpu, int offset);
16 int intel_tcc_get_temp(int cpu, int *temp, bool pkg);
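
A sketch of the intel_tcc helpers declared above; read_pkg_temp() is hypothetical and simply forwards intel_tcc_get_temp() with the package sensor selected.

    #include <linux/intel_tcc.h>

    /* Return the package temperature reported via TCC, or a negative errno. */
    static int read_pkg_temp(int cpu)
    {
            int temp, ret;

            ret = intel_tcc_get_temp(cpu, &temp, true);     /* true = package sensor */
            return ret ? ret : temp;
    }
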
rcutree.h
112 int rcutree_prepare_cpu(unsigned int cpu);
113 int rcutree_online_cpu(unsigned int cpu);
114 void rcutree_report_cpu_starting(unsigned int cpu);
117 int rcutree_dead_cpu(unsigned int cpu);
118 int rcutree_dying_cpu(unsigned int cpu);
119 int rcutree_offline_cpu(unsigned int cpu);
126 void rcutree_migrate_callbacks(int cpu);
nmi.h
48 extern int lockup_detector_online_cpu(unsigned int cpu);
49 extern int lockup_detector_offline_cpu(unsigned int cpu);
98 void watchdog_hardlockup_touch_cpu(unsigned int cpu);
99 void watchdog_hardlockup_check(unsigned int cpu, struct pt_regs *regs);
115 void watchdog_hardlockup_enable(unsigned int cpu);
116 void watchdog_hardlockup_disable(unsigned int cpu);
174 static inline bool trigger_single_cpu_backtrace(int cpu) in trigger_single_cpu_backtrace() argument
176 arch_trigger_cpumask_backtrace(cpumask_of(cpu), -1); in trigger_single_cpu_backtrace()
199 static inline bool trigger_single_cpu_backtrace(int cpu) in trigger_single_cpu_backtrace() argument
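
A sketch of trigger_single_cpu_backtrace() from the excerpt above; the wrapper is hypothetical, and the return value is assumed to indicate whether a backtrace could be requested.

    #include <linux/nmi.h>
    #include <linux/printk.h>

    /* Ask one CPU to dump its stack (usually via an NMI/backtrace IPI). */
    static void backtrace_one_cpu(int cpu)
    {
            if (!trigger_single_cpu_backtrace(cpu))
                    pr_warn("cpu %d backtrace not available\n", cpu);
    }
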
cpuhotplug.h
252 int (*startup)(unsigned int cpu),
253 int (*teardown)(unsigned int cpu), bool multi_instance);
257 int (*startup)(unsigned int cpu),
258 int (*teardown)(unsigned int cpu),
273 int (*startup)(unsigned int cpu), in cpuhp_setup_state() argument
274 int (*teardown)(unsigned int cpu)) in cpuhp_setup_state() argument
293 int (*startup)(unsigned int cpu), in cpuhp_setup_state_cpuslocked() argument
294 int (*teardown)(unsigned int cpu)) in cpuhp_setup_state_cpuslocked() argument
313 int (*startup)(unsigned int cpu), in cpuhp_setup_state_nocalls() argument
314 int (*teardown)(unsigned int cpu)) in cpuhp_setup_state_nocalls() argument
[all …]
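
A sketch of registering a dynamic hotplug state with cpuhp_setup_state(); CPUHP_AP_ONLINE_DYN and the "demo:online" name are illustrative, and the demo_* callbacks are hypothetical.

    #include <linux/cpuhotplug.h>
    #include <linux/init.h>

    static int demo_online(unsigned int cpu)
    {
            /* runs for each CPU as it comes online */
            return 0;
    }

    static int demo_offline(unsigned int cpu)
    {
            /* runs as each CPU is taken offline */
            return 0;
    }

    static int __init demo_init(void)
    {
            int ret;

            /* For CPUHP_AP_ONLINE_DYN a free state number is allocated and
             * returned; the startup callback is also invoked for CPUs that
             * are already online. */
            ret = cpuhp_setup_state(CPUHP_AP_ONLINE_DYN, "demo:online",
                                    demo_online, demo_offline);
            return ret < 0 ? ret : 0;
    }
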
/include/trace/events/
cpuhp.h
12 TP_PROTO(unsigned int cpu,
17 TP_ARGS(cpu, target, idx, fun),
20 __field( unsigned int, cpu )
27 __entry->cpu = cpu;
34 __entry->cpu, __entry->target, __entry->idx, __entry->fun)
39 TP_PROTO(unsigned int cpu,
45 TP_ARGS(cpu, target, idx, fun, node),
48 __field( unsigned int, cpu )
55 __entry->cpu = cpu;
62 __entry->cpu, __entry->target, __entry->idx, __entry->fun)
[all …]
hw_pressure.h
11 TP_PROTO(int cpu, unsigned long hw_pressure),
12 TP_ARGS(cpu, hw_pressure),
16 __field(int, cpu)
21 __entry->cpu = cpu;
24 TP_printk("cpu=%d hw_pressure=%lu", __entry->cpu, __entry->hw_pressure)
irq_matrix.h
67 TP_PROTO(int bit, unsigned int cpu, struct irq_matrix *matrix,
70 TP_ARGS(bit, cpu, matrix, cmap),
74 __field( unsigned int, cpu )
87 __entry->cpu = cpu;
99 __entry->bit, __entry->cpu, __entry->online,
143 TP_PROTO(int bit, unsigned int cpu,
146 TP_ARGS(bit, cpu, matrix, cmap)
151 TP_PROTO(int bit, unsigned int cpu,
154 TP_ARGS(bit, cpu, matrix, cmap)
159 TP_PROTO(int bit, unsigned int cpu,
[all …]
cgroup.h
209 TP_PROTO(struct cgroup *cgrp, int cpu, bool contended),
211 TP_ARGS(cgrp, cpu, contended),
217 __field( int, cpu )
225 __entry->cpu = cpu;
231 __entry->cpu, __entry->contended)
237 TP_PROTO(struct cgroup *cgrp, int cpu, bool contended),
239 TP_ARGS(cgrp, cpu, contended)
244 TP_PROTO(struct cgroup *cgrp, int cpu, bool contended),
246 TP_ARGS(cgrp, cpu, contended)
251 TP_PROTO(struct cgroup *cgrp, int cpu, bool contended),
[all …]
/include/asm-generic/
numa.h
18 void numa_clear_node(unsigned int cpu);
35 void __init early_map_cpu_to_node(unsigned int cpu, int nid);
36 int early_cpu_to_node(int cpu);
37 void numa_store_cpu_info(unsigned int cpu);
38 void numa_add_cpu(unsigned int cpu);
39 void numa_remove_cpu(unsigned int cpu);
43 static inline void numa_store_cpu_info(unsigned int cpu) { } in numa_store_cpu_info() argument
44 static inline void numa_add_cpu(unsigned int cpu) { } in numa_add_cpu() argument
45 static inline void numa_remove_cpu(unsigned int cpu) { } in numa_remove_cpu() argument
47 static inline void early_map_cpu_to_node(unsigned int cpu, int nid) { } in early_map_cpu_to_node() argument
[all …]
topology.h
35 #define cpu_to_node(cpu) ((void)(cpu),0) argument
41 #define set_cpu_numa_node(cpu, node) argument
44 #define cpu_to_mem(cpu) ((void)(cpu),0) argument
72 #define set_cpu_numa_mem(cpu, node) argument
/include/linux/clk/
tegra.h
33 void (*wait_for_reset)(u32 cpu);
34 void (*put_in_reset)(u32 cpu);
35 void (*out_of_reset)(u32 cpu);
36 void (*enable_clock)(u32 cpu);
37 void (*disable_clock)(u32 cpu);
48 static inline void tegra_wait_cpu_in_reset(u32 cpu) in tegra_wait_cpu_in_reset() argument
53 tegra_cpu_car_ops->wait_for_reset(cpu); in tegra_wait_cpu_in_reset()
56 static inline void tegra_put_cpu_in_reset(u32 cpu) in tegra_put_cpu_in_reset() argument
61 tegra_cpu_car_ops->put_in_reset(cpu); in tegra_put_cpu_in_reset()
64 static inline void tegra_cpu_out_of_reset(u32 cpu) in tegra_cpu_out_of_reset() argument
[all …]
/include/linux/sched/
hotplug.h
9 extern int sched_cpu_starting(unsigned int cpu);
10 extern int sched_cpu_activate(unsigned int cpu);
11 extern int sched_cpu_deactivate(unsigned int cpu);
14 extern int sched_cpu_wait_empty(unsigned int cpu);
15 extern int sched_cpu_dying(unsigned int cpu);
isolation.h
28 extern bool housekeeping_test_cpu(int cpu, enum hk_type type);
51 static inline bool housekeeping_test_cpu(int cpu, enum hk_type type) in housekeeping_test_cpu() argument
59 static inline bool housekeeping_cpu(int cpu, enum hk_type type) in housekeeping_cpu() argument
63 return housekeeping_test_cpu(cpu, type); in housekeeping_cpu()
68 static inline bool cpu_is_isolated(int cpu) in cpu_is_isolated() argument
70 return !housekeeping_test_cpu(cpu, HK_TYPE_DOMAIN) || in cpu_is_isolated()
71 !housekeeping_test_cpu(cpu, HK_TYPE_TICK) || in cpu_is_isolated()
72 cpuset_cpu_is_isolated(cpu); in cpu_is_isolated()
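
A sketch of honouring CPU isolation with cpu_is_isolated(), which (per the excerpt) combines the housekeeping and cpuset checks; the helper name is hypothetical.

    #include <linux/types.h>
    #include <linux/sched/isolation.h>

    /* Only queue background work on CPUs that are not isolated. */
    static bool may_use_cpu_for_background_work(int cpu)
    {
            return !cpu_is_isolated(cpu);
    }
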
nohz.h
10 extern void nohz_balance_enter_idle(int cpu);
13 static inline void nohz_balance_enter_idle(int cpu) { } in nohz_balance_enter_idle() argument
27 extern void wake_up_nohz_cpu(int cpu);
29 static inline void wake_up_nohz_cpu(int cpu) { } in wake_up_nohz_cpu() argument
/include/acpi/
cppc_acpi.h
145 extern int cppc_get_perf_ctrs(int cpu, struct cppc_perf_fb_ctrs *perf_fb_ctrs);
146 extern int cppc_set_perf(int cpu, struct cppc_perf_ctrls *perf_ctrls);
147 extern int cppc_set_enable(int cpu, bool enable);
148 extern int cppc_get_perf_caps(int cpu, struct cppc_perf_caps *caps);
154 extern int acpi_get_psd_map(unsigned int cpu, struct cppc_cpudata *cpu_data);
155 extern unsigned int cppc_get_transition_latency(int cpu);
161 extern int cppc_set_epp_perf(int cpu, struct cppc_perf_ctrls *perf_ctrls, bool enable);
163 extern int cppc_set_auto_sel(int cpu, bool enable);
164 extern int amd_get_highest_perf(unsigned int cpu, u32 *highest_perf);
165 extern int amd_get_boost_ratio_numerator(unsigned int cpu, u64 *numerator);
[all …]
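
A sketch of reading the CPPC performance capabilities declared above; the highest_perf field is assumed from struct cppc_perf_caps in the same header, and the helper is hypothetical.

    #include <acpi/cppc_acpi.h>
    #include <linux/printk.h>

    /* Log the CPPC "highest performance" level advertised for a CPU. */
    static int report_highest_perf(int cpu)
    {
            struct cppc_perf_caps caps;
            int ret = cppc_get_perf_caps(cpu, &caps);

            if (ret)
                    return ret;

            pr_info("cpu %d highest_perf=%u\n", cpu, caps.highest_perf);
            return 0;
    }
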
