/kernel/ |
D | smpboot.c
    30  struct task_struct *idle_thread_get(unsigned int cpu) in idle_thread_get()
    51  static inline void idle_init(unsigned int cpu) in idle_init()
    69  unsigned int cpu, boot_cpu; in idle_threads_init() local
    86  unsigned int cpu; member
    171 __smpboot_create_thread(struct smp_hotplug_thread *ht, unsigned int cpu) in __smpboot_create_thread()
    213 int smpboot_create_threads(unsigned int cpu) in smpboot_create_threads()
    228 static void smpboot_unpark_thread(struct smp_hotplug_thread *ht, unsigned int cpu) in smpboot_unpark_thread()
    236 int smpboot_unpark_threads(unsigned int cpu) in smpboot_unpark_threads()
    247 static void smpboot_park_thread(struct smp_hotplug_thread *ht, unsigned int cpu) in smpboot_park_thread()
    255 int smpboot_park_threads(unsigned int cpu) in smpboot_park_threads()
    [all …]
|
D | cpu.c
    147 static int cpuhp_invoke_callback(unsigned int cpu, enum cpuhp_state state, in cpuhp_invoke_callback()
    424 static inline bool cpu_smt_allowed(unsigned int cpu) in cpu_smt_allowed()
    449 static inline bool cpu_smt_allowed(unsigned int cpu) { return true; } in cpu_smt_allowed()
    519 static int bringup_wait_for_ap(unsigned int cpu) in bringup_wait_for_ap()
    548 static int bringup_cpu(unsigned int cpu) in bringup_cpu()
    572 static void undo_cpu_up(unsigned int cpu, struct cpuhp_cpu_state *st) in undo_cpu_up()
    592 static int cpuhp_up_callbacks(unsigned int cpu, struct cpuhp_cpu_state *st, in cpuhp_up_callbacks()
    615 static void cpuhp_create(unsigned int cpu) in cpuhp_create()
    623 static int cpuhp_should_run(unsigned int cpu) in cpuhp_should_run()
    644 static void cpuhp_thread_fun(unsigned int cpu) in cpuhp_thread_fun()
    [all …]
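The entries above are the hotplug core's internal per-state callbacks. Subsystems normally reach this machinery through cpuhp_setup_state(); the sketch below is illustrative only, assuming a hypothetical driver with made-up mydrv_cpu_online()/mydrv_cpu_offline() callbacks, and is not code from cpu.c.

```c
#include <linux/cpuhotplug.h>
#include <linux/init.h>

/* Hypothetical per-CPU callbacks: both run for the CPU being brought
 * online or taken offline and must return 0 on success. */
static int mydrv_cpu_online(unsigned int cpu)
{
	/* allocate/enable per-CPU resources for @cpu */
	return 0;
}

static int mydrv_cpu_offline(unsigned int cpu)
{
	/* quiesce and release per-CPU resources for @cpu */
	return 0;
}

static int __init mydrv_init(void)
{
	int ret;

	/*
	 * Dynamically allocated state in the online section: the online
	 * callback is invoked on all present CPUs right away and on every
	 * CPU that comes up later; the offline callback on the way down.
	 */
	ret = cpuhp_setup_state(CPUHP_AP_ONLINE_DYN, "mydrv:online",
				mydrv_cpu_online, mydrv_cpu_offline);
	return ret < 0 ? ret : 0;
}
```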
|
D | smp.c
    43  int smpcfd_prepare_cpu(unsigned int cpu) in smpcfd_prepare_cpu()
    65  int smpcfd_dead_cpu(unsigned int cpu) in smpcfd_dead_cpu()
    75  int smpcfd_dying_cpu(unsigned int cpu) in smpcfd_dying_cpu()
    142 static int generic_exec_single(int cpu, call_single_data_t *csd, in generic_exec_single()
    269 int smp_call_function_single(int cpu, smp_call_func_t func, void *info, in smp_call_function_single()
    335 int smp_call_function_single_async(int cpu, call_single_data_t *csd) in smp_call_function_single_async()
    372 unsigned int cpu; in smp_call_function_any() local
    416 int cpu, next_cpu, this_cpu = smp_processor_id(); in smp_call_function_many() local
    581 unsigned int cpu; in smp_init() local
    643 int cpu = get_cpu(); in on_each_cpu_mask() local
    [all …]
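smp_call_function_single() (listed above) is the usual way to run a function on one specific CPU. A minimal sketch of the calling convention follows; remote_read() and query_cpu() are made-up names, not part of smp.c, and the callback runs in IPI context, so it must not sleep.

```c
#include <linux/smp.h>
#include <linux/printk.h>

/* Runs on the target CPU in IPI context; must not sleep. */
static void remote_read(void *info)
{
	unsigned long *out = info;

	*out = smp_processor_id();	/* stand-in for a per-CPU read */
}

static int query_cpu(int cpu)
{
	unsigned long val = 0;
	int ret;

	/* wait=1: return only after remote_read() has finished on @cpu. */
	ret = smp_call_function_single(cpu, remote_read, &val, 1);
	if (ret)
		return ret;		/* e.g. -ENXIO if @cpu is offline */

	pr_info("cpu %d answered %lu\n", cpu, val);
	return 0;
}
```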
|
D | stop_machine.c
    77  static bool cpu_stop_queue_work(unsigned int cpu, struct cpu_stop_work *work) in cpu_stop_queue_work()
    123 int stop_one_cpu(unsigned int cpu, cpu_stop_fn_t fn, void *arg) in stop_one_cpu()
    191 int cpu = smp_processor_id(), err = 0; in multi_cpu_stop() local
    366 bool stop_one_cpu_nowait(unsigned int cpu, cpu_stop_fn_t fn, void *arg, in stop_one_cpu_nowait()
    378 unsigned int cpu; in queue_stop_cpus_work() local
    485 static int cpu_stop_should_run(unsigned int cpu) in cpu_stop_should_run()
    497 static void cpu_stopper_thread(unsigned int cpu) in cpu_stopper_thread()
    533 void stop_machine_park(int cpu) in stop_machine_park()
    547 static void cpu_stop_create(unsigned int cpu) in cpu_stop_create()
    552 static void cpu_stop_park(unsigned int cpu) in cpu_stop_park()
    [all …]
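stop_one_cpu() (listed above) runs a cpu_stop_fn_t on the target CPU via its stopper thread, so nothing else runs on that CPU for the duration. A hedged sketch, with drain_on_cpu() and drain_cpu() as invented names:

```c
#include <linux/stop_machine.h>

/* cpu_stop_fn_t callback, executed by the per-CPU stopper thread. */
static int drain_on_cpu(void *arg)
{
	int *done = arg;

	*done = 1;		/* stand-in for the real per-CPU work */
	return 0;
}

static int drain_cpu(unsigned int cpu)
{
	int done = 0;

	/* Blocks until drain_on_cpu() has completed on @cpu. */
	return stop_one_cpu(cpu, drain_on_cpu, &done);
}
```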
|
D | up.c
    12  int smp_call_function_single(int cpu, void (*func) (void *info), void *info, in smp_call_function_single()
    27  int smp_call_function_single_async(int cpu, call_single_data_t *csd) in smp_call_function_single_async()
    71  void on_each_cpu_cond_mask(bool (*cond_func)(int cpu, void *info), in on_each_cpu_cond_mask()
    87  void on_each_cpu_cond(bool (*cond_func)(int cpu, void *info), in on_each_cpu_cond()
    95  int smp_call_on_cpu(unsigned int cpu, int (*func)(void *), void *par, bool phys) in smp_call_on_cpu()
|
D | padata.c
    40  int cpu, target_cpu; in padata_index_to_cpu() local
    104 int i, cpu, cpu_index, target_cpu, err; in padata_do_parallel() local
    178 int cpu = pd->cpu; in padata_find_next() local
    389 int cpu; in padata_init_squeues() local
    403 int cpu; in padata_init_pqueues() local
    472 int cpu; in padata_flush_queues() local
    696 static int __padata_add_cpu(struct padata_instance *pinst, int cpu) in __padata_add_cpu()
    716 static int __padata_remove_cpu(struct padata_instance *pinst, int cpu) in __padata_remove_cpu()
    751 int padata_remove_cpu(struct padata_instance *pinst, int cpu, int mask) in padata_remove_cpu()
    775 static inline int pinst_has_cpu(struct padata_instance *pinst, int cpu) in pinst_has_cpu()
    [all …]
|
D | watchdog.c
    105 int __weak watchdog_nmi_enable(unsigned int cpu) in watchdog_nmi_enable()
    111 void __weak watchdog_nmi_disable(unsigned int cpu) in watchdog_nmi_disable()
    289 int cpu; in touch_all_softlockup_watchdogs() local
    479 static void watchdog_enable(unsigned int cpu) in watchdog_enable()
    505 static void watchdog_disable(unsigned int cpu) in watchdog_disable()
    529 int cpu; in softlockup_stop_all() local
    548 int cpu; in softlockup_start_all() local
    555 int lockup_detector_online_cpu(unsigned int cpu) in lockup_detector_online_cpu()
    562 int lockup_detector_offline_cpu(unsigned int cpu) in lockup_detector_offline_cpu()
|
D | watchdog_hld.c
    168 unsigned int cpu = smp_processor_id(); in hardlockup_detector_event_create() local
    225 int cpu; in hardlockup_detector_perf_cleanup() local
    248 int cpu; in hardlockup_detector_perf_stop() local
    267 int cpu; in hardlockup_detector_perf_restart() local
|
D | profile.c
    242 int cpu = smp_processor_id(); in __profile_flip_buffers() local
    249 int i, j, cpu; in profile_flip_buffers() local
    272 int i, cpu; in profile_discard_flip_buffers() local
    288 int i, j, cpu; in do_profile_hits() local
    334 static int profile_dead_cpu(unsigned int cpu) in profile_dead_cpu()
    352 static int profile_prepare_cpu(unsigned int cpu) in profile_prepare_cpu()
    374 static int profile_online_cpu(unsigned int cpu) in profile_online_cpu()
|
/kernel/time/ |
D | tick-common.c
    61  struct tick_device *tick_get_device(int cpu) in tick_get_device()
    83  static void tick_periodic(int cpu) in tick_periodic()
    105 int cpu = smp_processor_id(); in tick_handle_periodic() local
    181 int cpu = *(unsigned int *)info; in giveup_do_timer() local
    190 int cpu = smp_processor_id(); in tick_take_do_timer_from_boot() local
    202 struct clock_event_device *newdev, int cpu, in tick_setup_device()
    277 int cpu = smp_processor_id(); in tick_install_replacement() local
    286 struct clock_event_device *newdev, int cpu) in tick_check_percpu()
    342 int cpu; in tick_check_new_device() local
    413 int cpu = cpumask_first(cpu_online_mask); in tick_handover_do_timer() local
    [all …]
|
D | tick-sched.c
    40  struct tick_sched *tick_get_tick_sched(int cpu) in tick_get_tick_sched()
    119 int cpu = smp_processor_id(); in tick_sched_do_timer() local
    208 static bool can_stop_full_tick(int cpu, struct tick_sched *ts) in can_stop_full_tick()
    257 void tick_nohz_full_kick_cpu(int cpu) in tick_nohz_full_kick_cpu()
    271 int cpu; in tick_nohz_full_kick_all() local
    310 void tick_nohz_dep_set_cpu(int cpu, enum tick_dep_bits bit) in tick_nohz_dep_set_cpu()
    332 void tick_nohz_dep_clear_cpu(int cpu, enum tick_dep_bits bit) in tick_nohz_dep_clear_cpu()
    405 static int tick_nohz_cpu_down(unsigned int cpu) in tick_nohz_cpu_down()
    419 int cpu, ret; in tick_nohz_init() local
    485 bool tick_nohz_tick_stopped_cpu(int cpu) in tick_nohz_tick_stopped_cpu()
    [all …]
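tick_nohz_dep_set_cpu() and tick_nohz_dep_clear_cpu() (listed above) add and remove per-CPU tick dependencies on nohz_full kernels. A small illustrative wrapper follows, assuming the perf dependency bit; keep_tick_for_perf() is not a real kernel function.

```c
#include <linux/tick.h>

/*
 * On a nohz_full kernel, pin the periodic tick on @cpu while perf
 * events need it, and release the dependency again afterwards.
 */
static void keep_tick_for_perf(int cpu, bool need)
{
	if (need)
		tick_nohz_dep_set_cpu(cpu, TICK_DEP_BIT_PERF_EVENTS);
	else
		tick_nohz_dep_clear_cpu(cpu, TICK_DEP_BIT_PERF_EVENTS);
}
```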
|
D | tick-broadcast.c
    44  static inline void tick_broadcast_clear_oneshot(int cpu) { } in tick_broadcast_clear_oneshot()
    47  static inline void tick_broadcast_oneshot_offline(unsigned int cpu) { } in tick_broadcast_oneshot_offline()
    163 int tick_device_uses_broadcast(struct clock_event_device *dev, int cpu) in tick_device_uses_broadcast()
    266 int cpu = smp_processor_id(); in tick_do_broadcast() local
    360 int cpu, bc_stopped; in tick_broadcast_control() local
    455 void tick_broadcast_offline(unsigned int cpu) in tick_broadcast_offline()
    565 static void tick_broadcast_set_event(struct clock_event_device *bc, int cpu, in tick_broadcast_set_event()
    608 int cpu, next_cpu = 0; in tick_handle_oneshot_broadcast() local
    684 static int broadcast_needs_cpu(struct clock_event_device *bc, int cpu) in broadcast_needs_cpu()
    713 int cpu, ret = 0; in __tick_broadcast_oneshot_control() local
    [all …]
|
/kernel/cgroup/ |
D | rstat.c
    11  static struct cgroup_rstat_cpu *cgroup_rstat_cpu(struct cgroup *cgrp, int cpu) in cgroup_rstat_cpu()
    25  void cgroup_rstat_updated(struct cgroup *cgrp, int cpu) in cgroup_rstat_updated()
    88  struct cgroup *root, int cpu) in cgroup_rstat_cpu_pop_updated()
    155 int cpu; in cgroup_rstat_flush_locked() local
    254 int cpu; in cgroup_rstat_init() local
    276 int cpu; in cgroup_rstat_exit() local
    295 int cpu; in cgroup_rstat_boot() local
    315 static void cgroup_base_stat_flush(struct cgroup *cgrp, int cpu) in cgroup_base_stat_flush()
|
/kernel/sched/ |
D | topology.c
    28  static int sched_domain_debug_one(struct sched_domain *sd, int cpu, int level, in sched_domain_debug_one()
    115 static void sched_domain_debug(struct sched_domain *sd, int cpu) in sched_domain_debug()
    141 # define sched_domain_debug(sd, cpu) do { } while (0) argument
    246 static struct perf_domain *find_pd(struct perf_domain *pd, int cpu) in find_pd()
    257 static struct perf_domain *pd_init(int cpu) in pd_init()
    346 int cpu = cpumask_first(cpu_map); in build_perf_domains() local
    630 static void update_top_cache_domain(int cpu) in update_top_cache_domain()
    664 cpu_attach_domain(struct sched_domain *sd, struct root_domain *rd, int cpu) in cpu_attach_domain()
    880 build_group_from_child_sched_domain(struct sched_domain *sd, int cpu) in build_group_from_child_sched_domain()
    907 int cpu; in init_overlap_sched_group() local
    [all …]
|
D | cpudeadline.c
    150 void cpudl_clear(struct cpudl *cp, int cpu) in cpudl_clear()
    190 void cpudl_set(struct cpudl *cp, int cpu, u64 dl) in cpudl_set()
    221 void cpudl_set_freecpu(struct cpudl *cp, int cpu) in cpudl_set_freecpu()
    231 void cpudl_clear_freecpu(struct cpudl *cp, int cpu) in cpudl_clear_freecpu()
|
/kernel/power/ |
D | energy_model.c
    52  static void em_debug_create_pd(struct em_perf_domain *pd, int cpu) in em_debug_create_pd()
    79  static void em_debug_create_pd(struct em_perf_domain *pd, int cpu) {} in em_debug_create_pd()
    86  int i, ret, cpu = cpumask_first(span); in em_create_pd() local
    179 struct em_perf_domain *em_cpu_get(int cpu) in em_cpu_get()
    204 int cpu, ret = 0; in em_register_perf_domain() local
|
/kernel/trace/ |
D | ring_buffer.c
    270  #define for_each_buffer_cpu(buffer, cpu) \ argument
    444  int cpu; member
    521  size_t ring_buffer_nr_pages(struct ring_buffer *buffer, int cpu) in ring_buffer_nr_pages()
    533  size_t ring_buffer_nr_dirty_pages(struct ring_buffer *buffer, int cpu) in ring_buffer_nr_dirty_pages()
    576  int ring_buffer_wait(struct ring_buffer *buffer, int cpu, int full) in ring_buffer_wait()
    687  __poll_t ring_buffer_poll_wait(struct ring_buffer *buffer, int cpu, in ring_buffer_poll_wait()
    751  u64 ring_buffer_time_stamp(struct ring_buffer *buffer, int cpu) in ring_buffer_time_stamp()
    764  int cpu, u64 *ts) in ring_buffer_normalize_time_stamp()
    1189 static int __rb_allocate_pages(long nr_pages, struct list_head *pages, int cpu) in __rb_allocate_pages()
    1286 rb_allocate_cpu_buffer(struct ring_buffer *buffer, long nr_pages, int cpu) in rb_allocate_cpu_buffer()
    [all …]
|
D | trace_functions_graph.c
    37  int cpu; member
    134 int cpu; in trace_graph_entry() local
    243 int cpu; in trace_graph_return() local
    333 static void print_graph_cpu(struct trace_seq *s, int cpu) in print_graph_cpu()
    385 verif_pid(struct trace_seq *s, pid_t pid, int cpu, struct fgraph_data *data) in verif_pid()
    514 enum trace_type type, int cpu, pid_t pid, u32 flags) in print_graph_irq()
    636 int cpu = iter->cpu; in print_graph_entry_leaf() local
    679 struct trace_seq *s, int cpu, u32 flags) in print_graph_entry_nested()
    688 int cpu = iter->cpu; in print_graph_entry_nested() local
    725 int cpu = iter->cpu; in print_graph_prologue() local
    [all …]
|
/kernel/rcu/ |
D | tree_stall.h
    246 int cpu; in rcu_dump_cpu_stacks() local
    262 static void print_cpu_stall_fast_no_hz(char *cp, int cpu) in print_cpu_stall_fast_no_hz()
    275 static void print_cpu_stall_fast_no_hz(char *cp, int cpu) in print_cpu_stall_fast_no_hz()
    293 static void print_cpu_stall_info(int cpu) in print_cpu_stall_info()
    356 int cpu; in print_other_cpu_stall() local
    427 int cpu; in print_cpu_stall() local
    554 int cpu; in show_rcu_gp_kthreads() local
    663 int cpu; in rcu_fwd_progress_check() local
|
D | tree.c
    210  static long rcu_get_n_cbs_cpu(int cpu) in rcu_get_n_cbs_cpu()
    341  bool rcu_eqs_special_set(int cpu) in rcu_eqs_special_set()
    916  int cpu; in rcu_request_urgent_qs_task() local
    1946 rcu_report_qs_rdp(int cpu, struct rcu_data *rdp) in rcu_report_qs_rdp()
    2026 int rcutree_dying_cpu(unsigned int cpu) in rcutree_dying_cpu()
    2092 int rcutree_dead_cpu(unsigned int cpu) in rcutree_dead_cpu()
    2259 int cpu; in force_qs_rnp() local
    2425 static void rcu_cpu_kthread_park(unsigned int cpu) in rcu_cpu_kthread_park()
    2430 static int rcu_cpu_kthread_should_run(unsigned int cpu) in rcu_cpu_kthread_should_run()
    2440 static void rcu_cpu_kthread(unsigned int cpu) in rcu_cpu_kthread()
    [all …]
|
/kernel/irq/ |
D | ipi.c
    162 irq_hw_number_t ipi_get_hwirq(unsigned int irq, unsigned int cpu) in ipi_get_hwirq()
    187 const struct cpumask *dest, unsigned int cpu) in ipi_send_verify()
    221 int __ipi_send_single(struct irq_desc *desc, unsigned int cpu) in __ipi_send_single()
    267 unsigned int cpu; in __ipi_send_mask() local
    307 int ipi_send_single(unsigned int virq, unsigned int cpu) in ipi_send_single()
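ipi_send_single() (listed above) fires a previously reserved IPI at one CPU. A hedged sketch of how an interrupt-controller layer might use it, assuming an IPI-capable irq_domain; the function and variable names below are placeholders, not code from ipi.c.

```c
#include <linux/irq.h>
#include <linux/irqdomain.h>
#include <linux/cpumask.h>

/*
 * 'ipi_domain' is assumed to be an IPI-capable irq_domain supplied by
 * the interrupt controller driver.
 */
static int example_setup_and_kick(struct irq_domain *ipi_domain,
				  unsigned int target_cpu)
{
	int virq;

	/* Reserve one IPI virq covering all possible CPUs. */
	virq = irq_reserve_ipi(ipi_domain, cpu_possible_mask);
	if (virq < 0)
		return virq;

	/*
	 * Fire the IPI at a single CPU; this fails if @target_cpu is not
	 * covered by the reservation or the IPI was never set up.
	 */
	return ipi_send_single(virq, target_cpu);
}
```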
|
D | matrix.c
    132 unsigned int cpu, best_cpu, maxavl = 0; in matrix_find_best_cpu() local
    153 unsigned int cpu, best_cpu, allocated = UINT_MAX; in matrix_find_best_cpu_managed() local
    212 unsigned int cpu, failed_cpu; in irq_matrix_reserve_managed() local
    253 unsigned int cpu; in irq_matrix_remove_managed() local
    288 unsigned int bit, cpu, end = m->alloc_end; in irq_matrix_alloc_managed() local
    380 unsigned int cpu, bit; in irq_matrix_alloc() local
    411 void irq_matrix_free(struct irq_matrix *m, unsigned int cpu, in irq_matrix_free()
    484 int cpu; in irq_matrix_debug_show() local
|
D | cpuhotplug.c
    22  unsigned int cpu = smp_processor_id(); in irq_needs_fixup() local
    174 static void irq_restore_affinity_of_irq(struct irq_desc *desc, unsigned int cpu) in irq_restore_affinity_of_irq()
    201 int irq_affinity_online_cpu(unsigned int cpu) in irq_affinity_online_cpu()
|
/kernel/events/ |
D | hw_breakpoint.c
    52  static struct bp_cpuinfo *get_bp_info(int cpu, enum bp_type_idx type) in get_bp_info()
    88  static unsigned int max_task_bp_pinned(int cpu, enum bp_type_idx type) in max_task_bp_pinned()
    105 static int task_bp_pinned(int cpu, struct perf_event *bp, enum bp_type_idx type) in task_bp_pinned()
    137 int cpu; in fetch_bp_busy_slots() local
    172 static void toggle_bp_task_slot(struct perf_event *bp, int cpu, in toggle_bp_task_slot()
    195 int cpu; in toggle_bp_slot() local
    547 int cpu; in register_wide_hw_breakpoint() local
    580 int cpu; in unregister_wide_hw_breakpoint() local
    663 int cpu, err_cpu; in init_hw_breakpoint() local
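register_wide_hw_breakpoint() and unregister_wide_hw_breakpoint() (listed above) install and remove one breakpoint per online CPU. A hedged sketch loosely modelled on samples/hw_breakpoint; wp_handler() and watch_symbol() are invented names, and the watched address is whatever the caller passes in.

```c
#include <linux/hw_breakpoint.h>
#include <linux/perf_event.h>
#include <linux/err.h>
#include <linux/printk.h>

static struct perf_event * __percpu *wp;

/* perf_overflow_handler_t: called when the watched location is hit. */
static void wp_handler(struct perf_event *bp, struct perf_sample_data *data,
		       struct pt_regs *regs)
{
	pr_info("watched data was written to\n");
}

static int watch_address(unsigned long addr)
{
	struct perf_event_attr attr;

	hw_breakpoint_init(&attr);
	attr.bp_addr = addr;
	attr.bp_len  = HW_BREAKPOINT_LEN_4;
	attr.bp_type = HW_BREAKPOINT_W;

	/* Registers one breakpoint per online CPU (a "wide" breakpoint). */
	wp = register_wide_hw_breakpoint(&attr, wp_handler, NULL);
	if (IS_ERR((void __force *)wp))
		return PTR_ERR((void __force *)wp);
	return 0;
}
```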
|
/kernel/livepatch/ |
D | transition.c
    74  unsigned int cpu; in klp_complete_transition() local
    390 unsigned int cpu; in klp_try_complete_transition() local
    466 unsigned int cpu; in klp_start_transition() local
    507 unsigned int cpu; in klp_init_transition() local
    580 unsigned int cpu; in klp_reverse_transition() local
    632 unsigned int cpu; in klp_force_transition() local
|