Searched refs:next_cpu (Results 1 – 9 of 9) sorted by relevance
/kernel/trace/
trace_hwlat.c
    272  int next_cpu;                                                in move_to_next_cpu() local
    283  next_cpu = cpumask_first(current_mask);                      in move_to_next_cpu()
    297  next_cpu = cpumask_next(smp_processor_id(), current_mask);   in move_to_next_cpu()
    300  if (next_cpu >= nr_cpu_ids)                                   in move_to_next_cpu()
    301  next_cpu = cpumask_first(current_mask);                      in move_to_next_cpu()
    304  if (next_cpu >= nr_cpu_ids) /* Shouldn't happen! */          in move_to_next_cpu()
    308  cpumask_set_cpu(next_cpu, current_mask);                      in move_to_next_cpu()
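move_to_next_cpu() rotates the hwlat sampling thread across the tracing cpumask: cpumask_next() from the current CPU, wrapping to cpumask_first() once the result runs past nr_cpu_ids. A minimal sketch of that wrap-around idiom, with an illustrative helper name rather than the hwlat code itself:

    #include <linux/cpumask.h>
    #include <linux/smp.h>

    /* Pick the CPU after the current one in @mask, wrapping to the start. */
    static int pick_next_cpu(const struct cpumask *mask)
    {
            int next_cpu = cpumask_next(smp_processor_id(), mask);

            if (next_cpu >= nr_cpu_ids)             /* ran off the end of the mask */
                    next_cpu = cpumask_first(mask);

            /* Still >= nr_cpu_ids only if @mask is empty ("shouldn't happen"). */
            return next_cpu;
    }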
trace_entries.h
    121  __field( unsigned int, next_cpu ) \
    138  __entry->next_cpu),
    158  __entry->next_cpu),
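The __field() line sits inside the FTRACE_ENTRY() definitions for the context-switch and wakeup events; next_cpu is one of the per-event fields stored in the ring buffer. Roughly, as a plain struct (the surrounding fields are recalled from the same file and may differ by version; the real type is generated by the macros):

    struct ctx_switch_entry_sketch {        /* illustrative only, not the generated type */
            unsigned int    prev_pid;
            unsigned char   prev_prio;
            unsigned char   prev_state;
            unsigned int    next_pid;
            unsigned char   next_prio;
            unsigned char   next_state;
            unsigned int    next_cpu;       /* CPU the incoming/woken task will run on */
    };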
trace_output.c
    1049  field->next_cpu,                          in trace_ctxwake_print()
    1083  field->next_cpu,                          in trace_ctxwake_raw()
    1119  SEQ_PUT_HEX_FIELD(s, field->next_cpu);    in trace_ctxwake_hex()
    1150  SEQ_PUT_FIELD(s, field->next_cpu);        in trace_ctxwake_bin()
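trace_ctxwake_print(), _raw(), _hex() and _bin() all emit the same next_cpu field, just encoded for different output modes. A rough sketch of the idea using the generic trace_seq helpers; the real code uses the local SEQ_PUT_* macros and fuller format strings:

    #include <linux/trace_seq.h>

    static void emit_next_cpu(struct trace_seq *s, unsigned int next_cpu)
    {
            trace_seq_printf(s, "[%03u]", next_cpu);                  /* human-readable */
            trace_seq_putmem_hex(s, &next_cpu, sizeof(next_cpu));     /* hex mode */
            trace_seq_putmem(s, &next_cpu, sizeof(next_cpu));         /* raw binary mode */
    }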
trace_sched_wakeup.c
    392  entry->next_cpu = task_cpu(next);     in tracing_sched_switch_trace()
    420  entry->next_cpu = task_cpu(wakee);    in tracing_sched_wakeup_trace()
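Both call sites record the CPU reported by task_cpu() for the incoming or woken task. A small sketch of that recording step, reusing the illustrative entry struct from the trace_entries.h sketch above:

    #include <linux/sched.h>

    static void fill_switch_entry(struct ctx_switch_entry_sketch *entry,
                                  struct task_struct *prev,
                                  struct task_struct *next)
    {
            entry->prev_pid = prev->pid;
            entry->next_pid = next->pid;
            entry->next_cpu = task_cpu(next);   /* CPU of the task being switched in */
    }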
trace.c
    2678  int next_cpu = -1;      in __find_next_entry() local
    2708  next_cpu = cpu;         in __find_next_entry()
    2718  *ent_cpu = next_cpu;    in __find_next_entry()
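__find_next_entry() scans the per-CPU ring buffers and returns the entry with the oldest timestamp, reporting the owning CPU through *ent_cpu (-1 if every buffer is empty). A simplified sketch of that selection loop; peek_entry() is a hypothetical stand-in for the ring-buffer peek, not a real API:

    #include <linux/cpumask.h>
    #include <linux/types.h>

    static void *find_oldest_entry(int *ent_cpu, u64 *ent_ts)
    {
            void *next = NULL;
            int cpu, next_cpu = -1;
            u64 next_ts = 0;

            for_each_possible_cpu(cpu) {
                    u64 ts;
                    void *ent = peek_entry(cpu, &ts);   /* hypothetical helper */

                    /* Keep whichever entry has the earliest timestamp so far. */
                    if (ent && (!next || ts < next_ts)) {
                            next = ent;
                            next_cpu = cpu;
                            next_ts = ts;
                    }
            }

            if (ent_cpu)
                    *ent_cpu = next_cpu;
            if (ent_ts)
                    *ent_ts = next_ts;
            return next;
    }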
/kernel/
smp.c
    393  int cpu, next_cpu, this_cpu = smp_processor_id();              in smp_call_function_many() local
    414  next_cpu = cpumask_next_and(cpu, mask, cpu_online_mask);        in smp_call_function_many()
    415  if (next_cpu == this_cpu)                                       in smp_call_function_many()
    416  next_cpu = cpumask_next_and(next_cpu, mask, cpu_online_mask);   in smp_call_function_many()
    419  if (next_cpu >= nr_cpu_ids) {                                   in smp_call_function_many()
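Here next_cpu drives a fast-path check: after finding the first online CPU in the mask (skipping the calling CPU), look for a second one; if there is none, the cross-call collapses to a single-CPU call instead of the full IPI path. A condensed sketch of just that test, omitting the real function's locking and IPI work:

    #include <linux/cpumask.h>
    #include <linux/smp.h>

    /* Returns true and sets *target if @mask holds exactly one other online CPU. */
    static bool mask_has_single_target(const struct cpumask *mask, int *target)
    {
            int this_cpu = smp_processor_id();
            int cpu, next_cpu;

            cpu = cpumask_first_and(mask, cpu_online_mask);
            if (cpu == this_cpu)
                    cpu = cpumask_next_and(cpu, mask, cpu_online_mask);
            if (cpu >= nr_cpu_ids)
                    return false;                   /* nobody else to call */

            next_cpu = cpumask_next_and(cpu, mask, cpu_online_mask);
            if (next_cpu == this_cpu)
                    next_cpu = cpumask_next_and(next_cpu, mask, cpu_online_mask);

            if (next_cpu >= nr_cpu_ids) {           /* exactly one target */
                    *target = cpu;
                    return true;
            }
            return false;                           /* two or more targets */
    }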
/kernel/time/
clocksource.c
    174  int next_cpu, reset_pending;                                        in clocksource_watchdog() local
    272  next_cpu = cpumask_next(raw_smp_processor_id(), cpu_online_mask);   in clocksource_watchdog()
    273  if (next_cpu >= nr_cpu_ids)                                          in clocksource_watchdog()
    274  next_cpu = cpumask_first(cpu_online_mask);                           in clocksource_watchdog()
    276  add_timer_on(&watchdog_timer, next_cpu);                             in clocksource_watchdog()
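The clocksource watchdog covers every online CPU over time by re-arming its timer on the next online CPU each round, wrapping back to the first. A minimal sketch of that re-arm step; the timer and interval names are illustrative, and the timer is assumed to have been set up elsewhere:

    #include <linux/cpumask.h>
    #include <linux/jiffies.h>
    #include <linux/smp.h>
    #include <linux/timer.h>

    static struct timer_list my_watchdog_timer;     /* illustrative, already initialized */

    static void rearm_on_next_cpu(unsigned long interval)
    {
            int next_cpu = cpumask_next(raw_smp_processor_id(), cpu_online_mask);

            if (next_cpu >= nr_cpu_ids)             /* wrap to the first online CPU */
                    next_cpu = cpumask_first(cpu_online_mask);

            my_watchdog_timer.expires = jiffies + interval;
            add_timer_on(&my_watchdog_timer, next_cpu);     /* queue it on that CPU */
    }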
tick-broadcast.c
    603  int cpu, next_cpu = 0;                                  in tick_handle_oneshot_broadcast() local
    624  next_cpu = cpu;                                         in tick_handle_oneshot_broadcast()
    661  tick_broadcast_set_event(dev, next_cpu, next_event);    in tick_handle_oneshot_broadcast()
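tick_handle_oneshot_broadcast() finds the earliest pending expiry among the CPUs waiting on the broadcast device and remembers which CPU owns it, so the device can be programmed for exactly that deadline. A simplified sketch of that scan, assuming a kernel where ktime_t is a plain s64; get_next_event() is a hypothetical accessor:

    #include <linux/cpumask.h>
    #include <linux/ktime.h>

    static void find_next_broadcast(const struct cpumask *pending,
                                    int *next_cpu, ktime_t *next_event)
    {
            int cpu;

            *next_cpu = 0;
            *next_event = KTIME_MAX;

            for_each_cpu(cpu, pending) {
                    ktime_t expires = get_next_event(cpu);  /* hypothetical */

                    if (ktime_before(expires, *next_event)) {
                            *next_event = expires;
                            *next_cpu = cpu;        /* CPU with the soonest deadline */
                    }
            }
    }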
/kernel/sched/
fair.c
    5820  goto next_cpu;                                                        in compute_energy()
    5835  next_cpu:                                                             in compute_energy()
    7067  int next_cpu;                                                         in select_energy_cpu_brute() local
    7101  next_cpu = find_best_target(p, &backup_cpu, boosted, prefer_idle);    in select_energy_cpu_brute()
    7102  if (next_cpu == -1) {                                                 in select_energy_cpu_brute()
    7108  if ((boosted || prefer_idle) && idle_cpu(next_cpu)) {                 in select_energy_cpu_brute()
    7111  target_cpu = next_cpu;                                                in select_energy_cpu_brute()
    7116  if (next_cpu != prev_cpu) {                                           in select_energy_cpu_brute()
    7127  .cpu_id = next_cpu,                                                   in select_energy_cpu_brute()
    7145  target_cpu = next_cpu;                                                in select_energy_cpu_brute()
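The two compute_energy() hits are just a goto label, not the variable. In select_energy_cpu_brute(), next_cpu is the candidate proposed by find_best_target(): -1 means stay on prev_cpu, an idle candidate is taken immediately for boosted or latency-sensitive tasks, and otherwise the task only moves if the energy model says the migration pays off. A heavily condensed skeleton of that decision flow; energy_diff_is_saving() is a hypothetical stand-in for the energy-diff machinery in this tree:

    #include <linux/sched.h>

    static int pick_energy_cpu(struct task_struct *p, int prev_cpu,
                               bool boosted, bool prefer_idle)
    {
            int backup_cpu;
            int next_cpu = find_best_target(p, &backup_cpu, boosted, prefer_idle);

            if (next_cpu == -1)
                    return prev_cpu;                /* no candidate found: stay put */

            /* Boosted / prefer_idle tasks take an idle candidate right away. */
            if ((boosted || prefer_idle) && idle_cpu(next_cpu))
                    return next_cpu;

            /* Otherwise migrate only when the move saves energy. */
            if (next_cpu != prev_cpu && energy_diff_is_saving(p, prev_cpu, next_cpu))
                    return next_cpu;

            return prev_cpu;
    }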