Lines matching refs:cpu (symbol-reference search; each entry shows the source line number, the matched code, the enclosing function where applicable, and the role of cpu at that site):
30 struct task_struct *idle_thread_get(unsigned int cpu) in idle_thread_get() argument
32 struct task_struct *tsk = per_cpu(idle_threads, cpu); in idle_thread_get()
36 init_idle(tsk, cpu); in idle_thread_get()
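The three matches above sit inside idle_thread_get(), which hands back the pre-forked idle task for a CPU and re-initialises it before the CPU is brought up. A minimal reconstruction, with the error path assumed from mainline kernel/smpboot.c of the same vintage:

struct task_struct *idle_thread_get(unsigned int cpu)
{
	struct task_struct *tsk = per_cpu(idle_threads, cpu);

	if (!tsk)
		return ERR_PTR(-ENOMEM);	/* no idle task was forked for this CPU */

	init_idle(tsk, cpu);			/* reset it before the CPU comes online */
	return tsk;
}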
51 static inline void idle_init(unsigned int cpu) in idle_init() argument
53 struct task_struct *tsk = per_cpu(idle_threads, cpu); in idle_init()
56 tsk = fork_idle(cpu); in idle_init()
58 pr_err("SMP: fork_idle() failed for CPU %u\n", cpu); in idle_init()
60 per_cpu(idle_threads, cpu) = tsk; in idle_init()
69 unsigned int cpu, boot_cpu; in idle_threads_init() local
73 for_each_possible_cpu(cpu) { in idle_threads_init()
74 if (cpu != boot_cpu) in idle_threads_init()
75 idle_init(cpu); in idle_threads_init()
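The idle_init()/idle_threads_init() matches fit together as follows: the boot CPU is already running its idle task, so an idle task is forked only for every other possible CPU. A sketch assembled from the matched lines, with the surrounding control flow inferred from mainline kernel/smpboot.c:

static inline void idle_init(unsigned int cpu)
{
	struct task_struct *tsk = per_cpu(idle_threads, cpu);

	if (!tsk) {
		tsk = fork_idle(cpu);
		if (IS_ERR(tsk))
			pr_err("SMP: fork_idle() failed for CPU %u\n", cpu);
		else
			per_cpu(idle_threads, cpu) = tsk;
	}
}

void __init idle_threads_init(void)
{
	unsigned int cpu, boot_cpu;

	boot_cpu = smp_processor_id();

	for_each_possible_cpu(cpu) {
		if (cpu != boot_cpu)
			idle_init(cpu);
	}
}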
86 unsigned int cpu; member
120 ht->cleanup(td->cpu, cpu_online(td->cpu)); in smpboot_thread_fn()
129 BUG_ON(td->cpu != smp_processor_id()); in smpboot_thread_fn()
130 ht->park(td->cpu); in smpboot_thread_fn()
138 BUG_ON(td->cpu != smp_processor_id()); in smpboot_thread_fn()
146 ht->setup(td->cpu); in smpboot_thread_fn()
154 ht->unpark(td->cpu); in smpboot_thread_fn()
159 if (!ht->thread_should_run(td->cpu)) { in smpboot_thread_fn()
165 ht->thread_fn(td->cpu); in smpboot_thread_fn()
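The smpboot_thread_fn() matches are the per-iteration callback dispatch that every smpboot-managed kthread runs. A condensed outline of that loop; preemption control and the HP_THREAD_* status bookkeeping of the real function are omitted, so treat this as a reading aid rather than the full implementation:

static int smpboot_thread_fn(void *data)
{
	struct smpboot_thread_data *td = data;
	struct smp_hotplug_thread *ht = td->ht;

	while (1) {
		set_current_state(TASK_INTERRUPTIBLE);

		if (kthread_should_stop()) {
			__set_current_state(TASK_RUNNING);
			if (ht->cleanup)
				ht->cleanup(td->cpu, cpu_online(td->cpu));
			kfree(td);
			return 0;
		}

		if (kthread_should_park()) {
			__set_current_state(TASK_RUNNING);
			if (ht->park)
				ht->park(td->cpu);	/* runs on td->cpu, hence the BUG_ON above */
			kthread_parkme();
			continue;
		}

		/* first iteration: ht->setup(td->cpu); first run after an unpark: ht->unpark(td->cpu) */

		if (!ht->thread_should_run(td->cpu)) {
			schedule();			/* nothing pending, sleep until woken */
		} else {
			__set_current_state(TASK_RUNNING);
			ht->thread_fn(td->cpu);		/* process one unit of work */
		}
	}
}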
171 __smpboot_create_thread(struct smp_hotplug_thread *ht, unsigned int cpu) in __smpboot_create_thread() argument
173 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in __smpboot_create_thread()
179 td = kzalloc_node(sizeof(*td), GFP_KERNEL, cpu_to_node(cpu)); in __smpboot_create_thread()
182 td->cpu = cpu; in __smpboot_create_thread()
185 tsk = kthread_create_on_cpu(smpboot_thread_fn, td, cpu, in __smpboot_create_thread()
197 *per_cpu_ptr(ht->store, cpu) = tsk; in __smpboot_create_thread()
208 ht->create(cpu); in __smpboot_create_thread()
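Read together, the __smpboot_create_thread() matches trace the creation path: allocate the per-thread data on the CPU's memory node, create a kthread bound to that CPU, publish it through ht->store, then run the optional create() hook. A sketch of that sequence with the error handling abbreviated (reconstructed, not copied verbatim):

static int __smpboot_create_thread(struct smp_hotplug_thread *ht, unsigned int cpu)
{
	struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);
	struct smpboot_thread_data *td;

	if (tsk)
		return 0;				/* already created for this CPU */

	td = kzalloc_node(sizeof(*td), GFP_KERNEL, cpu_to_node(cpu));
	if (!td)
		return -ENOMEM;
	td->cpu = cpu;
	td->ht = ht;

	tsk = kthread_create_on_cpu(smpboot_thread_fn, td, cpu, ht->thread_comm);
	if (IS_ERR(tsk)) {
		kfree(td);
		return PTR_ERR(tsk);
	}

	get_task_struct(tsk);
	*per_cpu_ptr(ht->store, cpu) = tsk;		/* publish the parked thread */
	if (ht->create)
		ht->create(cpu);			/* optional per-CPU constructor */
	return 0;
}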
213 int smpboot_create_threads(unsigned int cpu) in smpboot_create_threads() argument
220 ret = __smpboot_create_thread(cur, cpu); in smpboot_create_threads()
228 static void smpboot_unpark_thread(struct smp_hotplug_thread *ht, unsigned int cpu) in smpboot_unpark_thread() argument
230 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_unpark_thread()
236 int smpboot_unpark_threads(unsigned int cpu) in smpboot_unpark_threads() argument
242 smpboot_unpark_thread(cur, cpu); in smpboot_unpark_threads()
247 static void smpboot_park_thread(struct smp_hotplug_thread *ht, unsigned int cpu) in smpboot_park_thread() argument
249 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_park_thread()
255 int smpboot_park_threads(unsigned int cpu) in smpboot_park_threads() argument
261 smpboot_park_thread(cur, cpu); in smpboot_park_threads()
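The park and unpark helpers above are thin wrappers: each looks up the CPU's task from ht->store and forwards to the generic kthread park machinery. Roughly as below; the NULL guards are written defensively here and differ slightly between kernel versions:

static void smpboot_unpark_thread(struct smp_hotplug_thread *ht, unsigned int cpu)
{
	struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);

	if (tsk)
		kthread_unpark(tsk);	/* let it run again on the incoming CPU */
}

static void smpboot_park_thread(struct smp_hotplug_thread *ht, unsigned int cpu)
{
	struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu);

	if (tsk)
		kthread_park(tsk);	/* quiesce it before the CPU goes down */
}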
268 unsigned int cpu; in smpboot_destroy_threads() local
271 for_each_possible_cpu(cpu) { in smpboot_destroy_threads()
272 struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); in smpboot_destroy_threads()
277 *per_cpu_ptr(ht->store, cpu) = NULL; in smpboot_destroy_threads()
291 unsigned int cpu; in smpboot_register_percpu_thread() local
296 for_each_online_cpu(cpu) { in smpboot_register_percpu_thread()
297 ret = __smpboot_create_thread(plug_thread, cpu); in smpboot_register_percpu_thread()
302 smpboot_unpark_thread(plug_thread, cpu); in smpboot_register_percpu_thread()
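On the consumer side, smpboot_register_percpu_thread() is what a subsystem calls to get one such thread per CPU, the way ksoftirqd and the watchdog do. A minimal usage sketch; the my_* names and the per-CPU pointer are illustrative placeholders, only the smp_hotplug_thread fields and the registration call come from the API referenced above:

#include <linux/percpu.h>
#include <linux/smpboot.h>
#include <linux/init.h>

static DEFINE_PER_CPU(struct task_struct *, my_percpu_task);

static int my_should_run(unsigned int cpu)
{
	return 0;	/* return non-zero when this CPU has queued work (illustrative) */
}

static void my_thread_fn(unsigned int cpu)
{
	/* process one batch of per-CPU work (illustrative) */
}

static struct smp_hotplug_thread my_threads = {
	.store			= &my_percpu_task,
	.thread_should_run	= my_should_run,
	.thread_fn		= my_thread_fn,
	.thread_comm		= "mythread/%u",
};

static int __init my_threads_init(void)
{
	/* creates a thread per online CPU via __smpboot_create_thread(), then unparks them */
	return smpboot_register_percpu_thread(&my_threads);
}
early_initcall(my_threads_init);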
335 int cpu_report_state(int cpu) in cpu_report_state() argument
337 return atomic_read(&per_cpu(cpu_hotplug_state, cpu)); in cpu_report_state()
352 int cpu_check_up_prepare(int cpu) in cpu_check_up_prepare() argument
355 atomic_set(&per_cpu(cpu_hotplug_state, cpu), CPU_UP_PREPARE); in cpu_check_up_prepare()
359 switch (atomic_read(&per_cpu(cpu_hotplug_state, cpu))) { in cpu_check_up_prepare()
364 atomic_set(&per_cpu(cpu_hotplug_state, cpu), CPU_UP_PREPARE); in cpu_check_up_prepare()
408 void cpu_set_state_online(int cpu) in cpu_set_state_online() argument
410 (void)atomic_xchg(&per_cpu(cpu_hotplug_state, cpu), CPU_ONLINE); in cpu_set_state_online()
418 bool cpu_wait_death(unsigned int cpu, int seconds) in cpu_wait_death() argument
428 if (atomic_read(&per_cpu(cpu_hotplug_state, cpu)) == CPU_DEAD) in cpu_wait_death()
433 while (atomic_read(&per_cpu(cpu_hotplug_state, cpu)) != CPU_DEAD) { in cpu_wait_death()
441 oldstate = atomic_read(&per_cpu(cpu_hotplug_state, cpu)); in cpu_wait_death()
445 atomic_set(&per_cpu(cpu_hotplug_state, cpu), CPU_POST_DEAD); in cpu_wait_death()
448 if (atomic_cmpxchg(&per_cpu(cpu_hotplug_state, cpu), in cpu_wait_death()
469 int cpu = smp_processor_id(); in cpu_report_death() local
472 oldstate = atomic_read(&per_cpu(cpu_hotplug_state, cpu)); in cpu_report_death()
477 } while (atomic_cmpxchg(&per_cpu(cpu_hotplug_state, cpu), in cpu_report_death()
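The last five groups (cpu_report_state(), cpu_check_up_prepare(), cpu_set_state_online(), cpu_wait_death() and cpu_report_death()) implement the per-CPU hotplug state handshake used by architectures whose dying CPUs cannot be waited for synchronously. A rough sketch of how an architecture port might wire them up; the arch_* functions are placeholders, only the cpu_* calls are the API listed above:

/* bring-up, on the controlling CPU (e.g. from the arch __cpu_up path) */
int arch_bring_up_cpu(unsigned int cpu)
{
	int ret = cpu_check_up_prepare(cpu);	/* reject or recycle a CPU that never reported death */

	if (ret)
		return ret;
	return arch_kick_secondary(cpu);	/* placeholder for the real wake-up of the CPU */
}

/* early code on the freshly started CPU itself */
void arch_secondary_start_kernel(void)
{
	cpu_set_state_online(smp_processor_id());	/* flip cpu_hotplug_state to CPU_ONLINE */
	/* ... eventually enter the idle loop ... */
}

/* tear-down, on the controlling CPU (e.g. from the arch __cpu_die path) */
void arch_cpu_die(unsigned int cpu)
{
	if (!cpu_wait_death(cpu, 5))		/* wait up to about five seconds for CPU_DEAD */
		pr_err("CPU %u did not report death\n", cpu);
}

/* last words of the dying CPU before it is powered off */
void arch_cpu_play_dead(void)
{
	(void)cpu_report_death();		/* advertise CPU_DEAD (or CPU_DEAD_FROZEN) to the waiter */
	/* ... arch-specific low-power or offline spin ... */
}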