Searched refs:cpus_allowed (Results 1 – 8 of 8) sorted by relevance
/kernel/ |
D | cpuset.c | 95 cpumask_var_t cpus_allowed; /* CPUs allowed to tasks in cpuset */ member 303 while (cs && !cpumask_intersects(cs->cpus_allowed, cpu_online_mask)) in guarantee_online_cpus() 306 cpumask_and(pmask, cs->cpus_allowed, cpu_online_mask); in guarantee_online_cpus() 419 return cpumask_subset(p->cpus_allowed, q->cpus_allowed) && in is_cpuset_subset() 437 if (!alloc_cpumask_var(&trial->cpus_allowed, GFP_KERNEL)) { in alloc_trial_cpuset() 441 cpumask_copy(trial->cpus_allowed, cs->cpus_allowed); in alloc_trial_cpuset() 452 free_cpumask_var(trial->cpus_allowed); in free_trial_cpuset() 505 cpumask_intersects(trial->cpus_allowed, c->cpus_allowed)) in validate_change() 515 if (cpumask_empty(trial->cpus_allowed) || in validate_change() 530 return cpumask_intersects(a->cpus_allowed, b->cpus_allowed); in cpusets_overlap() [all …]
|
D | sched_cpupri.c | 81 if (cpumask_any_and(&p->cpus_allowed, vec->mask) >= nr_cpu_ids) in cpupri_find() 84 cpumask_and(lowest_mask, &p->cpus_allowed, vec->mask); in cpupri_find()
|
D | sched.c | 2116 &p->cpus_allowed)) in find_idlest_group() 2164 for_each_cpu_and(i, sched_group_cpus(group), &p->cpus_allowed) { in find_idlest_cpu() 2866 if (!cpumask_test_cpu(dest_cpu, &p->cpus_allowed) in sched_migrate_task() 2931 if (!cpumask_test_cpu(this_cpu, &p->cpus_allowed)) { in can_migrate_task() 3553 &busiest->curr->cpus_allowed)) { in load_balance() 3730 if (!cpumask_test_cpu(this_cpu, &busiest->curr->cpus_allowed)) { in load_balance_newidle() 5584 cpumask_var_t cpus_allowed, new_mask; in sched_setaffinity() local 5606 if (!alloc_cpumask_var(&cpus_allowed, GFP_KERNEL)) { in sched_setaffinity() 5622 cpuset_cpus_allowed(p, cpus_allowed); in sched_setaffinity() 5623 cpumask_and(new_mask, in_mask, cpus_allowed); in sched_setaffinity() [all …]
|
D | kthread.c | 184 k->cpus_allowed = cpumask_of_cpu(cpu); in kthread_bind()
|
D | sched_fair.c | 1060 cpu_isset(chosen_wakeup_cpu, p->cpus_allowed)) in wake_idle() 1080 &p->cpus_allowed) { in wake_idle() 1280 if (unlikely(!cpumask_test_cpu(this_cpu, &p->cpus_allowed))) in select_task_rq_fair()
|
D | sched_rt.c | 922 (cpu < 0 || cpumask_test_cpu(cpu, &p->cpus_allowed)) && in pick_rt_task() 1064 &task->cpus_allowed) || in find_lock_lowest_rq() 1337 cpumask_copy(&p->cpus_allowed, new_mask); in set_cpus_allowed_rt()
|
D | fork.c | 1212 p->cpus_allowed = current->cpus_allowed; in copy_process() 1214 if (unlikely(!cpu_isset(task_cpu(p), p->cpus_allowed) || in copy_process()
|
D | cpu.c | 228 cpumask_copy(old_allowed, &current->cpus_allowed); in _cpu_down()
|