Searched refs:cpu_active_mask (Results 1 – 13 of 13) sorted by relevance
/kernel/linux/linux-5.10/include/linux/ |
D | cpumask.h |
      98  #define cpu_active_mask ((const struct cpumask *)&__cpu_active_mask)  macro
     122  #define num_active_cpus() cpumask_weight(cpu_active_mask)
     126  #define cpu_active(cpu) cpumask_test_cpu((cpu), cpu_active_mask)
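The three hits above are the core definitions: cpu_active_mask exposes __cpu_active_mask read-only, and num_active_cpus() / cpu_active(cpu) are thin wrappers over cpumask_weight() and cpumask_test_cpu(). A minimal sketch of how a caller might use these helpers; the module and function names are illustrative, not from the tree:

```c
/* Hypothetical debug module: dumps the active mask via the helpers
 * defined in include/linux/cpumask.h (hit lines 98, 122, 126 above). */
#include <linux/module.h>
#include <linux/cpumask.h>
#include <linux/printk.h>

static int __init active_mask_demo_init(void)
{
    int cpu;

    /* %*pbl prints a cpumask as a CPU range list, e.g. "0-3,6" */
    pr_info("active CPUs: %*pbl (%u of %u possible)\n",
            cpumask_pr_args(cpu_active_mask),
            num_active_cpus(), num_possible_cpus());

    for_each_cpu(cpu, cpu_active_mask)
        pr_info("cpu%d: cpu_active()=%d\n", cpu, cpu_active(cpu));

    return 0;
}

static void __exit active_mask_demo_exit(void) { }

module_init(active_mask_demo_init);
module_exit(active_mask_demo_exit);
MODULE_LICENSE("GPL");
```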
|
/kernel/linux/linux-5.10/kernel/cgroup/ |
D | cpuset.c |
    1015  !cpumask_equal(top_cpuset.effective_cpus, cpu_active_mask))  in rebuild_sched_domains_locked()
    1031  cpu_active_mask)) {  in rebuild_sched_domains_locked()
    1097  cpumask_and(new_cpus, new_cpus, cpu_active_mask);  in compute_effective_cpumask()
    1233  cpu_active_mask))  in update_parent_subparts_cpumask()
    1309  cpumask_and(tmp->delmask, tmp->delmask, cpu_active_mask);  in update_parent_subparts_cpumask()
    1546  cpu_active_mask);  in update_cpumask()
    3186  cpumask_copy(&new_cpus, cpu_active_mask);  in cpuset_hotplug_workfn()
    3309  cpumask_copy(top_cpuset.cpus_allowed, cpu_active_mask);  in cpuset_init_smp()
    3313  cpumask_copy(top_cpuset.effective_cpus, cpu_active_mask);  in cpuset_init_smp()
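Most of the cpuset.c hits follow one pattern: whatever CPUs a cpuset asks for are clamped to cpu_active_mask (compute_effective_cpumask(), the hotplug workfn), and the top cpuset simply mirrors the active mask at init. A hedged sketch of that clamp-with-fallback idiom using only generic cpumask helpers; the function and parameter names are illustrative, not the cpuset.c originals:

```c
#include <linux/cpumask.h>
#include <linux/errno.h>

/*
 * Clamp a requested CPU set to the CPUs that are currently active,
 * falling back to the full active mask if the intersection is empty.
 * Mirrors the shape of the cpuset.c hits above; illustrative sketch only.
 */
static int clamp_to_active(struct cpumask *effective,
                           const struct cpumask *requested)
{
    /* cpumask_and() returns 0 when the resulting mask is empty */
    if (!cpumask_and(effective, requested, cpu_active_mask)) {
        cpumask_copy(effective, cpu_active_mask);
        return -EINVAL;
    }
    return 0;
}
```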
|
/kernel/linux/linux-5.10/arch/mips/kernel/ |
D | mips-mt-fpaff.c | 181 cpumask_and(&mask, &allowed, cpu_active_mask); in mipsmt_sys_sched_getaffinity()
|
/kernel/linux/patches/linux-5.10/yangfan_patch/ |
D | kernel.patch |
     245  + * of cpu_active_mask.
     257  + if (WARN_ON(!cpumask_and(pmask, possible_mask, cpu_active_mask)))
     258  + cpumask_copy(pmask, cpu_active_mask);
     345  cpumask_and(new_cpus, new_cpus, cpu_active_mask);
     631  cpumask_copy(&new_cpus, cpu_active_mask);
    1075  + for_each_cpu_and(i, cpu_active_mask,
    1087  + default_cpu = cpumask_any(cpu_active_mask);
    1143  + !cpumask_intersects(cpu_active_mask, task_cpu_possible_mask(p)))
|
D | include.patch | 3715 + __entry->active_cpus = cpumask_bits(cpu_active_mask)[0];
|
/kernel/linux/linux-5.10/kernel/sched/ |
D | deadline.c |
      85  if (cpumask_subset(rd->span, cpu_active_mask))  in dl_bw_cpus()
      90  for_each_cpu_and(i, rd->span, cpu_active_mask)  in dl_bw_cpus()
     104  for_each_cpu_and(i, rd->span, cpu_active_mask)  in __dl_bw_capacity()
     606  cpu = cpumask_any_and(cpu_active_mask, p->cpus_ptr);  in dl_task_offline_migration()
     619  cpu = cpumask_any(cpu_active_mask);  in dl_task_offline_migration()
    2844  dest_cpu = cpumask_any_and(cpu_active_mask, cs_cpus_allowed);  in dl_task_can_attach()
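The deadline.c hits count or select CPUs only from the active subset of a root-domain span: dl_bw_cpus() short-circuits when the whole span is already active, otherwise it walks the intersection. A sketch of that counting idiom; the function name is illustrative and @span stands in for rd->span:

```c
#include <linux/cpumask.h>

/*
 * Count the active CPUs in @span, taking the cheap path when the whole
 * span is already active (the dl_bw_cpus() shape listed above).
 * Illustrative sketch; not the deadline.c original.
 */
static int active_cpus_in(const struct cpumask *span)
{
    int i, cpus = 0;

    if (cpumask_subset(span, cpu_active_mask))
        return cpumask_weight(span);

    for_each_cpu_and(i, span, cpu_active_mask)
        cpus++;

    return cpus;
}
```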
|
D | topology.c |
     480  if (cpumask_test_cpu(rq->cpu, cpu_active_mask))  in rq_attach_root()
    2321  cpumask_and(doms_new[0], cpu_active_mask,  in partition_sched_domains_locked()
    2356  cpumask_and(doms_new[0], cpu_active_mask,  in partition_sched_domains_locked()
|
D | core.c |
    1939  const struct cpumask *cpu_valid_mask = cpu_active_mask;  in __set_cpus_allowed_ptr()
    2004  !cpumask_intersects(new_mask, cpu_active_mask) &&  in __set_cpus_allowed_ptr()
    6171  dest_cpu = cpumask_any_and(cpu_active_mask, &allowed_mask);  in sched_setaffinity()
    6255  cpumask_and(mask, &p->cpus_mask, cpu_active_mask);  in sched_getaffinity()
    7579  sched_init_domains(cpu_active_mask);  in sched_init_smp()
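In core.c, cpu_active_mask is the validity filter for affinity changes: __set_cpus_allowed_ptr() rejects a new mask that does not intersect it, and sched_setaffinity() / sched_getaffinity() pick or report CPUs from the intersection. A hedged sketch of the pick-a-destination step; the function and parameter names are illustrative:

```c
#include <linux/cpumask.h>
#include <linux/errno.h>

/*
 * Pick any CPU that is both allowed and active, as in the
 * sched_setaffinity() / dl_task_offline_migration() hits above.
 * Illustrative sketch, not the core.c original.
 */
static int pick_active_dest(const struct cpumask *allowed)
{
    unsigned int dest = cpumask_any_and(cpu_active_mask, allowed);

    /* cpumask_any_and() returns >= nr_cpu_ids when no CPU matches */
    if (dest >= nr_cpu_ids)
        return -EINVAL;

    return dest;
}
```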
|
D | sched.h | 2476 for_each_cpu_and(i, rd->span, cpu_active_mask) { in __dl_update()
|
D | fair.c | 9902 cpumask_and(cpus, sched_domain_span(sd), cpu_active_mask); in load_balance()
|
/kernel/linux/linux-5.10/arch/powerpc/perf/ |
D | hv-gpci.c | 306 target = cpumask_last(cpu_active_mask); in ppc_hv_gpci_cpu_offline()
|
D | hv-24x7.c | 1680 target = cpumask_last(cpu_active_mask); in ppc_hv_24x7_cpu_offline()
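Both powerpc perf drivers (hv-gpci.c and hv-24x7.c above) use the same hotplug idiom: when the CPU that owns the PMU context goes offline, they pick a replacement from cpu_active_mask and migrate the perf context to it. A hedged sketch of that offline-callback shape; the owner mask, PMU variable, and function name are placeholders, not the driver originals:

```c
#include <linux/cpumask.h>
#include <linux/perf_event.h>

/* Placeholders standing in for the driver's PMU and owner-CPU mask. */
static struct cpumask pmu_owner_mask;
static struct pmu demo_pmu;

/*
 * CPU-offline callback shape used by the powerpc perf drivers above:
 * if the dying CPU owned the PMU context, hand it to another active CPU.
 */
static int demo_pmu_cpu_offline(unsigned int cpu)
{
    unsigned int target;

    if (!cpumask_test_and_clear_cpu(cpu, &pmu_owner_mask))
        return 0;

    /* The dying CPU has already left cpu_active_mask at this point. */
    target = cpumask_last(cpu_active_mask);
    if (target >= nr_cpu_ids)
        return -1;

    cpumask_set_cpu(target, &pmu_owner_mask);
    perf_pmu_migrate_context(&demo_pmu, cpu, target);
    return 0;
}
```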
|
/kernel/linux/linux-5.10/kernel/ |
D | stop_machine.c | 659 queue_stop_cpus_work(cpu_active_mask, multi_cpu_stop, &msdata, in stop_machine_from_inactive_cpu()
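The stop_machine.c hit is the internal path: stop_machine_from_inactive_cpu() queues its stop work only on cpu_active_mask because the calling CPU is not active yet and participates separately. For comparison, a hedged sketch of the exported stop_machine() interface run against the active mask; the callback and wrapper names are illustrative:

```c
#include <linux/cpumask.h>
#include <linux/stop_machine.h>

/* Runs on every CPU in the mask passed to stop_machine(), while all
 * online CPUs are held with interrupts disabled. Illustrative only. */
static int demo_stop_fn(void *data)
{
    return 0;
}

/* Illustrative wrapper: run demo_stop_fn() on all currently active CPUs. */
static int demo_run_on_active_cpus(void)
{
    return stop_machine(demo_stop_fn, NULL, cpu_active_mask);
}
```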
|