Lines Matching refs:cpus
247 unsigned int cpus = min_t(unsigned int, num_online_cpus(), 8); in get_update_sysctl_factor() local
255 factor = cpus; in get_update_sysctl_factor()
259 factor = 1 + ilog2(cpus); in get_update_sysctl_factor()
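
The get_update_sysctl_factor() hits above (lines 247-259) cap the online CPU count at 8 and then fold it into a scaling factor, either linearly or as 1 + ilog2(cpus). A minimal userspace sketch of that arithmetic follows; the helper names are illustrative, not the kernel's:

/*
 * Illustrative sketch (not kernel code) of the scaling seen at lines
 * 247-259: clamp the CPU count to 8, then derive a factor either
 * linearly or as 1 + floor(log2(cpus)).
 */
#include <stdio.h>

static unsigned int ilog2_u(unsigned int v)	/* floor(log2(v)), v > 0 */
{
	unsigned int r = 0;

	while (v >>= 1)
		r++;
	return r;
}

static unsigned int scaling_factor(unsigned int online_cpus, int logarithmic)
{
	unsigned int cpus = online_cpus < 8 ? online_cpus : 8;

	return logarithmic ? 1 + ilog2_u(cpus) : cpus;
}

int main(void)
{
	printf("4 cpus, log scaling  -> %u\n", scaling_factor(4, 1));	/* 3 */
	printf("16 cpus, log scaling -> %u\n", scaling_factor(16, 1));	/* 4, capped at 8 */
	return 0;
}

The cap at 8 in the sketch mirrors the min_t() at line 247, which keeps the factor from growing without bound on large machines.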
6713 static int select_idle_core(struct task_struct *p, int core, struct cpumask *cpus, int *idle_cpu) in select_idle_core() argument
6737 cpumask_andnot(cpus, cpus, cpu_smt_mask(core)); in select_idle_core()
6769 static inline int select_idle_core(struct task_struct *p, int core, struct cpumask *cpus, int *idle… in select_idle_core() argument
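
The select_idle_core() hits (lines 6713-6737) show the scan mask being pruned with cpumask_andnot() against a core's SMT mask once that core has been inspected. A rough bitmask sketch of that pruning, with uint64_t standing in for struct cpumask and an assumed sibling layout:

/*
 * Sketch of the cpumask_andnot() step at line 6737: drop all SMT
 * siblings of an already-scanned core from the remaining scan mask.
 * The sibling layout below is an assumption for the example.
 */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t cpus = 0xff;		/* CPUs 0-7 still to scan           */
	uint64_t smt_mask = 0x11;	/* assume CPUs 0 and 4 share a core */

	cpus &= ~smt_mask;		/* cpumask_andnot(cpus, cpus, smt)  */

	printf("remaining scan mask: 0x%llx\n",
	       (unsigned long long)cpus);	/* 0xee */
	return 0;
}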
6788 struct cpumask *cpus = this_cpu_cpumask_var_ptr(select_rq_mask); in select_idle_cpu() local
6796 cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr); in select_idle_cpu()
6841 for_each_cpu_wrap(cpu, cpus, target + 1) { in select_idle_cpu()
6843 i = select_idle_core(p, cpu, cpus, &idle_cpu); in select_idle_cpu()
6885 struct cpumask *cpus; in select_idle_capacity() local
6887 cpus = this_cpu_cpumask_var_ptr(select_rq_mask); in select_idle_capacity()
6888 cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr); in select_idle_capacity()
6894 for_each_cpu_wrap(cpu, cpus, target) { in select_idle_capacity()
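
Both select_idle_cpu() and select_idle_capacity() build cpus as the intersection of the sched-domain span and p->cpus_ptr, then scan it wrapping around a target CPU. A small userspace sketch of that pattern, again with uint64_t in place of struct cpumask and illustrative names:

/*
 * Sketch of the cpumask_and() + for_each_cpu_wrap() pattern at lines
 * 6796-6841 and 6888-6894: intersect the domain span with the task's
 * allowed CPUs, then walk the result starting at (and wrapping around)
 * a target CPU, returning the first eligible one.
 */
#include <stdint.h>
#include <stdio.h>

#define NR_CPUS 64

static int next_candidate(uint64_t domain_span, uint64_t allowed,
			  unsigned int target)
{
	uint64_t cpus = domain_span & allowed;	/* cpumask_and() */
	unsigned int i;

	/* for_each_cpu_wrap(cpu, cpus, target) */
	for (i = 0; i < NR_CPUS; i++) {
		unsigned int cpu = (target + i) % NR_CPUS;

		if (cpus & (1ULL << cpu))
			return cpu;
	}
	return -1;
}

int main(void)
{
	/* domain holds CPUs 0-7, task may only run on CPUs 5-6, target is 2 */
	printf("candidate = %d\n", next_candidate(0xffULL, 0x60ULL, 2));	/* 5 */
	return 0;
}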
7298 struct cpumask *cpus = this_cpu_cpumask_var_ptr(select_rq_mask); in find_energy_efficient_cpu() local
7357 cpumask_and(cpus, perf_domain_span(pd), cpu_online_mask); in find_energy_efficient_cpu()
7359 if (cpumask_empty(cpus)) in find_energy_efficient_cpu()
7363 cpu = cpumask_first(cpus); in find_energy_efficient_cpu()
7370 for_each_cpu(cpu, cpus) { in find_energy_efficient_cpu()
7432 eenv_pd_busy_time(&eenv, cpus, p); in find_energy_efficient_cpu()
7434 base_energy = compute_energy(&eenv, pd, cpus, p, -1); in find_energy_efficient_cpu()
7438 prev_delta = compute_energy(&eenv, pd, cpus, p, in find_energy_efficient_cpu()
7462 cur_delta = compute_energy(&eenv, pd, cpus, p, in find_energy_efficient_cpu()
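
In find_energy_efficient_cpu(), compute_energy() is called once without the task (base_energy, line 7434) and once per candidate placement. The surrounding logic, not visible in these hits, compares per-placement deltas over that base; a toy sketch of that comparison, assuming that shape and made-up numbers:

/*
 * Rough shape of the decision built from the compute_energy() calls at
 * lines 7434-7462: energy with the task on a candidate CPU minus the
 * energy without the task gives a delta, and the candidate only wins
 * if its delta is strictly below the previous CPU's.  Illustrative only.
 */
#include <stdio.h>

struct placement {
	unsigned long base;		/* energy without the task       */
	unsigned long with_prev;	/* energy with task on prev CPU  */
	unsigned long with_cand;	/* energy with task on candidate */
};

static int candidate_wins(const struct placement *p)
{
	unsigned long prev_delta = p->with_prev - p->base;
	unsigned long cur_delta  = p->with_cand - p->base;

	return cur_delta < prev_delta;
}

int main(void)
{
	struct placement p = { .base = 100, .with_prev = 180, .with_cand = 140 };

	printf("migrate = %d\n", candidate_wins(&p));	/* 1 */
	return 0;
}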
8272 struct cpumask *cpus; member
8442 for_each_cpu_and(cpu, env->dst_grpmask, env->cpus) { in can_migrate_task()
9366 for_each_cpu_and(i, sched_group_span(group), env->cpus) { in update_sg_lb_stats()
9888 struct cpumask *cpus = this_cpu_cpumask_var_ptr(select_rq_mask); in find_idlest_group() local
9890 cpumask_and(cpus, sched_group_span(local), p->cpus_ptr); in find_idlest_group()
9891 imb_numa_nr = min(cpumask_weight(cpus), sd->imb_numa_nr); in find_idlest_group()
10407 trace_android_rvh_find_busiest_queue(env->dst_cpu, group, env->cpus, in find_busiest_queue()
10412 for_each_cpu_and(i, sched_group_span(group), env->cpus) { in find_busiest_queue()
10614 if (!cpumask_test_cpu(env->dst_cpu, env->cpus)) in should_we_balance()
10631 for_each_cpu_and(cpu, group_balance_mask(sg), env->cpus) { in should_we_balance()
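
should_we_balance() first checks that the destination CPU is still in env->cpus (line 10614), then walks the group balance mask intersected with env->cpus (the for_each_cpu_and() at line 10631). A simplified sketch of that shape, assuming a "first idle CPU runs the balance pass" rule:

/*
 * Simplified sketch of the should_we_balance() pattern at lines
 * 10614-10631: bail out if dst_cpu is not in the working mask, then
 * scan balance_mask & env_cpus and let the first idle CPU do the work.
 * The idle-first rule here is a simplification of the real policy.
 */
#include <stdint.h>
#include <stdio.h>

static int should_we_balance(uint64_t env_cpus, uint64_t balance_mask,
			     uint64_t idle_mask, unsigned int dst_cpu)
{
	unsigned int cpu;

	if (!(env_cpus & (1ULL << dst_cpu)))	/* cpumask_test_cpu() */
		return 0;

	/* for_each_cpu_and(cpu, balance_mask, env_cpus) */
	for (cpu = 0; cpu < 64; cpu++) {
		if (!((balance_mask & env_cpus) & (1ULL << cpu)))
			continue;
		if (idle_mask & (1ULL << cpu))
			return cpu == dst_cpu;	/* first idle CPU wins */
	}
	return 0;
}

int main(void)
{
	/* CPUs 0-3 in play, CPUs 0-1 in the balance mask, CPU 1 idle */
	printf("%d\n", should_we_balance(0x0f, 0x03, 0x02, 1));	/* 1 */
	return 0;
}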
10656 struct cpumask *cpus = this_cpu_cpumask_var_ptr(load_balance_mask); in load_balance() local
10664 .cpus = cpus, in load_balance()
10669 cpumask_and(cpus, sched_domain_span(sd), cpu_active_mask); in load_balance()
10767 __cpumask_clear_cpu(env.dst_cpu, env.cpus); in load_balance()
10794 __cpumask_clear_cpu(cpu_of(busiest), cpus); in load_balance()
10803 if (!cpumask_subset(cpus, env.dst_grpmask)) { in load_balance()
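
The last load_balance() hits show the retry path: a CPU that turns out to be unusable is cleared from the working mask (lines 10767 and 10794), and the pass is redone while CPUs outside the destination group remain in that mask (line 10803). A bitmask sketch of that check, with uint64_t in place of struct cpumask:

/*
 * Sketch of the load_balance() retry pattern at lines 10767-10803:
 * clear an unusable CPU from the working mask, then redo the balance
 * only if the mask still contains CPUs outside the destination group.
 */
#include <stdint.h>
#include <stdio.h>

static int is_subset(uint64_t a, uint64_t b)	/* cpumask_subset(a, b) */
{
	return (a & ~b) == 0;
}

int main(void)
{
	uint64_t cpus = 0x0f;		/* domain CPUs still worth trying */
	uint64_t dst_grpmask = 0x03;	/* destination group's CPUs       */
	unsigned int busiest = 2;

	cpus &= ~(1ULL << busiest);	/* __cpumask_clear_cpu(busiest, cpus) */

	if (!is_subset(cpus, dst_grpmask))
		printf("redo: other groups still have candidate CPUs\n");
	else
		printf("done: only the local group is left\n");
	return 0;
}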