Lines matching references to the identifier d (a struct s_data):
1441 static void __free_domain_allocs(struct s_data *d, enum s_alloc what, in __free_domain_allocs() (d is an argument)
1446 if (!atomic_read(&d->rd->refcount)) in __free_domain_allocs()
1447 free_rootdomain(&d->rd->rcu); in __free_domain_allocs()
1450 free_percpu(d->sd); in __free_domain_allocs()
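
For context, the four hits above are the teardown half of a staged-allocation pattern. Below is a reconstruction of the whole helper based on mainline kernel/sched/topology.c; the file and surrounding code are inferred from the function names, and details such as the fallthrough annotations vary by kernel version:

    static void __free_domain_allocs(struct s_data *d, enum s_alloc what,
                                     const struct cpumask *cpu_map)
    {
            switch (what) {
            case sa_rootdomain:
                    /* Only free the root domain if nobody holds a reference. */
                    if (!atomic_read(&d->rd->refcount))
                            free_rootdomain(&d->rd->rcu);
                    fallthrough;
            case sa_sd:
                    free_percpu(d->sd);
                    fallthrough;
            case sa_sd_storage:
                    __sdt_free(cpu_map);
                    fallthrough;
            case sa_none:
                    break;
            }
    }

Each case deliberately falls through to the next, so passing in the highest stage that was reached unwinds everything allocated up to that point.
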
1461 __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map) in __visit_domain_allocation_hell() (d is an argument)
1463 memset(d, 0, sizeof(*d)); in __visit_domain_allocation_hell()
1467 d->sd = alloc_percpu(struct sched_domain *); in __visit_domain_allocation_hell()
1468 if (!d->sd) in __visit_domain_allocation_hell()
1470 d->rd = alloc_rootdomain(); in __visit_domain_allocation_hell()
1471 if (!d->rd) in __visit_domain_allocation_hell()
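
The allocation side is the mirror image: __visit_domain_allocation_hell() attempts each stage in order and, on failure, returns the stage at which cleanup must begin. A sketch of the supporting types and the function, again reconstructed from mainline (enum and struct layouts as in recent kernels; treat the exact members as version-dependent):

    enum s_alloc {
            sa_rootdomain,
            sa_sd,
            sa_sd_storage,
            sa_none,
    };

    struct s_data {
            struct sched_domain * __percpu *sd;
            struct root_domain             *rd;
    };

    static enum s_alloc
    __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map)
    {
            memset(d, 0, sizeof(*d));

            if (__sdt_alloc(cpu_map))
                    return sa_sd_storage;
            d->sd = alloc_percpu(struct sched_domain *);
            if (!d->sd)
                    return sa_sd_storage;
            d->rd = alloc_rootdomain();
            if (!d->rd)
                    return sa_sd;

            return sa_rootdomain;
    }

Returning sa_sd when alloc_rootdomain() fails makes __free_domain_allocs() start at the sa_sd case: it frees d->sd and the per-CPU topology storage, but never touches the root domain that was never allocated. A failed __sdt_alloc() still returns sa_sd_storage, because __sdt_free() is written to cope with a partially completed allocation.
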
2245 struct s_data d; in build_sched_domains() (d is a local)
2253 alloc_state = __visit_domain_allocation_hell(&d, cpu_map); in build_sched_domains()
2272 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
2282 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2302 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2357 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2367 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
2370 if (rq->cpu_capacity_orig > READ_ONCE(d.rd->max_cpu_capacity)) in build_sched_domains()
2371 WRITE_ONCE(d.rd->max_cpu_capacity, rq->cpu_capacity_orig); in build_sched_domains()
2373 cpu_attach_domain(sd, d.rd, i); in build_sched_domains()
2388 __free_domain_allocs(&d, alloc_state, cpu_map); in build_sched_domains()
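
Tying the remaining hits together: build_sched_domains() owns the s_data instance for the duration of one rebuild. An abridged sketch of its skeleton follows; several passes (asymmetric-capacity and NUMA handling among them) are elided, and while the helper names are real mainline identifiers, their exact call sites shift between versions:

    static int
    build_sched_domains(const struct cpumask *cpu_map, struct sched_domain_attr *attr)
    {
            enum s_alloc alloc_state = sa_none;
            struct sched_domain *sd;
            struct s_data d;
            int i, ret = -ENOMEM;

            alloc_state = __visit_domain_allocation_hell(&d, cpu_map);
            if (alloc_state != sa_rootdomain)
                    goto error;

            /* Set up domains for the CPUs specified by the cpu_map: */
            for_each_cpu(i, cpu_map) {
                    struct sched_domain_topology_level *tl;

                    sd = NULL;
                    for_each_sd_topology(tl) {
                            sd = build_sched_domain(tl, cpu_map, attr, sd, i);
                            /* Remember the bottom-most level for the walks below: */
                            if (tl == sched_domain_topology)
                                    *per_cpu_ptr(d.sd, i) = sd;
                    }
            }

            /* Build the groups for the domains: */
            for_each_cpu(i, cpu_map) {
                    for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) {
                            sd->span_weight = cpumask_weight(sched_domain_span(sd));
                            if (sd->flags & SD_OVERLAP) {
                                    if (build_overlap_sched_groups(sd, i))
                                            goto error;
                            } else {
                                    if (build_sched_groups(sd, i))
                                            goto error;
                            }
                    }
            }

            /* Calculate CPU capacity; claim_allocations() NULLs out per-CPU
             * storage that is now in use, so the final teardown skips it: */
            for (i = nr_cpumask_bits - 1; i >= 0; i--) {
                    if (!cpumask_test_cpu(i, cpu_map))
                            continue;

                    for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) {
                            claim_allocations(i, sd);
                            init_sched_groups_capacity(i, sd);
                    }
            }

            /* Attach the domains: */
            rcu_read_lock();
            for_each_cpu(i, cpu_map) {
                    struct rq *rq = cpu_rq(i);

                    sd = *per_cpu_ptr(d.sd, i);

                    /* READ_ONCE()/WRITE_ONCE() avoid load/store tearing: */
                    if (rq->cpu_capacity_orig > READ_ONCE(d.rd->max_cpu_capacity))
                            WRITE_ONCE(d.rd->max_cpu_capacity, rq->cpu_capacity_orig);

                    cpu_attach_domain(sd, d.rd, i);
            }
            rcu_read_unlock();

            ret = 0;
    error:
            __free_domain_allocs(&d, alloc_state, cpu_map);

            return ret;
    }

The single exit path is what makes the enum protocol pay off: on any failure, alloc_state names the stage to unwind from; on success it is sa_rootdomain, and running the full teardown is still harmless because cpu_attach_domain() has taken references on d.rd and claim_allocations() has already detached the storage that is now live.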