Lines matching references to d (the struct s_data instance used while building and tearing down sched domains):
1222 static void __free_domain_allocs(struct s_data *d, enum s_alloc what, in __free_domain_allocs() argument
1227 if (!atomic_read(&d->rd->refcount)) in __free_domain_allocs()
1228 free_rootdomain(&d->rd->rcu); in __free_domain_allocs()
1231 free_percpu(d->sd); in __free_domain_allocs()
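
For orientation, these __free_domain_allocs() references sit inside a teardown that unwinds only as much as was actually allocated. Below is a minimal sketch reconstructed around the listed lines, assuming the usual fall-through switch over enum s_alloc; the sa_* labels and the __sdt_free() step are assumptions not shown in the listing above:

	static void __free_domain_allocs(struct s_data *d, enum s_alloc what,
					 const struct cpumask *cpu_map)
	{
		switch (what) {
		case sa_rootdomain:
			/* Drop the root domain only if nobody else holds a reference. */
			if (!atomic_read(&d->rd->refcount))
				free_rootdomain(&d->rd->rcu);
			/* fall through */
		case sa_sd:
			free_percpu(d->sd);
			/* fall through */
		case sa_sd_storage:
			__sdt_free(cpu_map);	/* assumed per-topology-level storage teardown */
			/* fall through */
		case sa_none:
			break;
		}
	}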
1242 __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map) in __visit_domain_allocation_hell() argument
1244 memset(d, 0, sizeof(*d)); in __visit_domain_allocation_hell()
1248 d->sd = alloc_percpu(struct sched_domain *); in __visit_domain_allocation_hell()
1249 if (!d->sd) in __visit_domain_allocation_hell()
1251 d->rd = alloc_rootdomain(); in __visit_domain_allocation_hell()
1252 if (!d->rd) in __visit_domain_allocation_hell()
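
The allocation side fills the same struct s_data in order and reports how far it got through its enum s_alloc return value, which is exactly what the teardown above consumes. A sketch under that assumption; the __sdt_alloc() call and the specific sa_* return values come from the surrounding scheduler code, not from this listing:

	static enum s_alloc
	__visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map)
	{
		memset(d, 0, sizeof(*d));

		/* Assumed first step: per-topology-level sched_domain/sched_group storage. */
		if (__sdt_alloc(cpu_map))
			return sa_sd_storage;
		d->sd = alloc_percpu(struct sched_domain *);
		if (!d->sd)
			return sa_sd_storage;
		d->rd = alloc_rootdomain();
		if (!d->rd)
			return sa_sd;
		return sa_rootdomain;	/* fully allocated */
	}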
1976 struct s_data d; in build_sched_domains() local
1985 alloc_state = __visit_domain_allocation_hell(&d, cpu_map); in build_sched_domains()
2009 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
2019 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2036 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2046 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
2049 if (rq->cpu_capacity_orig > READ_ONCE(d.rd->max_cpu_capacity)) in build_sched_domains()
2050 WRITE_ONCE(d.rd->max_cpu_capacity, rq->cpu_capacity_orig); in build_sched_domains()
2052 cpu_attach_domain(sd, d.rd, i); in build_sched_domains()
2067 __free_domain_allocs(&d, alloc_state, cpu_map); in build_sched_domains()
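
Read together, the build_sched_domains() references trace the lifetime of d: allocate everything, record the top sched_domain for each CPU in d.sd, walk each hierarchy, fold the largest cpu_capacity_orig into d.rd->max_cpu_capacity, attach the domains to d.rd, and finally hand whatever is still locally owned back to __free_domain_allocs(). A condensed, non-authoritative sketch of that control flow; the group/capacity helpers are elided into comments and details vary between kernel versions:

	static int
	build_sched_domains(const struct cpumask *cpu_map, struct sched_domain_attr *attr)
	{
		enum s_alloc alloc_state = sa_none;
		struct sched_domain *sd;
		struct s_data d;
		int i, ret = -ENOMEM;

		alloc_state = __visit_domain_allocation_hell(&d, cpu_map);
		if (alloc_state != sa_rootdomain)
			goto error;

		/* Build a sched_domain hierarchy per CPU; remember its top level in d.sd. */
		for_each_cpu(i, cpu_map) {
			sd = NULL;
			/* ... sd = build_sched_domain(...) for each topology level (elided) ... */
			*per_cpu_ptr(d.sd, i) = sd;
		}

		/* Walk each hierarchy to build groups and initialize capacities (elided). */
		for_each_cpu(i, cpu_map) {
			for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) {
				/* ... build_sched_groups(), init_sched_groups_capacity() ... */
			}
		}

		/* Attach the domains, tracking the largest CPU capacity in the root domain. */
		rcu_read_lock();
		for_each_cpu(i, cpu_map) {
			struct rq *rq = cpu_rq(i);

			sd = *per_cpu_ptr(d.sd, i);
			if (rq->cpu_capacity_orig > READ_ONCE(d.rd->max_cpu_capacity))
				WRITE_ONCE(d.rd->max_cpu_capacity, rq->cpu_capacity_orig);
			cpu_attach_domain(sd, d.rd, i);
		}
		rcu_read_unlock();

		ret = 0;
	error:
		/* Frees only what is still owned here; attached domains hold their own refs. */
		__free_domain_allocs(&d, alloc_state, cpu_map);
		return ret;
	}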