Lines matching refs: rd (struct root_domain)
347 struct root_domain *rd = cpu_rq(cpu)->rd; in build_perf_domains() local
375 if (rd->pd) in build_perf_domains()
406 tmp = rd->pd; in build_perf_domains()
407 rcu_assign_pointer(rd->pd, pd); in build_perf_domains()
415 tmp = rd->pd; in build_perf_domains()
416 rcu_assign_pointer(rd->pd, NULL); in build_perf_domains()
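
The rd->pd references above follow the usual RCU publish/retire pattern: save the old perf-domain list, publish the replacement with rcu_assign_pointer(), and retire the old list after a grace period. A minimal sketch of that pattern, reconstructed from the lines above (destroy_perf_domain_rcu() is taken from mainline and is an assumption for this tree):

	struct perf_domain *tmp;

	/* Attach the new perf-domain list (lines 406-407). */
	tmp = rd->pd;				/* remember the old list */
	rcu_assign_pointer(rd->pd, pd);		/* publish the new list to readers */
	if (tmp)				/* retire the old list after a grace period */
		call_rcu(&tmp->rcu, destroy_perf_domain_rcu);

The failure path (lines 415-416) is the same dance, publishing NULL instead of a new list.
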
428 struct root_domain *rd = container_of(rcu, struct root_domain, rcu); in free_rootdomain() local
430 cpupri_cleanup(&rd->cpupri); in free_rootdomain()
431 cpudl_cleanup(&rd->cpudl); in free_rootdomain()
432 free_cpumask_var(rd->dlo_mask); in free_rootdomain()
433 free_cpumask_var(rd->rto_mask); in free_rootdomain()
434 free_cpumask_var(rd->online); in free_rootdomain()
435 free_cpumask_var(rd->span); in free_rootdomain()
436 free_pd(rd->pd); in free_rootdomain()
437 kfree(rd); in free_rootdomain()
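
free_rootdomain() runs as an RCU callback: it receives a pointer to the rcu_head embedded in the root_domain and recovers the enclosing structure with container_of() before tearing it down, roughly in reverse order of initialization (cpupri, cpudl, the four cpumasks, the perf domains, then the struct itself). container_of() is the standard kernel macro; a simplified form:

	#include <stddef.h>	/* offsetof */

	/* Map a pointer to an embedded member back to its enclosing structure. */
	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))
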
440 void rq_attach_root(struct rq *rq, struct root_domain *rd) in rq_attach_root() argument
447 if (rq->rd) { in rq_attach_root()
448 old_rd = rq->rd; in rq_attach_root()
464 atomic_inc(&rd->refcount); in rq_attach_root()
465 rq->rd = rd; in rq_attach_root()
467 cpumask_set_cpu(rq->cpu, rd->span); in rq_attach_root()
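
rq_attach_root() swaps a runqueue from one root domain to another: the CPU leaves the old domain's span and drops its reference, then the new domain gains a reference and the CPU joins its span. A simplified sketch of the handoff, reconstructed from the listed lines (locking and the online-mask handling from mainline are elided):

	void rq_attach_root(struct rq *rq, struct root_domain *rd)
	{
		struct root_domain *old_rd = NULL;

		/* (rq lock held, interrupts disabled) */
		if (rq->rd) {
			old_rd = rq->rd;
			cpumask_clear_cpu(rq->cpu, old_rd->span);
			/* Keep old_rd for freeing below only if this was the last reference. */
			if (!atomic_dec_and_test(&old_rd->refcount))
				old_rd = NULL;
		}

		atomic_inc(&rd->refcount);		/* line 464 */
		rq->rd = rd;				/* line 465 */
		cpumask_set_cpu(rq->cpu, rd->span);	/* line 467 */

		/* (rq lock released) */
		if (old_rd)
			call_rcu(&old_rd->rcu, free_rootdomain);
	}
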
477 void sched_get_rd(struct root_domain *rd) in sched_get_rd() argument
479 atomic_inc(&rd->refcount); in sched_get_rd()
482 void sched_put_rd(struct root_domain *rd) in sched_put_rd() argument
484 if (!atomic_dec_and_test(&rd->refcount)) in sched_put_rd()
487 call_rcu(&rd->rcu, free_rootdomain); in sched_put_rd()
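
sched_get_rd()/sched_put_rd() are the plain reference-counting pair for a root domain; the final put defers the actual free to RCU so lock-free readers can finish with the structure. Reconstructed from the listed lines (the early return in the put is implied by the atomic_dec_and_test() test):

	void sched_get_rd(struct root_domain *rd)
	{
		atomic_inc(&rd->refcount);
	}

	void sched_put_rd(struct root_domain *rd)
	{
		if (!atomic_dec_and_test(&rd->refcount))
			return;		/* other holders remain */

		/* Last reference: free after an RCU grace period. */
		call_rcu(&rd->rcu, free_rootdomain);
	}
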
490 static int init_rootdomain(struct root_domain *rd) in init_rootdomain() argument
492 if (!zalloc_cpumask_var(&rd->span, GFP_KERNEL)) in init_rootdomain()
494 if (!zalloc_cpumask_var(&rd->online, GFP_KERNEL)) in init_rootdomain()
496 if (!zalloc_cpumask_var(&rd->dlo_mask, GFP_KERNEL)) in init_rootdomain()
498 if (!zalloc_cpumask_var(&rd->rto_mask, GFP_KERNEL)) in init_rootdomain()
502 rd->rto_cpu = -1; in init_rootdomain()
503 raw_spin_lock_init(&rd->rto_lock); in init_rootdomain()
504 init_irq_work(&rd->rto_push_work, rto_push_irq_work_func); in init_rootdomain()
507 init_dl_bw(&rd->dl_bw); in init_rootdomain()
508 if (cpudl_init(&rd->cpudl) != 0) in init_rootdomain()
511 if (cpupri_init(&rd->cpupri) != 0) in init_rootdomain()
514 init_max_cpu_capacity(&rd->max_cpu_capacity); in init_rootdomain()
519 cpudl_cleanup(&rd->cpudl); in init_rootdomain()
521 free_cpumask_var(rd->rto_mask); in init_rootdomain()
523 free_cpumask_var(rd->dlo_mask); in init_rootdomain()
525 free_cpumask_var(rd->online); in init_rootdomain()
527 free_cpumask_var(rd->span); in init_rootdomain()
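
The listing hides init_rootdomain()'s control flow: it is the kernel's goto-based unwinding, where each failed allocation jumps to a label that releases everything acquired so far, in reverse. A sketch with the labels filled in (label names are assumptions matching the mainline function):

	static int init_rootdomain(struct root_domain *rd)
	{
		if (!zalloc_cpumask_var(&rd->span, GFP_KERNEL))
			goto out;
		if (!zalloc_cpumask_var(&rd->online, GFP_KERNEL))
			goto free_span;
		if (!zalloc_cpumask_var(&rd->dlo_mask, GFP_KERNEL))
			goto free_online;
		if (!zalloc_cpumask_var(&rd->rto_mask, GFP_KERNEL))
			goto free_dlo_mask;

		/* RT push-IPI state (conditional on HAVE_RT_PUSH_IPI in mainline). */
		rd->rto_cpu = -1;
		raw_spin_lock_init(&rd->rto_lock);
		init_irq_work(&rd->rto_push_work, rto_push_irq_work_func);

		init_dl_bw(&rd->dl_bw);
		if (cpudl_init(&rd->cpudl) != 0)
			goto free_rto_mask;
		if (cpupri_init(&rd->cpupri) != 0)
			goto free_cpudl;

		init_max_cpu_capacity(&rd->max_cpu_capacity);
		return 0;

	free_cpudl:
		cpudl_cleanup(&rd->cpudl);
	free_rto_mask:
		free_cpumask_var(rd->rto_mask);
	free_dlo_mask:
		free_cpumask_var(rd->dlo_mask);
	free_online:
		free_cpumask_var(rd->online);
	free_span:
		free_cpumask_var(rd->span);
	out:
		return -ENOMEM;
	}
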
547 struct root_domain *rd; in alloc_rootdomain() local
549 rd = kzalloc(sizeof(*rd), GFP_KERNEL); in alloc_rootdomain()
550 if (!rd) in alloc_rootdomain()
553 if (init_rootdomain(rd) != 0) { in alloc_rootdomain()
554 kfree(rd); in alloc_rootdomain()
558 return rd; in alloc_rootdomain()
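
alloc_rootdomain() is the thin constructor on top: zero-allocate, initialize, undo on failure. Filling in the elided return statements (an assumption consistent with the listed lines):

	static struct root_domain *alloc_rootdomain(void)
	{
		struct root_domain *rd;

		rd = kzalloc(sizeof(*rd), GFP_KERNEL);
		if (!rd)
			return NULL;

		if (init_rootdomain(rd) != 0) {
			/* init_rootdomain() already unwound its own allocations. */
			kfree(rd);
			return NULL;
		}

		return rd;
	}
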
664 cpu_attach_domain(struct sched_domain *sd, struct root_domain *rd, int cpu) in cpu_attach_domain() argument
701 rq_attach_root(rq, rd); in cpu_attach_domain()
712 struct root_domain *rd; member
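
cpu_attach_domain() is where a root domain meets a specific CPU: it attaches that CPU's runqueue to rd via rq_attach_root() and then publishes the new sched_domain hierarchy. A simplified sketch (the RCU swap of rq->sd follows the mainline function and is an assumption for this tree; degenerate-domain trimming is elided):

	static void
	cpu_attach_domain(struct sched_domain *sd, struct root_domain *rd, int cpu)
	{
		struct rq *rq = cpu_rq(cpu);
		struct sched_domain *tmp;

		rq_attach_root(rq, rd);		/* line 701: move the rq into rd->span */

		tmp = rq->sd;
		rcu_assign_pointer(rq->sd, sd);	/* publish the new hierarchy */
		destroy_sched_domains(tmp);	/* retire the old one (RCU inside) */
	}

The member at line 712 is in struct s_data, the scratch bundle that carries the freshly built root domain (d->rd below) through build_sched_domains().
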
1228 if (!atomic_read(&d->rd->refcount)) in __free_domain_allocs()
1229 free_rootdomain(&d->rd->rcu); in __free_domain_allocs()
1252 d->rd = alloc_rootdomain(); in __visit_domain_allocation_hell()
1253 if (!d->rd) in __visit_domain_allocation_hell()
2050 cpu_attach_domain(sd, d.rd, i); in build_sched_domains()
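
build_sched_domains() obtains its root domain from __visit_domain_allocation_hell() (line 1252) and hands it to each CPU at line 2050. On the error path, __free_domain_allocs() may free the candidate rd directly only while nothing has attached to it yet; once rq_attach_root() has run, the refcount and RCU own the lifetime. That is the guard at lines 1228-1229:

	/* Sketch of the error-path guard in __free_domain_allocs(). */
	if (!atomic_read(&d->rd->refcount))
		free_rootdomain(&d->rd->rcu);	/* never published: no grace period needed */

Calling free_rootdomain() directly, rather than via call_rcu(), is safe here precisely because no reader can hold a reference to an unpublished root domain.
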
2233 struct root_domain *rd; in partition_sched_domains_locked() local
2241 rd = cpu_rq(cpumask_any(doms_cur[i]))->rd; in partition_sched_domains_locked()
2242 dl_clear_root_domain(rd); in partition_sched_domains_locked()
2278 cpu_rq(cpumask_first(doms_cur[j]))->rd->pd) { in partition_sched_domains_locked()
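
partition_sched_domains_locked() compares the current and new domain spans. A span that survives unchanged keeps its root domain, but its deadline bandwidth accounting is cleared for recomputation (lines 2241-2242); the EAS check at line 2278 similarly tests whether a surviving span already has perf domains (rd->pd) built. A sketch of the surviving-domain branch, with loop scaffolding reconstructed from mainline (an assumption for this tree):

	for (i = 0; i < ndoms_cur; i++) {
		for (j = 0; j < n && !new_topology; j++) {
			if (cpumask_equal(doms_cur[i], doms_new[j])) {
				struct root_domain *rd;

				/* Domain survives: clear its deadline bandwidth
				 * so it can be recomputed for the new layout.
				 */
				rd = cpu_rq(cpumask_any(doms_cur[i]))->rd;
				dl_clear_root_domain(rd);
				goto match1;
			}
		}
		/* No match: this current domain is torn down. */
		detach_destroy_domains(doms_cur[i]);
	match1:
		;
	}
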