Lines matching refs:rd (references to 'rd' in kernel/sched/topology.c)
380 struct root_domain *rd = cpu_rq(cpu)->rd; in build_perf_domains() local
444 tmp = rd->pd; in build_perf_domains()
445 rcu_assign_pointer(rd->pd, pd); in build_perf_domains()
453 tmp = rd->pd; in build_perf_domains()
454 rcu_assign_pointer(rd->pd, NULL); in build_perf_domains()
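The build_perf_domains() hits above swap rd->pd with rcu_assign_pointer(), keeping the old list in tmp so it can be torn down once readers are done. A minimal userspace model of that publish step, using a C11 release store in place of the kernel's RCU API (struct pd, struct rd and publish_pd here are illustrative names, not kernel symbols):

#include <stdatomic.h>
#include <stdlib.h>

/* Illustrative stand-ins for the perf-domain list and the root domain. */
struct pd { struct pd *next; };
struct rd { _Atomic(struct pd *) pd; };

/* Publish a freshly built list; return the old one for deferred teardown. */
static struct pd *publish_pd(struct rd *rd, struct pd *new_pd)
{
	struct pd *tmp = atomic_load_explicit(&rd->pd, memory_order_relaxed);

	/* Release store: a reader that sees new_pd also sees its contents
	 * (the kernel expresses this with rcu_assign_pointer()). */
	atomic_store_explicit(&rd->pd, new_pd, memory_order_release);
	return tmp;
}

int main(void)
{
	struct rd rd = { .pd = NULL };
	struct pd *fresh = calloc(1, sizeof(*fresh));
	struct pd *old = publish_pd(&rd, fresh);	/* old == NULL here */

	free(old);
	free(atomic_load(&rd.pd));
	return 0;
}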
466 struct root_domain *rd = container_of(rcu, struct root_domain, rcu); in free_rootdomain() local
468 cpupri_cleanup(&rd->cpupri); in free_rootdomain()
469 cpudl_cleanup(&rd->cpudl); in free_rootdomain()
470 free_cpumask_var(rd->dlo_mask); in free_rootdomain()
471 free_cpumask_var(rd->rto_mask); in free_rootdomain()
472 free_cpumask_var(rd->online); in free_rootdomain()
473 free_cpumask_var(rd->span); in free_rootdomain()
474 free_pd(rd->pd); in free_rootdomain()
475 kfree(rd); in free_rootdomain()
478 void rq_attach_root(struct rq *rq, struct root_domain *rd) in rq_attach_root() argument
485 if (rq->rd) { in rq_attach_root()
486 old_rd = rq->rd; in rq_attach_root()
502 atomic_inc(&rd->refcount); in rq_attach_root()
503 rq->rd = rd; in rq_attach_root()
505 cpumask_set_cpu(rq->cpu, rd->span); in rq_attach_root()
515 void sched_get_rd(struct root_domain *rd) in sched_get_rd() argument
517 atomic_inc(&rd->refcount); in sched_get_rd()
520 void sched_put_rd(struct root_domain *rd) in sched_put_rd() argument
522 if (!atomic_dec_and_test(&rd->refcount)) in sched_put_rd()
525 call_rcu(&rd->rcu, free_rootdomain); in sched_put_rd()
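Taken together, rq_attach_root(), sched_get_rd() and sched_put_rd() manage the root_domain's refcount, and the final put hands the object to call_rcu(), whose callback (free_rootdomain(), above) recovers the root_domain from its embedded rcu_head with container_of(). A compact userspace sketch of that lifetime pattern, with C11 atomics in place of atomic_inc()/atomic_dec_and_test() and a direct callback in place of call_rcu() (all names are illustrative):

#include <stdatomic.h>
#include <stddef.h>
#include <stdlib.h>

/* Minimal stand-in: a refcounted object freed from a callback that
 * recovers the enclosing structure from an embedded callback head. */
struct cb_head { void (*func)(struct cb_head *); };

struct rootdom {
	atomic_int refcount;
	struct cb_head rcu;
};

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static void free_rootdom(struct cb_head *head)
{
	struct rootdom *rd = container_of(head, struct rootdom, rcu);

	free(rd);
}

static void get_rd(struct rootdom *rd)
{
	atomic_fetch_add(&rd->refcount, 1);
}

static void put_rd(struct rootdom *rd)
{
	/* Last put triggers the free; the kernel defers it with call_rcu()
	 * so lockless readers can drain first. Here we call it directly. */
	if (atomic_fetch_sub(&rd->refcount, 1) == 1) {
		rd->rcu.func = free_rootdom;
		rd->rcu.func(&rd->rcu);
	}
}

int main(void)
{
	struct rootdom *rd = calloc(1, sizeof(*rd));

	atomic_init(&rd->refcount, 1);
	get_rd(rd);	/* second reference */
	put_rd(rd);
	put_rd(rd);	/* refcount hits zero -> freed via the callback */
	return 0;
}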
528 static int init_rootdomain(struct root_domain *rd) in init_rootdomain() argument
530 if (!zalloc_cpumask_var(&rd->span, GFP_KERNEL)) in init_rootdomain()
532 if (!zalloc_cpumask_var(&rd->online, GFP_KERNEL)) in init_rootdomain()
534 if (!zalloc_cpumask_var(&rd->dlo_mask, GFP_KERNEL)) in init_rootdomain()
536 if (!zalloc_cpumask_var(&rd->rto_mask, GFP_KERNEL)) in init_rootdomain()
540 rd->rto_cpu = -1; in init_rootdomain()
541 raw_spin_lock_init(&rd->rto_lock); in init_rootdomain()
542 rd->rto_push_work = IRQ_WORK_INIT_HARD(rto_push_irq_work_func); in init_rootdomain()
545 rd->visit_gen = 0; in init_rootdomain()
546 init_dl_bw(&rd->dl_bw); in init_rootdomain()
547 if (cpudl_init(&rd->cpudl) != 0) in init_rootdomain()
550 if (cpupri_init(&rd->cpupri) != 0) in init_rootdomain()
555 cpudl_cleanup(&rd->cpudl); in init_rootdomain()
557 free_cpumask_var(rd->rto_mask); in init_rootdomain()
559 free_cpumask_var(rd->dlo_mask); in init_rootdomain()
561 free_cpumask_var(rd->online); in init_rootdomain()
563 free_cpumask_var(rd->span); in init_rootdomain()
583 struct root_domain *rd; in alloc_rootdomain() local
585 rd = kzalloc(sizeof(*rd), GFP_KERNEL); in alloc_rootdomain()
586 if (!rd) in alloc_rootdomain()
589 if (init_rootdomain(rd) != 0) { in alloc_rootdomain()
590 kfree(rd); in alloc_rootdomain()
594 return rd; in alloc_rootdomain()
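init_rootdomain() allocates its cpumasks and the cpudl/cpupri state in order and unwinds in reverse on failure; alloc_rootdomain() then wraps kzalloc() plus that init, freeing the partially built object if init fails. A freestanding sketch of the same shape, with placeholder members rather than the real fields:

#include <stdlib.h>

/* Allocate several members in order; unwind in reverse order on failure.
 * Member names are placeholders, not the kernel fields. */
struct dom {
	unsigned long *span;
	unsigned long *online;
	unsigned long *rto_mask;
};

static int init_dom(struct dom *d)
{
	if (!(d->span = calloc(1, sizeof(unsigned long))))
		goto out;
	if (!(d->online = calloc(1, sizeof(unsigned long))))
		goto free_span;
	if (!(d->rto_mask = calloc(1, sizeof(unsigned long))))
		goto free_online;
	return 0;

free_online:
	free(d->online);
free_span:
	free(d->span);
out:
	return -1;
}

static struct dom *alloc_dom(void)
{
	struct dom *d = calloc(1, sizeof(*d));

	if (!d)
		return NULL;
	if (init_dom(d) != 0) {
		free(d);
		return NULL;
	}
	return d;
}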
700 cpu_attach_domain(struct sched_domain *sd, struct root_domain *rd, int cpu) in cpu_attach_domain() argument
749 rq_attach_root(rq, rd); in cpu_attach_domain()
760 struct root_domain *rd; member
1446 if (!atomic_read(&d->rd->refcount)) in __free_domain_allocs()
1447 free_rootdomain(&d->rd->rcu); in __free_domain_allocs()
1470 d->rd = alloc_rootdomain(); in __visit_domain_allocation_hell()
1471 if (!d->rd) in __visit_domain_allocation_hell()
2370 if (rq->cpu_capacity_orig > READ_ONCE(d.rd->max_cpu_capacity)) in build_sched_domains()
2371 WRITE_ONCE(d.rd->max_cpu_capacity, rq->cpu_capacity_orig); in build_sched_domains()
2373 cpu_attach_domain(sd, d.rd, i); in build_sched_domains()
2382 cpumask_pr_args(cpu_map), rq->rd->max_cpu_capacity); in build_sched_domains()
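The max_cpu_capacity update in build_sched_domains() is a marked read-then-write: READ_ONCE()/WRITE_ONCE() keep concurrent readers from seeing torn values while the domain-building path raises the running maximum. A tiny model using relaxed C11 atomics in place of the kernel macros (names are illustrative):

#include <stdatomic.h>

/* Single writer raises a maximum; other threads may read it concurrently,
 * so the accesses are marked to rule out torn loads/stores. */
static _Atomic unsigned long max_capacity;

static void note_capacity(unsigned long cap)
{
	if (cap > atomic_load_explicit(&max_capacity, memory_order_relaxed))
		atomic_store_explicit(&max_capacity, cap, memory_order_relaxed);
}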
2562 struct root_domain *rd; in partition_sched_domains_locked() local
2570 rd = cpu_rq(cpumask_any(doms_cur[i]))->rd; in partition_sched_domains_locked()
2571 dl_clear_root_domain(rd); in partition_sched_domains_locked()
2607 cpu_rq(cpumask_first(doms_cur[j]))->rd->pd) { in partition_sched_domains_locked()