Lines Matching refs:cp

851 struct cpuset *cp; in update_domain_attr_tree() local
855 cpuset_for_each_descendant_pre(cp, pos_css, root_cs) { in update_domain_attr_tree()
857 if (cpumask_empty(cp->cpus_allowed)) { in update_domain_attr_tree()
862 if (is_sched_load_balance(cp)) in update_domain_attr_tree()
863 update_domain_attr(dattr, cp); in update_domain_attr_tree()
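
The first group of matches covers essentially the whole body of update_domain_attr_tree(). As a reading aid, here is a reconstruction of how the matched lines fit together; the rcu_read_lock()/rcu_read_unlock() bracketing and the css_rightmost_descendant() subtree skip are assumptions carried over from the other pre-order walks in kernel/cgroup/cpuset.c, not part of the matches above.

/*
 * Sketch only: reconstructed context around the matched lines, not a
 * standalone program.  Numbers in comments refer to the source lines
 * listed above.
 */
static void update_domain_attr_tree(struct sched_domain_attr *dattr,
                                    struct cpuset *root_cs)
{
        struct cpuset *cp;                      /* 851: local cursor */
        struct cgroup_subsys_state *pos_css;

        rcu_read_lock();
        cpuset_for_each_descendant_pre(cp, pos_css, root_cs) {  /* 855 */
                /* Skip @cp's whole subtree if it has no CPUs at all. */
                if (cpumask_empty(cp->cpus_allowed)) {          /* 857 */
                        pos_css = css_rightmost_descendant(pos_css);
                        continue;
                }

                if (is_sched_load_balance(cp))                  /* 862 */
                        update_domain_attr(dattr, cp);          /* 863 */
        }
        rcu_read_unlock();
}
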
931 struct cpuset *cp; /* top-down scan of cpusets */ in generate_sched_domains() local
964 csa = kmalloc_array(nr_cpusets(), sizeof(cp), GFP_KERNEL); in generate_sched_domains()
972 cpuset_for_each_descendant_pre(cp, pos_css, &top_cpuset) { in generate_sched_domains()
973 if (cp == &top_cpuset) in generate_sched_domains()
986 if (!cpumask_empty(cp->cpus_allowed) && in generate_sched_domains()
987 !(is_sched_load_balance(cp) && in generate_sched_domains()
988 cpumask_intersects(cp->cpus_allowed, in generate_sched_domains()
993 cpumask_subset(cp->cpus_allowed, top_cpuset.effective_cpus)) in generate_sched_domains()
996 if (is_sched_load_balance(cp) && in generate_sched_domains()
997 !cpumask_empty(cp->effective_cpus)) in generate_sched_domains()
998 csa[csn++] = cp; in generate_sched_domains()
1001 if (!is_partition_valid(cp)) in generate_sched_domains()
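
In generate_sched_domains(), cp is the cursor of a top-down scan that collects candidate cpusets into the csa[] array. The condensed sketch below shows how the matched lines relate; the rcu_read_lock() bracketing is an assumption, and skip_this_cpuset() is a hypothetical stand-in for the compound condition at lines 986-993, whose continuation lines (e.g. the second argument of cpumask_intersects() at 988) are not part of the matches.

        struct cpuset *cp;      /* 931: top-down scan of cpusets */
        struct cgroup_subsys_state *pos_css;
        struct cpuset **csa;    /* array of candidate cpusets */
        int csn;                /* number of csa[] entries in use */

        csa = kmalloc_array(nr_cpusets(), sizeof(cp), GFP_KERNEL);      /* 964 */
        if (!csa)
                goto done;      /* error path elided */
        csn = 0;

        rcu_read_lock();
        cpuset_for_each_descendant_pre(cp, pos_css, &top_cpuset) {      /* 972 */
                if (cp == &top_cpuset)                                  /* 973 */
                        continue;

                /*
                 * 986-993: decide whether @cp itself needs to be
                 * collected at all; non-empty cpusets that don't need
                 * their own sched domain are passed over here.
                 * skip_this_cpuset() is a hypothetical stand-in for
                 * that compound test.
                 */
                if (skip_this_cpuset(cp))
                        continue;

                /* 996-998: collect load-balancing cpusets that still own CPUs. */
                if (is_sched_load_balance(cp) &&
                    !cpumask_empty(cp->effective_cpus))
                        csa[csn++] = cp;

                /*
                 * 1001: don't descend below a cpuset that is not a
                 * valid partition root.
                 */
                if (!is_partition_valid(cp))
                        pos_css = css_rightmost_descendant(pos_css);
        }
        rcu_read_unlock();
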
1600 struct cpuset *cp; in update_cpumasks_hier() local
1606 cpuset_for_each_descendant_pre(cp, pos_css, cs) { in update_cpumasks_hier()
1607 struct cpuset *parent = parent_cs(cp); in update_cpumasks_hier()
1610 compute_effective_cpumask(tmp->new_cpus, cp, parent); in update_cpumasks_hier()
1619 if (is_partition_valid(cp) && in update_cpumasks_hier()
1620 cpumask_equal(cp->cpus_allowed, cp->subparts_cpus)) in update_cpumasks_hier()
1624 if (!cp->use_parent_ecpus) { in update_cpumasks_hier()
1625 cp->use_parent_ecpus = true; in update_cpumasks_hier()
1628 } else if (cp->use_parent_ecpus) { in update_cpumasks_hier()
1629 cp->use_parent_ecpus = false; in update_cpumasks_hier()
1641 if (!cp->partition_root_state && !force && in update_cpumasks_hier()
1642 cpumask_equal(tmp->new_cpus, cp->effective_cpus) && in update_cpumasks_hier()
1644 (is_sched_load_balance(parent) == is_sched_load_balance(cp)))) { in update_cpumasks_hier()
1656 old_prs = new_prs = cp->partition_root_state; in update_cpumasks_hier()
1657 if ((cp != cs) && old_prs) { in update_cpumasks_hier()
1670 if (is_partition_valid(cp)) in update_cpumasks_hier()
1671 new_prs = -cp->partition_root_state; in update_cpumasks_hier()
1672 WRITE_ONCE(cp->prs_err, in update_cpumasks_hier()
1679 if (!css_tryget_online(&cp->css)) in update_cpumasks_hier()
1684 update_parent_subparts_cpumask(cp, partcmd_update, NULL, in update_cpumasks_hier()
1690 new_prs = cp->partition_root_state; in update_cpumasks_hier()
1695 if (cp->nr_subparts_cpus && !is_partition_valid(cp)) { in update_cpumasks_hier()
1700 cp->subparts_cpus); in update_cpumasks_hier()
1703 cp->nr_subparts_cpus = 0; in update_cpumasks_hier()
1704 cpumask_clear(cp->subparts_cpus); in update_cpumasks_hier()
1707 cpumask_copy(cp->effective_cpus, tmp->new_cpus); in update_cpumasks_hier()
1708 if (cp->nr_subparts_cpus) { in update_cpumasks_hier()
1713 cpumask_andnot(cp->effective_cpus, cp->effective_cpus, in update_cpumasks_hier()
1714 cp->subparts_cpus); in update_cpumasks_hier()
1717 cp->partition_root_state = new_prs; in update_cpumasks_hier()
1720 notify_partition_change(cp, old_prs); in update_cpumasks_hier()
1723 !cpumask_equal(cp->cpus_allowed, cp->effective_cpus)); in update_cpumasks_hier()
1725 update_tasks_cpumask(cp, tmp->new_cpus); in update_cpumasks_hier()
1733 !is_partition_valid(cp) && in update_cpumasks_hier()
1734 (is_sched_load_balance(parent) != is_sched_load_balance(cp))) { in update_cpumasks_hier()
1736 set_bit(CS_SCHED_LOAD_BALANCE, &cp->flags); in update_cpumasks_hier()
1738 clear_bit(CS_SCHED_LOAD_BALANCE, &cp->flags); in update_cpumasks_hier()
1747 if (!cpumask_empty(cp->cpus_allowed) && in update_cpumasks_hier()
1748 is_sched_load_balance(cp) && in update_cpumasks_hier()
1750 is_partition_valid(cp))) in update_cpumasks_hier()
1754 css_put(&cp->css); in update_cpumasks_hier()
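
update_cpumasks_hier() accounts for most of the matches. The heavily condensed skeleton below shows where the matched lines sit in the walk; the callback_lock/rcu locking is an assumption inferred from the css_tryget_online()/css_put() pair at 1679/1754, and the partition-state handling at 1656-1704 and load-balance flag handling at 1733-1750 are reduced to comments.

        rcu_read_lock();
        cpuset_for_each_descendant_pre(cp, pos_css, cs) {               /* 1606 */
                struct cpuset *parent = parent_cs(cp);                  /* 1607 */
                int old_prs, new_prs;

                compute_effective_cpumask(tmp->new_cpus, cp, parent);   /* 1610 */

                /*
                 * 1619-1644: track whether @cp falls back to the parent's
                 * effective CPUs (use_parent_ecpus) and skip the subtree
                 * when nothing would change.
                 */

                old_prs = new_prs = cp->partition_root_state;           /* 1656 */

                /* Pin @cp so it survives while the RCU read lock is dropped. */
                if (!css_tryget_online(&cp->css))                       /* 1679 */
                        continue;
                rcu_read_unlock();

                /*
                 * 1684-1704: re-evaluate the partition state via
                 * update_parent_subparts_cpumask() and, if the partition
                 * became invalid, fold subparts_cpus back into the
                 * effective mask and clear it.
                 */

                spin_lock_irq(&callback_lock);
                cpumask_copy(cp->effective_cpus, tmp->new_cpus);        /* 1707 */
                if (cp->nr_subparts_cpus)                               /* 1708 */
                        cpumask_andnot(cp->effective_cpus,
                                       cp->effective_cpus,
                                       cp->subparts_cpus);              /* 1713-1714 */
                cp->partition_root_state = new_prs;                     /* 1717 */
                spin_unlock_irq(&callback_lock);

                notify_partition_change(cp, old_prs);                   /* 1720 */
                update_tasks_cpumask(cp, tmp->new_cpus);                /* 1725 */

                /*
                 * 1733-1750: make CS_SCHED_LOAD_BALANCE follow the parent
                 * for non-partition children and note whether the sched
                 * domains must be rebuilt.
                 */

                rcu_read_lock();
                css_put(&cp->css);                                      /* 1754 */
        }
        rcu_read_unlock();
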
1857 struct cpuset *cp, *parent; in update_cpumask() local
1870 cpuset_for_each_child(cp, css, parent) in update_cpumask()
1871 if (is_partition_valid(cp) && in update_cpumask()
1872 cpumask_intersects(trialcs->cpus_allowed, cp->cpus_allowed)) { in update_cpumask()
1874 update_parent_subparts_cpumask(cp, partcmd_invalidate, NULL, &tmp); in update_cpumask()
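
The two update_cpumask() matches come from a sibling scan that invalidates any peer partition whose CPUs would overlap the caller's new mask. A short sketch, assuming it runs after an exclusivity check on trialcs has failed and that parent, css and tmp are set up by the surrounding function; the rcu_read_unlock()/rcu_read_lock() dance around the call is also an assumption.

        rcu_read_lock();
        parent = parent_cs(cs);
        cpuset_for_each_child(cp, css, parent)                          /* 1870 */
                if (is_partition_valid(cp) &&                           /* 1871 */
                    cpumask_intersects(trialcs->cpus_allowed,
                                       cp->cpus_allowed)) {             /* 1872 */
                        rcu_read_unlock();
                        /*
                         * 1874: force the conflicting sibling partition
                         * to become invalid.
                         */
                        update_parent_subparts_cpumask(cp, partcmd_invalidate,
                                                       NULL, &tmp);
                        rcu_read_lock();
                }
        rcu_read_unlock();
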
2094 struct cpuset *cp; in update_nodemasks_hier() local
2098 cpuset_for_each_descendant_pre(cp, pos_css, cs) { in update_nodemasks_hier()
2099 struct cpuset *parent = parent_cs(cp); in update_nodemasks_hier()
2101 nodes_and(*new_mems, cp->mems_allowed, parent->effective_mems); in update_nodemasks_hier()
2111 if (nodes_equal(*new_mems, cp->effective_mems)) { in update_nodemasks_hier()
2116 if (!css_tryget_online(&cp->css)) in update_nodemasks_hier()
2121 cp->effective_mems = *new_mems; in update_nodemasks_hier()
2125 !nodes_equal(cp->mems_allowed, cp->effective_mems)); in update_nodemasks_hier()
2127 update_tasks_nodemask(cp); in update_nodemasks_hier()
2130 css_put(&cp->css); in update_nodemasks_hier()
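
The last group is the node-mask counterpart, update_nodemasks_hier(). The sketch below reconstructs the walk implied by the matched lines; the callback_lock/rcu locking and the css_rightmost_descendant() subtree skip are assumptions based on the same pattern as the CPU-mask walker, and the is_in_v2_mode() half of the WARN_ON at 2125 is inferred rather than matched.

/* Sketch only: reconstructed context around the matched lines. */
static void update_nodemasks_hier(struct cpuset *cs, nodemask_t *new_mems)
{
        struct cpuset *cp;                                      /* 2094 */
        struct cgroup_subsys_state *pos_css;

        rcu_read_lock();
        cpuset_for_each_descendant_pre(cp, pos_css, cs) {       /* 2098 */
                struct cpuset *parent = parent_cs(cp);          /* 2099 */

                /* Effective mems = requested mems restricted by the parent. */
                nodes_and(*new_mems, cp->mems_allowed,
                          parent->effective_mems);              /* 2101 */

                /* Handling for an empty result (fall back to the parent) elided. */

                /* Nothing changed: skip @cp's whole subtree. */
                if (nodes_equal(*new_mems, cp->effective_mems)) {       /* 2111 */
                        pos_css = css_rightmost_descendant(pos_css);
                        continue;
                }

                /* Pin @cp so it survives while the RCU read lock is dropped. */
                if (!css_tryget_online(&cp->css))               /* 2116 */
                        continue;
                rcu_read_unlock();

                spin_lock_irq(&callback_lock);
                cp->effective_mems = *new_mems;                 /* 2121 */
                spin_unlock_irq(&callback_lock);

                WARN_ON(!is_in_v2_mode() &&
                        !nodes_equal(cp->mems_allowed, cp->effective_mems));   /* 2125 */

                update_tasks_nodemask(cp);                      /* 2127 */

                rcu_read_lock();
                css_put(&cp->css);                              /* 2130 */
        }
        rcu_read_unlock();
}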