Lines matching refs: top_cpuset
334 static struct cpuset top_cpuset = { variable
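
All of the references below are to this single object: the root cpuset, defined statically in kernel/cgroup/cpuset.c rather than allocated at mount time. A minimal sketch of the definition, assuming a kernel of the era this listing comes from (the exact flag set and the PRS_* constant names vary between versions):

    static struct cpuset top_cpuset = {
            /* assumed flags; check your kernel version */
            .flags = ((1 << CS_ONLINE) | (1 << CS_CPU_EXCLUSIVE) |
                      (1 << CS_MEM_EXCLUSIVE)),
            .partition_root_state = PRS_ENABLED,
    };
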
739 if (cur == &top_cpuset) in validate_change()
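
validate_change() vets a proposed configuration against a cpuset's parent and siblings; the root has no parent, so those checks are skipped for it. A condensed sketch of the early exit (assumed shape, not the full function):

    static int validate_change(struct cpuset *cur, struct cpuset *trial)
    {
            int ret = 0;

            /* ... cgroup-v1 child checks elided ... */

            /* remaining checks don't apply to the root cpuset */
            if (cur == &top_cpuset)
                    goto out;

            /* ... parent/exclusivity checks elided ... */
    out:
            return ret;
    }
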
900 bool root_load_balance = is_sched_load_balance(&top_cpuset); in generate_sched_domains()
907 if (root_load_balance && !top_cpuset.nr_subparts_cpus) { in generate_sched_domains()
916 update_domain_attr_tree(dattr, &top_cpuset); in generate_sched_domains()
918 cpumask_and(doms[0], top_cpuset.effective_cpus, in generate_sched_domains()
931 csa[csn++] = &top_cpuset; in generate_sched_domains()
932 cpuset_for_each_descendant_pre(cp, pos_css, &top_cpuset) { in generate_sched_domains()
933 if (cp == &top_cpuset) in generate_sched_domains()
953 cpumask_subset(cp->cpus_allowed, top_cpuset.effective_cpus)) in generate_sched_domains()
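
Read together, the generate_sched_domains() references implement a fast path: when the root cpuset still load-balances and no CPUs have been handed to partitions (nr_subparts_cpus == 0), one sched domain covering the root's effective CPUs is enough; otherwise the code walks every descendant and collects the load-balanced leaves into the csa[] array. A sketch of the fast path, assuming the ~v5.x layout (the housekeeping flag was later renamed HK_TYPE_DOMAIN):

    if (root_load_balance && !top_cpuset.nr_subparts_cpus) {
            ndoms = 1;
            doms = alloc_sched_domains(ndoms);
            if (!doms)
                    goto done;

            if (dattr) {
                    *dattr = SD_ATTR_INIT;
                    update_domain_attr_tree(dattr, &top_cpuset);
            }
            cpumask_and(doms[0], top_cpuset.effective_cpus,
                        housekeeping_cpumask(HK_FLAG_DOMAIN));
            goto done;
    }
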
1095 cpuset_for_each_descendant_pre(cs, pos_css, &top_cpuset) { in rebuild_root_domains()
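
rebuild_root_domains() shows the walk idiom that recurs throughout this listing: a pre-order RCU walk rooted at top_cpuset, dropping the RCU lock around work that may sleep. A condensed sketch (update_tasks_root_domain() is the helper this era of the file uses to redo deadline-bandwidth accounting):

    rcu_read_lock();
    cpuset_for_each_descendant_pre(cs, pos_css, &top_cpuset) {
            if (cpumask_empty(cs->effective_cpus)) {
                    /* nothing to do below this point; skip the subtree */
                    pos_css = css_rightmost_descendant(pos_css);
                    continue;
            }

            css_get(&cs->css);
            rcu_read_unlock();

            update_tasks_root_domain(cs);        /* may sleep */

            rcu_read_lock();
            css_put(&cs->css);
    }
    rcu_read_unlock();
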
1155 if (!top_cpuset.nr_subparts_cpus && in rebuild_sched_domains_locked()
1156 !cpumask_equal(top_cpuset.effective_cpus, cpu_active_mask)) in rebuild_sched_domains_locked()
1164 if (top_cpuset.nr_subparts_cpus) { in rebuild_sched_domains_locked()
1166 cpuset_for_each_descendant_pre(cs, pos_css, &top_cpuset) { in rebuild_sched_domains_locked()
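
In rebuild_sched_domains_locked(), the top_cpuset tests guard against racing with CPU hotplug: if the root's effective CPUs no longer match cpu_active_mask, or a partition root has kept an offlined CPU, the rebuild is abandoned and left to the hotplug worker. A condensed sketch (is_partition_root() was later renamed is_partition_valid()):

    /* a hotplug update is still pending; let it do the rebuild */
    if (!top_cpuset.nr_subparts_cpus &&
        !cpumask_equal(top_cpuset.effective_cpus, cpu_active_mask))
            return;

    if (top_cpuset.nr_subparts_cpus) {
            rcu_read_lock();
            cpuset_for_each_descendant_pre(cs, pos_css, &top_cpuset) {
                    if (!is_partition_root(cs)) {
                            pos_css = css_rightmost_descendant(pos_css);
                            continue;
                    }
                    if (!cpumask_subset(cs->effective_cpus,
                                        cpu_active_mask)) {
                            rcu_read_unlock();
                            return;
                    }
            }
            rcu_read_unlock();
    }
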
1213 bool top_cs = cs == &top_cpuset; in update_tasks_cpumask()
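
The top_cs flag in update_tasks_cpumask() exists so that per-CPU kthreads living in the root cpuset keep their affinity untouched. A sketch, assuming the ~v6.0 shape of the function:

    static void update_tasks_cpumask(struct cpuset *cs)
    {
            struct css_task_iter it;
            struct task_struct *task;
            bool top_cs = cs == &top_cpuset;

            css_task_iter_start(&cs->css, 0, &it);
            while ((task = css_task_iter_next(&it))) {
                    /* leave percpu kthreads in the root alone */
                    if (top_cs && (task->flags & PF_KTHREAD) &&
                        kthread_is_per_cpu(task))
                            continue;
                    set_cpus_allowed_ptr(task, cs->effective_cpus);
            }
            css_task_iter_end(&it);
    }
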
1736 if (cs == &top_cpuset) in update_cpumask()
1753 top_cpuset.cpus_allowed)) in update_cpumask()
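
update_cpumask() rejects writes to the root's cpuset.cpus outright, because top_cpuset.cpus_allowed is owned by the hotplug code; any other cpuset's requested mask must stay within the root's. A sketch of the two checks:

    /* top_cpuset.cpus_allowed tracks hotplug state; it is read-only */
    if (cs == &top_cpuset)
            return -EACCES;

    /* ... parse the user buffer into trialcs ... */

    if (!cpumask_subset(trialcs->cpus_allowed,
                        top_cpuset.cpus_allowed))
            return -EINVAL;
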
2068 if (cs == &top_cpuset) { in update_nodemask()
2087 top_cpuset.mems_allowed)) { in update_nodemask()
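
update_nodemask() applies the same policy to memory nodes: -EACCES for the root, whose mems_allowed follows memory hotplug, and -EINVAL for a mask reaching outside it. A sketch:

    if (cs == &top_cpuset) {
            retval = -EACCES;
            goto done;
    }

    /* ... parse the user buffer into trialcs ... */

    if (!nodes_subset(trialcs->mems_allowed,
                      top_cpuset.mems_allowed)) {
            retval = -EINVAL;
            goto done;
    }
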
2522 if (cs != &top_cpuset) in cpuset_attach()
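
In cpuset_attach(), a task joining an ordinary cpuset is pinned to that cpuset's online CPUs, while a task attached to the root keeps the widest mask it can run on. A sketch of the per-task loop, assuming the ~v5.15 form (task_cpu_possible_mask() is recent; older kernels used cpu_possible_mask here):

    cgroup_taskset_for_each(task, css, tset) {
            if (cs != &top_cpuset)
                    guarantee_online_cpus(task, cpus_attach);
            else
                    cpumask_copy(cpus_attach, task_cpu_possible_mask(task));

            WARN_ON_ONCE(set_cpus_allowed_ptr(task, cpus_attach));
            cpuset_change_task_nodemask(task, &cpuset_attach_nodemask_to);
    }
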
3060 return &top_cpuset.css; in cpuset_css_alloc()
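
cpuset_css_alloc() never allocates the root: called without a parent (i.e. for the root cgroup), it hands back the css embedded in the static top_cpuset. A sketch of the entry check:

    static struct cgroup_subsys_state *
    cpuset_css_alloc(struct cgroup_subsys_state *parent_css)
    {
            struct cpuset *cs;

            if (!parent_css)
                    return &top_cpuset.css;        /* root is static */

            cs = kzalloc(sizeof(*cs), GFP_KERNEL);
            if (!cs)
                    return ERR_PTR(-ENOMEM);

            /* ... allocate cpumasks, set default flags ... */
            return &cs->css;
    }
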
3203 cpumask_copy(top_cpuset.cpus_allowed, cpu_possible_mask); in cpuset_bind()
3204 top_cpuset.mems_allowed = node_possible_map; in cpuset_bind()
3206 cpumask_copy(top_cpuset.cpus_allowed, in cpuset_bind()
3207 top_cpuset.effective_cpus); in cpuset_bind()
3208 top_cpuset.mems_allowed = top_cpuset.effective_mems; in cpuset_bind()
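
cpuset_bind() runs when the hierarchy is (re)mounted and explains the two branches above: on the default (v2) hierarchy the root advertises everything possible, while on v1 it falls back to the effective masks. A sketch, assuming is_in_v2_mode() and callback_lock as in recent kernels (the surrounding cpuset mutex/rwsem is elided):

    static void cpuset_bind(struct cgroup_subsys_state *root_css)
    {
            spin_lock_irq(&callback_lock);
            if (is_in_v2_mode()) {
                    cpumask_copy(top_cpuset.cpus_allowed, cpu_possible_mask);
                    top_cpuset.mems_allowed = node_possible_map;
            } else {
                    cpumask_copy(top_cpuset.cpus_allowed,
                                 top_cpuset.effective_cpus);
                    top_cpuset.mems_allowed = top_cpuset.effective_mems;
            }
            spin_unlock_irq(&callback_lock);
    }
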
3256 BUG_ON(!alloc_cpumask_var(&top_cpuset.cpus_allowed, GFP_KERNEL)); in cpuset_init()
3257 BUG_ON(!alloc_cpumask_var(&top_cpuset.effective_cpus, GFP_KERNEL)); in cpuset_init()
3258 BUG_ON(!zalloc_cpumask_var(&top_cpuset.subparts_cpus, GFP_KERNEL)); in cpuset_init()
3260 cpumask_setall(top_cpuset.cpus_allowed); in cpuset_init()
3261 nodes_setall(top_cpuset.mems_allowed); in cpuset_init()
3262 cpumask_setall(top_cpuset.effective_cpus); in cpuset_init()
3263 nodes_setall(top_cpuset.effective_mems); in cpuset_init()
3265 fmeter_init(&top_cpuset.fmeter); in cpuset_init()
3266 set_bit(CS_SCHED_LOAD_BALANCE, &top_cpuset.flags); in cpuset_init()
3267 top_cpuset.relax_domain_level = -1; in cpuset_init()
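
Assembled, the cpuset_init() lines bootstrap the root before SMP comes up: allocate its masks, allow every possible CPU and memory node, and turn on load balancing. A sketch of the function as these references suggest it reads:

    int __init cpuset_init(void)
    {
            BUG_ON(!alloc_cpumask_var(&top_cpuset.cpus_allowed, GFP_KERNEL));
            BUG_ON(!alloc_cpumask_var(&top_cpuset.effective_cpus, GFP_KERNEL));
            BUG_ON(!zalloc_cpumask_var(&top_cpuset.subparts_cpus, GFP_KERNEL));

            cpumask_setall(top_cpuset.cpus_allowed);
            nodes_setall(top_cpuset.mems_allowed);
            cpumask_setall(top_cpuset.effective_cpus);
            nodes_setall(top_cpuset.effective_mems);

            fmeter_init(&top_cpuset.fmeter);
            set_bit(CS_SCHED_LOAD_BALANCE, &top_cpuset.flags);
            top_cpuset.relax_domain_level = -1;

            /* ... allocate cpus_attach and other globals ... */
            return 0;
    }
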
3530 cpus_updated = !cpumask_equal(top_cpuset.effective_cpus, &new_cpus); in cpuset_hotplug_workfn()
3531 mems_updated = !nodes_equal(top_cpuset.effective_mems, new_mems); in cpuset_hotplug_workfn()
3537 if (!cpus_updated && top_cpuset.nr_subparts_cpus) in cpuset_hotplug_workfn()
3544 cpumask_copy(top_cpuset.cpus_allowed, &new_cpus); in cpuset_hotplug_workfn()
3551 if (top_cpuset.nr_subparts_cpus) { in cpuset_hotplug_workfn()
3553 top_cpuset.subparts_cpus)) { in cpuset_hotplug_workfn()
3554 top_cpuset.nr_subparts_cpus = 0; in cpuset_hotplug_workfn()
3555 cpumask_clear(top_cpuset.subparts_cpus); in cpuset_hotplug_workfn()
3558 top_cpuset.subparts_cpus); in cpuset_hotplug_workfn()
3561 cpumask_copy(top_cpuset.effective_cpus, &new_cpus); in cpuset_hotplug_workfn()
3570 top_cpuset.mems_allowed = new_mems; in cpuset_hotplug_workfn()
3571 top_cpuset.effective_mems = new_mems; in cpuset_hotplug_workfn()
3573 update_tasks_nodemask(&top_cpuset); in cpuset_hotplug_workfn()
3584 cpuset_for_each_descendant_pre(cs, pos_css, &top_cpuset) { in cpuset_hotplug_workfn()
3585 if (cs == &top_cpuset || !css_tryget_online(&cs->css)) in cpuset_hotplug_workfn()
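
cpuset_hotplug_workfn() is where the root tracks hardware state: it diffs the active CPUs and memory nodes against the root's effective masks, carves CPUs granted to partitions (subparts_cpus) back out, then walks the descendants (skipping the root itself) to propagate the change. A condensed sketch of the top-level flow (on_dfl distinguishes the v2 hierarchy; names follow this era of the file):

    cpumask_copy(&new_cpus, cpu_active_mask);
    new_mems = node_states[N_MEMORY];

    cpus_updated = !cpumask_equal(top_cpuset.effective_cpus, &new_cpus);
    mems_updated = !nodes_equal(top_cpuset.effective_mems, new_mems);

    /* partition CPUs may need returning even when nothing else changed */
    if (!cpus_updated && top_cpuset.nr_subparts_cpus)
            cpus_updated = true;

    if (cpus_updated) {
            spin_lock_irq(&callback_lock);
            if (!on_dfl)
                    cpumask_copy(top_cpuset.cpus_allowed, &new_cpus);
            if (top_cpuset.nr_subparts_cpus) {
                    if (cpumask_subset(&new_cpus, top_cpuset.subparts_cpus)) {
                            /* no CPU left for the root: dissolve partitions */
                            top_cpuset.nr_subparts_cpus = 0;
                            cpumask_clear(top_cpuset.subparts_cpus);
                    } else {
                            cpumask_andnot(&new_cpus, &new_cpus,
                                           top_cpuset.subparts_cpus);
                    }
            }
            cpumask_copy(top_cpuset.effective_cpus, &new_cpus);
            spin_unlock_irq(&callback_lock);
    }

    if (mems_updated) {
            spin_lock_irq(&callback_lock);
            if (!on_dfl)
                    top_cpuset.mems_allowed = new_mems;
            top_cpuset.effective_mems = new_mems;
            spin_unlock_irq(&callback_lock);
            update_tasks_nodemask(&top_cpuset);
    }
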
3650 top_cpuset.old_mems_allowed = top_cpuset.mems_allowed; in cpuset_init_smp()
3652 cpumask_copy(top_cpuset.effective_cpus, cpu_active_mask); in cpuset_init_smp()
3653 top_cpuset.effective_mems = node_states[N_MEMORY]; in cpuset_init_smp()
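
Finally, cpuset_init_smp() snapshots the first real hardware state once secondary CPUs are up. Note that only the effective masks are narrowed; cpus_allowed and mems_allowed keep whatever cpuset_bind() chose. A sketch, assuming the later variant of the function:

    void __init cpuset_init_smp(void)
    {
            /*
             * cpus_allowed/mems_allowed stay as cpuset_bind() set them;
             * only the effective masks track what is online right now.
             */
            top_cpuset.old_mems_allowed = top_cpuset.mems_allowed;

            cpumask_copy(top_cpuset.effective_cpus, cpu_active_mask);
            top_cpuset.effective_mems = node_states[N_MEMORY];

            /* ... register memory-hotplug notifier, create mm-migrate wq ... */
    }
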