Lines matching refs: effective_cpus
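
All hits are in kernel/cgroup/cpuset.c. effective_cpus is the set of CPUs a cpuset actually gets to use: the user-configured cpus_allowed clipped to the parent's effective mask (compute_effective_cpumask(), lines 1243-1248) and kept in sync with CPU hotplug (lines 3310-3652).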

143 cpumask_var_t effective_cpus; member of struct cpuset
513 while (!cpumask_intersects(cs->effective_cpus, pmask)) { in guarantee_online_cpus()
526 cpumask_and(pmask, pmask, cs->effective_cpus); in guarantee_online_cpus()
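
The guarantee_online_cpus() hits above implement a fallback walk: if a cpuset's effective mask shares no CPU with the candidate mask, climb toward the root until one does. A minimal userspace sketch of that walk, with 64-bit integers standing in for cpumask_var_t and invented names (struct node, guarantee_online), not the kernel API:

    #include <stdint.h>
    #include <stdio.h>

    struct node {
        uint64_t effective_cpus;   /* stands in for cpumask_var_t */
        struct node *parent;       /* NULL at the root */
    };

    /* Model of lines 513/526: climb toward the root until the effective
     * mask intersects the candidate mask, then return the intersection. */
    static uint64_t guarantee_online(const struct node *cs, uint64_t pmask)
    {
        while (cs->parent && !(cs->effective_cpus & pmask))
            cs = cs->parent;                /* the while loop at line 513 */
        return pmask & cs->effective_cpus;  /* cpumask_and(), line 526 */
    }

    int main(void)
    {
        struct node root = { .effective_cpus = 0xff, .parent = NULL };
        struct node leaf = { .effective_cpus = 0x00, .parent = &root };

        /* leaf has no usable CPU, so the walk falls back to root: 0xf */
        printf("%#llx\n", (unsigned long long)guarantee_online(&leaf, 0x0f));
        return 0;
    }
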
599 pmask2 = &cs->effective_cpus; in alloc_cpumasks()
634 free_cpumask_var(cs->effective_cpus); in free_cpumasks()
662 cpumask_copy(trial->effective_cpus, cs->effective_cpus); in alloc_trial_cpuset()
797 return cpumask_intersects(a->effective_cpus, b->effective_cpus); in cpusets_overlap()
918 cpumask_and(doms[0], top_cpuset.effective_cpus, in generate_sched_domains()
953 cpumask_subset(cp->cpus_allowed, top_cpuset.effective_cpus)) in generate_sched_domains()
957 !cpumask_empty(cp->effective_cpus)) in generate_sched_domains()
1037 cpumask_or(dp, dp, b->effective_cpus); in generate_sched_domains()
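
generate_sched_domains() merges cpusets whose effective masks intersect (cpusets_overlap(), line 797) into one scheduling domain, whose span is the OR of their masks (line 1037). A toy fixed-point version of that merge, under the same 64-bit-mask assumption; build_domains and its greedy loop are illustrative, not the kernel's csa/doms machinery:

    #include <stdint.h>
    #include <stdio.h>

    /* Overlapping cpusets must share one sched domain, so keep merging
     * until no two domains intersect (a transitive closure). */
    static int build_domains(const uint64_t *eff, int n, uint64_t *doms)
    {
        int ndoms = 0;

        for (int i = 0; i < n; i++)
            doms[ndoms++] = eff[i];          /* one domain per cpuset */

        for (int changed = 1; changed; ) {
            changed = 0;
            for (int a = 0; a < ndoms; a++)
                for (int b = a + 1; b < ndoms; b++)
                    if (doms[a] & doms[b]) { /* cpusets_overlap(), line 797 */
                        doms[a] |= doms[b];  /* cpumask_or(), line 1037 */
                        doms[b] = doms[--ndoms];
                        changed = 1;
                    }
        }
        return ndoms;
    }

    int main(void)
    {
        uint64_t eff[] = { 0x03, 0x0c, 0x06, 0x30 }; /* 0x06 bridges the first two */
        uint64_t doms[4];

        printf("%d domains\n", build_domains(eff, 4, doms)); /* 2: 0x0f and 0x30 */
        return 0;
    }
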
1097 if (cpumask_empty(cs->effective_cpus)) { in rebuild_root_domains()
1156 !cpumask_equal(top_cpuset.effective_cpus, cpu_active_mask)) in rebuild_sched_domains_locked()
1171 if (!cpumask_subset(cs->effective_cpus, in rebuild_sched_domains_locked()
1223 set_cpus_allowed_ptr(task, cs->effective_cpus); in update_tasks_cpumask()
1243 cpumask_or(new_cpus, parent->effective_cpus, in compute_effective_cpumask()
1248 cpumask_and(new_cpus, cs->cpus_allowed, parent->effective_cpus); in compute_effective_cpumask()
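
Lines 1243/1248 are the core derivation: a cpuset's effective mask is its configured cpus_allowed clipped to the parent's effective mask (line 1243 is the partition-root variant that ORs the subparts CPUs back in first). A toy model of the common case, hypothetical names throughout:

    #include <stdint.h>
    #include <stdio.h>

    struct node {
        uint64_t cpus_allowed;    /* what the user wrote to cpuset.cpus */
        uint64_t effective_cpus;  /* what the kernel actually grants */
    };

    /* Model of compute_effective_cpumask() at line 1248: a child may only
     * use CPUs present in both its own cpus_allowed and the parent's
     * effective set. */
    static uint64_t compute_effective(const struct node *cs,
                                      const struct node *parent)
    {
        return cs->cpus_allowed & parent->effective_cpus;
    }

    int main(void)
    {
        struct node parent = { .effective_cpus = 0x0f };  /* CPUs 0-3 */
        struct node child  = { .cpus_allowed   = 0x3c };  /* CPUs 2-5 */

        /* prints 0xc: only CPUs 2-3 survive the intersection */
        printf("%#llx\n",
               (unsigned long long)compute_effective(&child, &parent));
        return 0;
    }

This intersection is why cpuset.cpus.effective can be a strict subset of cpuset.cpus.
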
1345 if (!cpumask_intersects(cs->cpus_allowed, parent->effective_cpus) && in update_parent_subparts_cpumask()
1395 cpumask_subset(parent->effective_cpus, tmp->addmask) && in update_parent_subparts_cpumask()
1429 cpumask_subset(parent->effective_cpus, tmp->addmask) && in update_parent_subparts_cpumask()
1486 cpumask_andnot(parent->effective_cpus, in update_parent_subparts_cpumask()
1487 parent->effective_cpus, tmp->addmask); in update_parent_subparts_cpumask()
1496 cpumask_or(parent->effective_cpus, in update_parent_subparts_cpumask()
1497 parent->effective_cpus, tmp->delmask); in update_parent_subparts_cpumask()
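
In update_parent_subparts_cpumask(), CPUs handed to a child partition leave the parent's effective set and CPUs given back rejoin it (lines 1486-1497). A sketch of those two moves, helper names invented:

    #include <stdint.h>
    #include <stdio.h>

    /* Model of lines 1486-1487: CPUs granted to a child partition
     * (addmask) disappear from the parent's effective set. */
    static void grant_to_partition(uint64_t *parent_eff, uint64_t addmask)
    {
        *parent_eff &= ~addmask;   /* cpumask_andnot() */
    }

    /* Model of lines 1496-1497: CPUs the child returns (delmask) go
     * back into the parent's effective set. */
    static void return_from_partition(uint64_t *parent_eff, uint64_t delmask)
    {
        *parent_eff |= delmask;    /* cpumask_or() */
    }

    int main(void)
    {
        uint64_t parent_eff = 0xff;

        grant_to_partition(&parent_eff, 0x03);     /* -> 0xfc */
        return_from_partition(&parent_eff, 0x01);  /* -> 0xfd */
        printf("%#llx\n", (unsigned long long)parent_eff);
        return 0;
    }
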
1563 cpumask_copy(tmp->new_cpus, parent->effective_cpus); in update_cpumasks_hier()
1579 cpumask_equal(tmp->new_cpus, cp->effective_cpus)) { in update_cpumasks_hier()
1642 cpumask_copy(cp->effective_cpus, tmp->new_cpus); in update_cpumasks_hier()
1648 cpumask_andnot(cp->effective_cpus, cp->effective_cpus, in update_cpumasks_hier()
1658 !cpumask_equal(cp->cpus_allowed, cp->effective_cpus)); in update_cpumasks_hier()
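
update_cpumasks_hier() pushes a mask change down the whole subtree, recomputing each descendant's effective mask and pruning branches where the equality test at line 1579 shows nothing changed (the kernel additionally falls back to the parent's mask when the result would be empty, line 1563; omitted here). A recursive toy model, structure and names invented:

    #include <stdint.h>
    #include <stdio.h>

    struct node {
        uint64_t cpus_allowed;
        uint64_t effective_cpus;
        struct node **children;
        int nr_children;
    };

    /* Model of update_cpumasks_hier(): push the parent's new effective
     * mask down the tree, descending only where something changed. */
    static void propagate(struct node *cs)
    {
        for (int i = 0; i < cs->nr_children; i++) {
            struct node *cp = cs->children[i];
            uint64_t new_cpus = cp->cpus_allowed & cs->effective_cpus;

            if (new_cpus == cp->effective_cpus)
                continue;                   /* skip test, line 1579 */
            cp->effective_cpus = new_cpus;  /* cpumask_copy(), line 1642 */
            propagate(cp);
        }
    }

    int main(void)
    {
        struct node leaf = { .cpus_allowed = 0x3, .effective_cpus = 0x3 };
        struct node *kids[] = { &leaf };
        struct node root = { .cpus_allowed = 0xf, .effective_cpus = 0x1,
                             .children = kids, .nr_children = 1 };

        propagate(&root);
        printf("%#llx\n", (unsigned long long)leaf.effective_cpus); /* 0x1 */
        return 0;
    }
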
1767 tmp.delmask = trialcs->effective_cpus; in update_cpumask()
1809 compute_effective_cpumask(trialcs->effective_cpus, trialcs, in update_cpumask()
1821 (cpumask_subset(trialcs->effective_cpus, cs->subparts_cpus) && in update_cpumask()
2294 compute_effective_cpumask(cs->effective_cpus, cs, parent); in update_prstate()
2462 if (cpumask_empty(cs->effective_cpus)) in cpuset_can_attach()
2466 ret = task_can_attach(task, cs->effective_cpus); in cpuset_can_attach()
2760 seq_printf(sf, "%*pbl\n", cpumask_pr_args(cs->effective_cpus)); in cpuset_common_seq_show()
3107 cpumask_copy(cs->effective_cpus, parent->effective_cpus); in cpuset_css_online()
3143 cpumask_copy(cs->effective_cpus, parent->cpus_allowed); in cpuset_css_online()
3207 top_cpuset.effective_cpus); in cpuset_bind()
3257 BUG_ON(!alloc_cpumask_var(&top_cpuset.effective_cpus, GFP_KERNEL)); in cpuset_init()
3262 cpumask_setall(top_cpuset.effective_cpus); in cpuset_init()
3310 cpumask_copy(cs->effective_cpus, new_cpus); in hotplug_update_tasks_legacy()
3347 cpumask_copy(new_cpus, parent_cs(cs)->effective_cpus); in hotplug_update_tasks()
3352 cpumask_copy(cs->effective_cpus, new_cpus); in hotplug_update_tasks()
3476 cpus_updated = !cpumask_equal(&new_cpus, cs->effective_cpus); in cpuset_hotplug_update_tasks()
3530 cpus_updated = !cpumask_equal(top_cpuset.effective_cpus, &new_cpus); in cpuset_hotplug_workfn()
3561 cpumask_copy(top_cpuset.effective_cpus, &new_cpus); in cpuset_hotplug_workfn()
3652 cpumask_copy(top_cpuset.effective_cpus, cpu_active_mask); in cpuset_init_smp()
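
The last groups tie the mask to hotplug and boot: hotplug_update_tasks() and cpuset_hotplug_workfn() refresh effective masks as CPUs come and go (lines 3310-3561), and cpuset_init()/cpuset_init_smp() seed top_cpuset.effective_cpus with all CPUs and then cpu_active_mask (lines 3257-3652). The result is what cpuset_common_seq_show() prints at line 2760, i.e. the cpuset.cpus.effective file in cgroup v2 (cpuset.effective_cpus in v1).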