Searched refs:sched_group_span (Results 1 – 3 of 3) sorted by relevance

/Linux-v5.4/kernel/sched/
topology.c
50 if (group && !cpumask_test_cpu(cpu, sched_group_span(group))) { in sched_domain_debug_one()
62 if (!cpumask_weight(sched_group_span(group))) { in sched_domain_debug_one()
69 cpumask_intersects(groupmask, sched_group_span(group))) { in sched_domain_debug_one()
75 cpumask_or(groupmask, groupmask, sched_group_span(group)); in sched_domain_debug_one()
79 cpumask_pr_args(sched_group_span(group))); in sched_domain_debug_one()
82 !cpumask_equal(group_balance_mask(group), sched_group_span(group))) { in sched_domain_debug_one()
92 sched_group_span(group))) { in sched_domain_debug_one()
842 const struct cpumask *sg_span = sched_group_span(sg); in build_balance_mask()
888 sg_span = sched_group_span(sg); in build_group_from_child_sched_domain()
907 cpu = cpumask_first_and(sched_group_span(sg), mask); in init_overlap_sched_group()
[all …]
fair.c
5575 if (!cpumask_intersects(sched_group_span(group), in find_idlest_group()
5580 sched_group_span(group)); in find_idlest_group()
5590 for_each_cpu(i, sched_group_span(group)) { in find_idlest_group()
5700 return cpumask_first(sched_group_span(group)); in find_idlest_group_cpu()
5703 for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr) { in find_idlest_group_cpu()
7798 for_each_cpu(cpu, sched_group_span(sdg)) { in update_group_capacity()
8023 for_each_cpu_and(i, sched_group_span(group), env->cpus) { in update_sg_lb_stats()
8210 local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(sg)); in update_sd_lb_stats()
8605 for_each_cpu_and(i, sched_group_span(group), env->cpus) { in find_busiest_queue()
8806 .dst_grpmask = sched_group_span(sd->groups), in load_balance()
sched.h
1422 static inline struct cpumask *sched_group_span(struct sched_group *sg) in sched_group_span() function
1441 return cpumask_first(sched_group_span(group)); in group_first_cpu()
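
For quick context on what these hits resolve to: sched_group_span() is the accessor that returns the cpumask of CPUs covered by a scheduling group, and the callers listed above either iterate that mask (for_each_cpu(), for_each_cpu_and()) or test it with cpumask_* helpers. Below is a minimal sketch, assuming the v5.4 layout in which struct sched_group stores its span in a trailing cpumask[] array; the structure is trimmed to the relevant fields, and the group_allowed_cpus() helper is hypothetical, shown only to illustrate the common caller pattern.

/* Simplified sketch of the v5.4 definitions; not compilable outside the
 * kernel tree.  struct sched_group is trimmed to the fields that matter
 * for sched_group_span().
 */
struct sched_group {
	struct sched_group	*next;		/* circular list within the domain */
	unsigned int		group_weight;	/* number of CPUs in the span */
	unsigned long		cpumask[0];	/* span of this group (variable size) */
};

/* kernel/sched/sched.h:1422 -- return the CPUs covered by this group */
static inline struct cpumask *sched_group_span(struct sched_group *sg)
{
	return to_cpumask(sg->cpumask);
}

/* Hypothetical helper, for illustration only: count how many CPUs of the
 * group's span the task p is still allowed to run on, using the same
 * iteration pattern as find_idlest_group_cpu() and update_sg_lb_stats()
 * above.
 */
static int group_allowed_cpus(struct sched_group *group, struct task_struct *p)
{
	int i, n = 0;

	for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr)
		n++;

	return n;
}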