Searched refs:sched_group_span (Results 1 – 3 of 3) sorted by relevance
/Linux-v5.10/kernel/sched/
topology.c
    50   if (group && !cpumask_test_cpu(cpu, sched_group_span(group))) {  in sched_domain_debug_one()
    77   if (!cpumask_weight(sched_group_span(group))) {  in sched_domain_debug_one()
    84   cpumask_intersects(groupmask, sched_group_span(group))) {  in sched_domain_debug_one()
    90   cpumask_or(groupmask, groupmask, sched_group_span(group));  in sched_domain_debug_one()
    94   cpumask_pr_args(sched_group_span(group)));  in sched_domain_debug_one()
    97   !cpumask_equal(group_balance_mask(group), sched_group_span(group))) {  in sched_domain_debug_one()
   107   sched_group_span(group))) {  in sched_domain_debug_one()
   854   const struct cpumask *sg_span = sched_group_span(sg);  in build_balance_mask()
   900   sg_span = sched_group_span(sg);  in build_group_from_child_sched_domain()
   919   cpu = cpumask_first_and(sched_group_span(sg), mask);  in init_overlap_sched_group()
  [all …]
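Every hit between lines 50 and 107 sits in sched_domain_debug_one(), which sanity-checks a domain's group list: the domain's CPU must appear in a group's span, no span may be empty, sibling spans must not overlap, and the union of all spans is accumulated into groupmask. A condensed paraphrase of that walk (not verbatim v5.10 source; it only compiles inside kernel/sched/, where sched_group_span() and the scheduler types live):

/* Condensed paraphrase of the sched_domain_debug_one() group walk,
 * based on the hits above; assumes kernel/sched/sched.h context. */
static void debug_group_spans(struct sched_domain *sd, int cpu,
			      struct cpumask *groupmask)
{
	struct sched_group *group = sd->groups;

	cpumask_clear(groupmask);
	do {
		/* hit at 50: the domain's CPU must be in its groups */
		if (!cpumask_test_cpu(cpu, sched_group_span(group)))
			pr_err("ERROR: domain->groups does not contain CPU%d\n", cpu);

		/* hit at 77: a group with an empty span is broken */
		if (!cpumask_weight(sched_group_span(group)))
			pr_err("ERROR: empty group\n");

		/* hit at 84: sibling group spans must not overlap */
		if (cpumask_intersects(groupmask, sched_group_span(group)))
			pr_err("ERROR: repeated CPUs\n");

		/* hit at 90: accumulate the union of all spans */
		cpumask_or(groupmask, groupmask, sched_group_span(group));

		group = group->next;	/* groups form a circular list */
	} while (group != sd->groups);
}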
fair.c
  5892   return cpumask_first(sched_group_span(group));  in find_idlest_group_cpu()
  5895   for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr) {  in find_idlest_group_cpu()
  8164   for_each_cpu(cpu, sched_group_span(sdg)) {  in update_group_capacity()
  8386   local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(group));  in update_sg_lb_stats()
  8388   for_each_cpu_and(i, sched_group_span(group), env->cpus) {  in update_sg_lb_stats()
  8666   for_each_cpu(i, sched_group_span(group)) {  in update_sg_wakeup_stats()
  8785   if (!cpumask_intersects(sched_group_span(group),  in find_idlest_group()
  8790   sched_group_span(group));  in find_idlest_group()
  8880   idlest_cpu = cpumask_first(sched_group_span(idlest));  in find_idlest_group()
  8931   local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(sg));  in update_sd_lb_stats()
  [all …]
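The fair.c hits all use the same idiom: sched_group_span() supplies the universe of CPUs to scan, optionally intersected with a second mask (the task's affinity p->cpus_ptr at 5895, the balance environment's env->cpus at 8388). A minimal sketch of that pattern; pick_span_cpu() is a hypothetical helper for illustration, not a function in fair.c:

/* Hypothetical helper illustrating the span-scan idiom from the hits
 * above; not present in fair.c. Assumes kernel/sched/ context. */
static int pick_span_cpu(struct sched_group *group, struct task_struct *p)
{
	int i;

	/* Trivial group with a single CPU: take it, as at fair.c:5892. */
	if (group->group_weight == 1)
		return cpumask_first(sched_group_span(group));

	/* Scan CPUs both in the span and allowed for p, as at 5895. */
	for_each_cpu_and(i, sched_group_span(group), p->cpus_ptr) {
		if (available_idle_cpu(i))
			return i;
	}

	/* No idle candidate: fall back to the first CPU in the span. */
	return cpumask_first(sched_group_span(group));
}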
sched.h
  1495   static inline struct cpumask *sched_group_span(struct sched_group *sg)  in sched_group_span() function
  1514   return cpumask_first(sched_group_span(group));  in group_first_cpu()
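The hit at sched.h:1495 is the definition itself; the snippet stops at the signature. In v5.10 the span is stored as a flexible cpumask array at the tail of struct sched_group and converted with to_cpumask(), so the body is a one-liner. Reconstructed from memory rather than copied from the result:

/* kernel/sched/sched.h, around line 1495 (body reconstructed, not
 * shown in the search result) */
static inline struct cpumask *sched_group_span(struct sched_group *sg)
{
	return to_cpumask(sg->cpumask);
}

/* around line 1514: first CPU in the group's span, per the hit above */
static inline unsigned int group_first_cpu(struct sched_group *group)
{
	return cpumask_first(sched_group_span(group));
}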