Lines matching refs: per_cpu_ptr

908 		sibling = *per_cpu_ptr(sdd->sd, i);  in build_balance_mask()
969 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in init_overlap_sched_group()
1029 sibling = *per_cpu_ptr(sdd->sd, i); in build_overlap_sched_groups()
1179 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in get_group()
1187 sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
1188 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in get_group()
1493 WARN_ON_ONCE(*per_cpu_ptr(sdd->sd, cpu) != sd); in claim_allocations()
1494 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
1496 if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) in claim_allocations()
1497 *per_cpu_ptr(sdd->sds, cpu) = NULL; in claim_allocations()
1499 if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) in claim_allocations()
1500 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
1502 if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) in claim_allocations()
1503 *per_cpu_ptr(sdd->sgc, cpu) = NULL; in claim_allocations()
1545 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in sd_init()
1641 sd->shared = *per_cpu_ptr(sdd->sds, sd_id); in sd_init()
2107 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
2114 *per_cpu_ptr(sdd->sds, j) = sds; in __sdt_alloc()
2123 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
2134 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
2153 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
2156 kfree(*per_cpu_ptr(sdd->sd, j)); in __sdt_free()
2160 kfree(*per_cpu_ptr(sdd->sds, j)); in __sdt_free()
2162 kfree(*per_cpu_ptr(sdd->sg, j)); in __sdt_free()
2164 kfree(*per_cpu_ptr(sdd->sgc, j)); in __sdt_free()
2279 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
2289 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2309 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2364 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2374 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()