Lines matching refs: per_cpu_ptr
622 sibling = *per_cpu_ptr(sdd->sd, i); in build_balance_mask()
681 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in init_overlap_sched_group()
715 sibling = *per_cpu_ptr(sdd->sd, i); in build_overlap_sched_groups()
830 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in get_group()
837 sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
838 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in get_group()
1029 WARN_ON_ONCE(*per_cpu_ptr(sdd->sd, cpu) != sd); in claim_allocations()
1030 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
1032 if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) in claim_allocations()
1033 *per_cpu_ptr(sdd->sds, cpu) = NULL; in claim_allocations()
1035 if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) in claim_allocations()
1036 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
1038 if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) in claim_allocations()
1039 *per_cpu_ptr(sdd->sgc, cpu) = NULL; in claim_allocations()
1085 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in sd_init()
1192 sd->shared = *per_cpu_ptr(sdd->sds, sd_id); in sd_init()
1535 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
1542 *per_cpu_ptr(sdd->sds, j) = sds; in __sdt_alloc()
1551 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
1562 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
1581 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
1584 kfree(*per_cpu_ptr(sdd->sd, j)); in __sdt_free()
1588 kfree(*per_cpu_ptr(sdd->sds, j)); in __sdt_free()
1590 kfree(*per_cpu_ptr(sdd->sg, j)); in __sdt_free()
1592 kfree(*per_cpu_ptr(sdd->sgc, j)); in __sdt_free()
1660 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
1670 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
1687 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
1697 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
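
The hits above trace the lifetime of the per-CPU pointer slots behind sdd->sd, sdd->sds, sdd->sg and sdd->sgc: __sdt_alloc() stores newly allocated objects into *per_cpu_ptr(sdd->..., j), the domain/group build paths read the slots back, claim_allocations() clears slots whose objects remain referenced so they are not freed, and __sdt_free() frees whatever is left. What follows is a minimal, self-contained sketch of that array-of-pointers idiom only; all demo_* names are hypothetical and not kernel symbols.

/*
 * Sketch of the per-CPU array-of-pointers idiom seen above:
 * alloc_percpu(T *) gives one pointer-sized slot per CPU, and
 * *per_cpu_ptr(p, cpu) names the slot belonging to @cpu.
 */
#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/slab.h>

struct demo_item {
	int val;
};

struct demo_data {
	struct demo_item *__percpu *item;	/* one pointer slot per CPU */
};

static int demo_alloc(struct demo_data *d)
{
	int cpu;

	d->item = alloc_percpu(struct demo_item *);
	if (!d->item)
		return -ENOMEM;

	for_each_possible_cpu(cpu) {
		struct demo_item *it = kzalloc(sizeof(*it), GFP_KERNEL);

		if (!it)
			return -ENOMEM;
		/* store: same shape as *per_cpu_ptr(sdd->sd, j) = sd at 1535 */
		*per_cpu_ptr(d->item, cpu) = it;
	}
	return 0;
}

static void demo_free(struct demo_data *d)
{
	int cpu;

	/* read back and free: same shape as kfree(*per_cpu_ptr(sdd->sd, j)) at 1584 */
	for_each_possible_cpu(cpu)
		kfree(*per_cpu_ptr(d->item, cpu));
	free_percpu(d->item);
}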