Lines Matching refs:per_cpu_ptr (a sketch of the shared per-CPU pointer idiom follows the listing)
888 sibling = *per_cpu_ptr(sdd->sd, i); in build_balance_mask()
949 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in init_overlap_sched_group()
1009 sibling = *per_cpu_ptr(sdd->sd, i); in build_overlap_sched_groups()
1159 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in get_group()
1167 sg = *per_cpu_ptr(sdd->sg, cpu); in get_group()
1168 sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); in get_group()
1473 WARN_ON_ONCE(*per_cpu_ptr(sdd->sd, cpu) != sd); in claim_allocations()
1474 *per_cpu_ptr(sdd->sd, cpu) = NULL; in claim_allocations()
1476 if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) in claim_allocations()
1477 *per_cpu_ptr(sdd->sds, cpu) = NULL; in claim_allocations()
1479 if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) in claim_allocations()
1480 *per_cpu_ptr(sdd->sg, cpu) = NULL; in claim_allocations()
1482 if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) in claim_allocations()
1483 *per_cpu_ptr(sdd->sgc, cpu) = NULL; in claim_allocations()
1527 struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); in sd_init()
1623 sd->shared = *per_cpu_ptr(sdd->sds, sd_id); in sd_init()
2051 *per_cpu_ptr(sdd->sd, j) = sd; in __sdt_alloc()
2058 *per_cpu_ptr(sdd->sds, j) = sds; in __sdt_alloc()
2067 *per_cpu_ptr(sdd->sg, j) = sg; in __sdt_alloc()
2078 *per_cpu_ptr(sdd->sgc, j) = sgc; in __sdt_alloc()
2097 sd = *per_cpu_ptr(sdd->sd, j); in __sdt_free()
2100 kfree(*per_cpu_ptr(sdd->sd, j)); in __sdt_free()
2104 kfree(*per_cpu_ptr(sdd->sds, j)); in __sdt_free()
2106 kfree(*per_cpu_ptr(sdd->sg, j)); in __sdt_free()
2108 kfree(*per_cpu_ptr(sdd->sgc, j)); in __sdt_free()
2223 *per_cpu_ptr(d.sd, i) = sd; in build_sched_domains()
2233 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2250 for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { in build_sched_domains()
2260 sd = *per_cpu_ptr(d.sd, i); in build_sched_domains()
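All of these matches (apparently from kernel/sched/topology.c) share one idiom: fields such as sdd->sd, sdd->sds, sdd->sg and sdd->sgc are per-CPU arrays whose elements are themselves pointers, so every access is written `*per_cpu_ptr(base, cpu)` -- first locate that CPU's slot, then dereference it to reach the separately allocated object parked there. The sketch below illustrates only that double-pointer pattern with the generic per-CPU API; the `demo_*` names and types are hypothetical stand-ins, not the scheduler's real structures.

	/*
	 * Minimal sketch of the "*per_cpu_ptr(base, cpu)" idiom seen above.
	 * demo_domain / demo_data / demo_alloc / demo_free are illustrative
	 * only; the real counterparts are sched_domain, sd_data, __sdt_alloc()
	 * and __sdt_free().
	 */
	#include <linux/percpu.h>
	#include <linux/slab.h>
	#include <linux/cpumask.h>
	#include <linux/topology.h>
	#include <linux/errno.h>

	struct demo_domain {				/* stand-in payload object */
		int level;
	};

	struct demo_data {
		struct demo_domain * __percpu *sd;	/* per-CPU array of pointers */
	};

	static int demo_alloc(struct demo_data *dd)
	{
		int j;

		/* One pointer-sized slot per CPU, like sdd->sd. */
		dd->sd = alloc_percpu(struct demo_domain *);
		if (!dd->sd)
			return -ENOMEM;

		for_each_possible_cpu(j) {
			struct demo_domain *sd;

			/* Mirrors __sdt_alloc(): allocate near the CPU's node... */
			sd = kzalloc_node(sizeof(*sd), GFP_KERNEL, cpu_to_node(j));
			if (!sd)
				return -ENOMEM;	/* real code unwinds via its free path */

			/* ...and park the pointer in that CPU's slot. */
			*per_cpu_ptr(dd->sd, j) = sd;
		}
		return 0;
	}

	static void demo_free(struct demo_data *dd)
	{
		int j;

		/* Mirrors __sdt_free(): pull each slot back out and kfree() it. */
		for_each_possible_cpu(j)
			kfree(*per_cpu_ptr(dd->sd, j));

		free_percpu(dd->sd);
		dd->sd = NULL;
	}

The double indirection is what makes the claim_allocations() matches above work: build_sched_domains() keeps references to the domains it actually uses and NULLs out the corresponding slots, so the later teardown pass only kfree()s the objects nobody claimed (kfree(NULL) is a no-op).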