
Searched refs:sds (Results 1 – 3 of 3) sorted by relevance

/kernel/sched/
topology.c
648 struct sched_domain_shared *sds = NULL; in update_top_cache_domain() local
657 sds = sd->shared; in update_top_cache_domain()
663 rcu_assign_pointer(per_cpu(sd_llc_shared, cpu), sds); in update_top_cache_domain()
1458 if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) in claim_allocations()
1459 *per_cpu_ptr(sdd->sds, cpu) = NULL; in claim_allocations()
1606 sd->shared = *per_cpu_ptr(sdd->sds, sd_id); in sd_init()
2006 sdd->sds = alloc_percpu(struct sched_domain_shared *); in __sdt_alloc()
2007 if (!sdd->sds) in __sdt_alloc()
2020 struct sched_domain_shared *sds; in __sdt_alloc() local
2031 sds = kzalloc_node(sizeof(struct sched_domain_shared), in __sdt_alloc()
[all …]
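The topology.c hits trace the lifetime of the shared structure: __sdt_alloc() allocates one struct sched_domain_shared per CPU, sd_init() attaches it as sd->shared, claim_allocations() NULLs the per-CPU slot for instances that gained a reference so generic teardown will not free them, and update_top_cache_domain() publishes the LLC-level instance for lock-free readers. Below is a reduced sketch of that publish step, reconstructed from the hits above; the real function also caches sd_llc, sd_llc_size and sd_llc_id, and exact line numbers vary by kernel version.

    /* Reconstructed sketch, not verbatim kernel source. */
    static void update_top_cache_domain(int cpu)
    {
            struct sched_domain_shared *sds = NULL;
            struct sched_domain *sd;

            /* Highest domain whose CPUs share a last-level cache. */
            sd = highest_flag_domain(cpu, SD_SHARE_PKG_RESOURCES);
            if (sd)
                    sds = sd->shared;

            /* Publish for RCU readers such as set_idle_cores()/test_idle_cores(). */
            rcu_assign_pointer(per_cpu(sd_llc_shared, cpu), sds);
    }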
fair.c
6393 struct sched_domain_shared *sds; in set_idle_cores() local
6395 sds = rcu_dereference(per_cpu(sd_llc_shared, cpu)); in set_idle_cores()
6396 if (sds) in set_idle_cores()
6397 WRITE_ONCE(sds->has_idle_cores, val); in set_idle_cores()
6402 struct sched_domain_shared *sds; in test_idle_cores() local
6404 sds = rcu_dereference(per_cpu(sd_llc_shared, cpu)); in test_idle_cores()
6405 if (sds) in test_idle_cores()
6406 return READ_ONCE(sds->has_idle_cores); in test_idle_cores()
8730 static inline void init_sd_lb_stats(struct sd_lb_stats *sds) in init_sd_lb_stats() argument
8739 *sds = (struct sd_lb_stats){ in init_sd_lb_stats()
[all …]
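The fair.c hits show the consumer side: the per-LLC has_idle_cores hint used by the idle-core scan lives in the RCU-published structure, and both helpers tolerate a NULL pointer while sched domains are being rebuilt. A reconstruction of the two helpers around lines 6393-6406, assembled from the hits above (signatures differ slightly across kernel versions):

    static inline void set_idle_cores(int cpu, int val)
    {
            struct sched_domain_shared *sds;

            sds = rcu_dereference(per_cpu(sd_llc_shared, cpu));
            if (sds)
                    WRITE_ONCE(sds->has_idle_cores, val);
    }

    static inline bool test_idle_cores(int cpu, bool def)
    {
            struct sched_domain_shared *sds;

            sds = rcu_dereference(per_cpu(sd_llc_shared, cpu));
            if (sds)
                    return READ_ONCE(sds->has_idle_cores);

            /* No shared structure yet: fall back to the caller's default. */
            return def;
    }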
/kernel/trace/
bpf_trace.c
600 struct perf_sample_data sds[3]; member
608 struct bpf_trace_sample_data *sds = this_cpu_ptr(&bpf_trace_sds); in BPF_CALL_5() local
619 if (WARN_ON_ONCE(nest_level > ARRAY_SIZE(sds->sds))) { in BPF_CALL_5()
624 sd = &sds->sds[nest_level - 1]; in BPF_CALL_5()
684 if (WARN_ON_ONCE(nest_level > ARRAY_SIZE(bpf_misc_sds.sds))) { in bpf_event_output()
688 sd = this_cpu_ptr(&bpf_misc_sds.sds[nest_level - 1]); in bpf_event_output()
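In bpf_trace.c the name is unrelated to the scheduler: sds is a small per-CPU array of struct perf_sample_data, one slot per allowed nesting level (task, softirq, hardirq/NMI), so a BPF program that fires while another output is in flight gets its own sample buffer instead of corrupting one in use. A simplified sketch of that pattern, reconstructed from the hits above; bpf_perf_event_output_sketch() is a hypothetical stand-in for the real BPF_CALL_5(bpf_perf_event_output, ...) helper and omits the actual event emission.

    struct bpf_trace_sample_data {
            struct perf_sample_data sds[3];
    };

    static DEFINE_PER_CPU(struct bpf_trace_sample_data, bpf_trace_sds);
    static DEFINE_PER_CPU(int, bpf_trace_nest_level);

    /* Hypothetical, simplified stand-in for bpf_perf_event_output(). */
    static int bpf_perf_event_output_sketch(void *data, u64 size)
    {
            struct bpf_trace_sample_data *sds = this_cpu_ptr(&bpf_trace_sds);
            int nest_level = this_cpu_inc_return(bpf_trace_nest_level);
            struct perf_sample_data *sd;
            int err = -EBUSY;

            /* More nesting than slots means a bug or runaway recursion. */
            if (WARN_ON_ONCE(nest_level > ARRAY_SIZE(sds->sds)))
                    goto out;

            sd = &sds->sds[nest_level - 1];
            /* ... initialize *sd and write the perf event (omitted) ... */
            err = 0;
    out:
            this_cpu_dec(bpf_trace_nest_level);
            return err;
    }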