
Searched refs:cpus (Results 1 – 25 of 122) sorted by relevance

/tools/lib/perf/
cpumap.c
15 struct perf_cpu_map *cpus = malloc(sizeof(*cpus) + sizeof(int)); in perf_cpu_map__dummy_new() local
17 if (cpus != NULL) { in perf_cpu_map__dummy_new()
18 cpus->nr = 1; in perf_cpu_map__dummy_new()
19 cpus->map[0] = -1; in perf_cpu_map__dummy_new()
20 refcount_set(&cpus->refcnt, 1); in perf_cpu_map__dummy_new()
23 return cpus; in perf_cpu_map__dummy_new()
50 struct perf_cpu_map *cpus; in cpu_map__default_new() local
57 cpus = malloc(sizeof(*cpus) + nr_cpus * sizeof(int)); in cpu_map__default_new()
58 if (cpus != NULL) { in cpu_map__default_new()
62 cpus->map[i] = i; in cpu_map__default_new()
[all …]
evlist.c
43 perf_cpu_map__put(evsel->cpus); in __perf_evlist__propagate_maps()
44 evsel->cpus = perf_cpu_map__get(evlist->cpus); in __perf_evlist__propagate_maps()
45 } else if (!evsel->system_wide && perf_cpu_map__empty(evlist->cpus)) { in __perf_evlist__propagate_maps()
46 perf_cpu_map__put(evsel->cpus); in __perf_evlist__propagate_maps()
47 evsel->cpus = perf_cpu_map__get(evlist->cpus); in __perf_evlist__propagate_maps()
48 } else if (evsel->cpus != evsel->own_cpus) { in __perf_evlist__propagate_maps()
49 perf_cpu_map__put(evsel->cpus); in __perf_evlist__propagate_maps()
50 evsel->cpus = perf_cpu_map__get(evsel->own_cpus); in __perf_evlist__propagate_maps()
55 evlist->all_cpus = perf_cpu_map__merge(evlist->all_cpus, evsel->cpus); in __perf_evlist__propagate_maps()
126 perf_cpu_map__put(evlist->cpus); in perf_evlist__exit()
[all …]
/tools/perf/tests/
openat-syscall-all-cpus.c
25 struct perf_cpu_map *cpus; in test__openat_syscall_event_on_all_cpus() local
38 cpus = perf_cpu_map__new(NULL); in test__openat_syscall_event_on_all_cpus()
39 if (cpus == NULL) { in test__openat_syscall_event_on_all_cpus()
53 if (evsel__open(evsel, cpus, threads) < 0) { in test__openat_syscall_event_on_all_cpus()
60 for (cpu = 0; cpu < cpus->nr; ++cpu) { in test__openat_syscall_event_on_all_cpus()
68 if (cpus->map[cpu] >= CPU_SETSIZE) { in test__openat_syscall_event_on_all_cpus()
69 pr_debug("Ignoring CPU %d\n", cpus->map[cpu]); in test__openat_syscall_event_on_all_cpus()
73 CPU_SET(cpus->map[cpu], &cpu_set); in test__openat_syscall_event_on_all_cpus()
76 cpus->map[cpu], in test__openat_syscall_event_on_all_cpus()
84 CPU_CLR(cpus->map[cpu], &cpu_set); in test__openat_syscall_event_on_all_cpus()
[all …]
cpumap.c
54 struct cpu_map_entries *cpus; in process_event_cpus() local
62 cpus = (struct cpu_map_entries *)data->data; in process_event_cpus()
64 TEST_ASSERT_VAL("wrong nr", cpus->nr == 2); in process_event_cpus()
65 TEST_ASSERT_VAL("wrong cpu", cpus->cpu[0] == 1); in process_event_cpus()
66 TEST_ASSERT_VAL("wrong cpu", cpus->cpu[1] == 256); in process_event_cpus()
80 struct perf_cpu_map *cpus; in test__cpu_map_synthesize() local
83 cpus = perf_cpu_map__new("0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19"); in test__cpu_map_synthesize()
86 !perf_event__synthesize_cpu_map(NULL, cpus, process_event_mask, NULL)); in test__cpu_map_synthesize()
88 perf_cpu_map__put(cpus); in test__cpu_map_synthesize()
91 cpus = perf_cpu_map__new("1,256"); in test__cpu_map_synthesize()
[all …]
event-times.c
115 struct perf_cpu_map *cpus; in attach__cpu_disabled() local
120 cpus = perf_cpu_map__new("0"); in attach__cpu_disabled()
121 if (cpus == NULL) { in attach__cpu_disabled()
128 err = evsel__open_per_cpu(evsel, cpus, -1); in attach__cpu_disabled()
137 perf_cpu_map__put(cpus); in attach__cpu_disabled()
144 struct perf_cpu_map *cpus; in attach__cpu_enabled() local
149 cpus = perf_cpu_map__new("0"); in attach__cpu_enabled()
150 if (cpus == NULL) { in attach__cpu_enabled()
155 err = evsel__open_per_cpu(evsel, cpus, -1); in attach__cpu_enabled()
159 perf_cpu_map__put(cpus); in attach__cpu_enabled()
mmap-basic.c
37 struct perf_cpu_map *cpus; in test__basic_mmap() local
55 cpus = perf_cpu_map__new(NULL); in test__basic_mmap()
56 if (cpus == NULL) { in test__basic_mmap()
62 CPU_SET(cpus->map[0], &cpu_set); in test__basic_mmap()
66 cpus->map[0], str_error_r(errno, sbuf, sizeof(sbuf))); in test__basic_mmap()
76 perf_evlist__set_maps(&evlist->core, cpus, threads); in test__basic_mmap()
93 if (evsel__open(evsels[i], cpus, threads) < 0) { in test__basic_mmap()
162 perf_cpu_map__put(cpus); in test__basic_mmap()
sw-clock.c
45 struct perf_cpu_map *cpus = NULL; in __test__sw_clock_freq() local
64 cpus = perf_cpu_map__dummy_new(); in __test__sw_clock_freq()
66 if (!cpus || !threads) { in __test__sw_clock_freq()
72 perf_evlist__set_maps(&evlist->core, cpus, threads); in __test__sw_clock_freq()
130 perf_cpu_map__put(cpus); in __test__sw_clock_freq()
task-exit.c
54 struct perf_cpu_map *cpus; in test__task_exit() local
73 cpus = perf_cpu_map__dummy_new(); in test__task_exit()
75 if (!cpus || !threads) { in test__task_exit()
81 perf_evlist__set_maps(&evlist->core, cpus, threads); in test__task_exit()
149 perf_cpu_map__put(cpus); in test__task_exit()
keep-tracking.c
75 struct perf_cpu_map *cpus = NULL; in test__keep_tracking() local
84 cpus = perf_cpu_map__new(NULL); in test__keep_tracking()
85 CHECK_NOT_NULL__(cpus); in test__keep_tracking()
90 perf_evlist__set_maps(&evlist->core, cpus, threads); in test__keep_tracking()
158 perf_cpu_map__put(cpus); in test__keep_tracking()
/tools/perf/arch/arm64/util/
header.c
17 static int _get_cpuid(char *buf, size_t sz, struct perf_cpu_map *cpus) in _get_cpuid() argument
26 cpus = perf_cpu_map__get(cpus); in _get_cpuid()
28 for (cpu = 0; cpu < perf_cpu_map__nr(cpus); cpu++) { in _get_cpuid()
33 sysfs, cpus->map[cpu]); in _get_cpuid()
57 perf_cpu_map__put(cpus); in _get_cpuid()
67 struct perf_cpu_map *cpus = perf_cpu_map__new(NULL); in get_cpuid() local
70 if (!cpus) in get_cpuid()
73 ret = _get_cpuid(buf, sz, cpus); in get_cpuid()
75 perf_cpu_map__put(cpus); in get_cpuid()
85 if (!pmu || !pmu->cpus) in get_cpuid_str()
[all …]
/tools/perf/util/
evlist-hybrid.c
24 struct perf_cpu_map *cpus; in evlist__add_default_hybrid() local
34 cpus = perf_cpu_map__get(pmu->cpus); in evlist__add_default_hybrid()
35 evsel->core.cpus = cpus; in evlist__add_default_hybrid()
36 evsel->core.own_cpus = perf_cpu_map__get(cpus); in evlist__add_default_hybrid()
92 struct perf_cpu_map *cpus; in evlist__fix_hybrid_cpus() local
100 cpus = perf_cpu_map__new(cpu_list); in evlist__fix_hybrid_cpus()
101 if (!cpus) in evlist__fix_hybrid_cpus()
120 ret = perf_pmu__cpus_match(pmu, cpus, &matched_cpus, in evlist__fix_hybrid_cpus()
128 matched_cpus->nr < cpus->nr || in evlist__fix_hybrid_cpus()
129 matched_cpus->nr < pmu->cpus->nr)) { in evlist__fix_hybrid_cpus()
[all …]
cpumap.c
21 static struct perf_cpu_map *cpu_map__from_entries(struct cpu_map_entries *cpus) in cpu_map__from_entries() argument
25 map = perf_cpu_map__empty_new(cpus->nr); in cpu_map__from_entries()
29 for (i = 0; i < cpus->nr; i++) { in cpu_map__from_entries()
35 if (cpus->cpu[i] == (u16) -1) in cpu_map__from_entries()
38 map->map[i] = (int) cpus->cpu[i]; in cpu_map__from_entries()
83 struct perf_cpu_map *cpus = malloc(sizeof(*cpus) + sizeof(int) * nr); in perf_cpu_map__empty_new() local
85 if (cpus != NULL) { in perf_cpu_map__empty_new()
88 cpus->nr = nr; in perf_cpu_map__empty_new()
90 cpus->map[i] = -1; in perf_cpu_map__empty_new()
92 refcount_set(&cpus->refcnt, 1); in perf_cpu_map__empty_new()
[all …]
perf_api_probe.c
63 struct perf_cpu_map *cpus; in perf_probe_api() local
66 cpus = perf_cpu_map__new(NULL); in perf_probe_api()
67 if (!cpus) in perf_probe_api()
69 cpu = cpus->map[0]; in perf_probe_api()
70 perf_cpu_map__put(cpus); in perf_probe_api()
138 struct perf_cpu_map *cpus; in perf_can_record_cpu_wide() local
141 cpus = perf_cpu_map__new(NULL); in perf_can_record_cpu_wide()
142 if (!cpus) in perf_can_record_cpu_wide()
144 cpu = cpus->map[0]; in perf_can_record_cpu_wide()
145 perf_cpu_map__put(cpus); in perf_can_record_cpu_wide()
cpumap.h
41 int cpu_map__build_socket_map(struct perf_cpu_map *cpus, struct cpu_aggr_map **sockp);
42 int cpu_map__build_die_map(struct perf_cpu_map *cpus, struct cpu_aggr_map **diep);
43 int cpu_map__build_core_map(struct perf_cpu_map *cpus, struct cpu_aggr_map **corep);
44 int cpu_map__build_node_map(struct perf_cpu_map *cpus, struct cpu_aggr_map **nodep);
61 int cpu_map__build_map(struct perf_cpu_map *cpus, struct cpu_aggr_map **res,
65 int cpu_map__cpu(struct perf_cpu_map *cpus, int idx);
66 bool cpu_map__has(struct perf_cpu_map *cpus, int cpu);
/tools/lib/perf/tests/
test-cpumap.c
16 struct perf_cpu_map *cpus; in test_cpumap() local
22 cpus = perf_cpu_map__dummy_new(); in test_cpumap()
23 if (!cpus) in test_cpumap()
26 perf_cpu_map__get(cpus); in test_cpumap()
27 perf_cpu_map__put(cpus); in test_cpumap()
28 perf_cpu_map__put(cpus); in test_cpumap()
test-evlist.c
32 struct perf_cpu_map *cpus; in test_stat_cpu() local
45 cpus = perf_cpu_map__new(NULL); in test_stat_cpu()
46 __T("failed to create cpus", cpus); in test_stat_cpu()
65 perf_evlist__set_maps(evlist, cpus, NULL); in test_stat_cpu()
71 cpus = perf_evsel__cpus(evsel); in test_stat_cpu()
73 for (idx = 0; idx < perf_cpu_map__nr(cpus); idx++) { in test_stat_cpu()
84 perf_cpu_map__put(cpus); in test_stat_cpu()
214 struct perf_cpu_map *cpus; in test_mmap_thread() local
260 cpus = perf_cpu_map__dummy_new(); in test_mmap_thread()
261 __T("failed to create cpus", cpus); in test_mmap_thread()
[all …]
/tools/perf/arch/nds32/util/
header.c
15 struct cpu_map *cpus; in get_cpuid_str() local
18 if (!sysfs || !pmu || !pmu->cpus) in get_cpuid_str()
25 cpus = cpu_map__get(pmu->cpus); in get_cpuid_str()
26 sprintf(buf, "0x%x", cpus->nr - 1); in get_cpuid_str()
27 cpu_map__put(cpus); in get_cpuid_str()
/tools/lib/perf/include/perf/
cpumap.h
19 LIBPERF_API int perf_cpu_map__cpu(const struct perf_cpu_map *cpus, int idx);
20 LIBPERF_API int perf_cpu_map__nr(const struct perf_cpu_map *cpus);
24 #define perf_cpu_map__for_each_cpu(cpu, idx, cpus) \ argument
25 for ((idx) = 0, (cpu) = perf_cpu_map__cpu(cpus, idx); \
26 (idx) < perf_cpu_map__nr(cpus); \
27 (idx)++, (cpu) = perf_cpu_map__cpu(cpus, idx))
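The prototypes and iteration macro above come from libperf's public header (tools/lib/perf/include/perf/cpumap.h), which most of the hits in this search go through. A minimal sketch of that lifecycle, based only on the calls visible in these results (perf_cpu_map__new(NULL) for all online CPUs, the for_each iterator, perf_cpu_map__put() to drop the reference); the standalone main() and the -lperf link step are assumptions for illustration, not part of any hit:

/* Sketch: create a CPU map, iterate it, drop the reference.
 * Assumes libperf is built/installed from tools/lib/perf; link with -lperf.
 */
#include <stdio.h>
#include <perf/cpumap.h>

int main(void)
{
	struct perf_cpu_map *cpus = perf_cpu_map__new(NULL);	/* NULL => all online CPUs */
	int idx, cpu;

	if (!cpus)
		return 1;

	/* perf_cpu_map__for_each_cpu() walks (idx, cpu) pairs, as in the macro above */
	perf_cpu_map__for_each_cpu(cpu, idx, cpus)
		printf("idx %d -> cpu %d\n", idx, cpu);

	perf_cpu_map__put(cpus);	/* releases the reference taken by __new() */
	return 0;
}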
/tools/testing/selftests/rcutorture/bin/
jitter.sh
58 if cpus=`grep 1 /sys/devices/system/cpu/*/online 2>&1 |
63 cpus=
66 cpus="$cpus $nohotplugcpus"
68 cpumask=`awk -v cpus="$cpus" -v me=$me -v n=$n 'BEGIN {
/tools/perf/python/
twatch.py
12 cpus = perf.cpu_map()
28 evsel.open(cpus = cpus, threads = threads);
29 evlist = perf.evlist(cpus, threads)
34 for cpu in cpus:
/tools/testing/selftests/cpufreq/
cpu.sh
17 cpus=$(ls $CPUROOT | grep "cpu[0-9].*")
18 for cpu in $cpus; do
25 cpus=$(ls $CPUROOT | grep "cpu[1-9].*")
26 for cpu in $cpus; do
/tools/power/cpupower/utils/
cpufreq-set.c
299 struct cpufreq_affected_cpus *cpus; in cmd_freq_set() local
305 cpus = cpufreq_get_related_cpus(cpu); in cmd_freq_set()
306 if (!cpus) in cmd_freq_set()
308 while (cpus->next) { in cmd_freq_set()
309 bitmask_setbit(cpus_chosen, cpus->cpu); in cmd_freq_set()
310 cpus = cpus->next; in cmd_freq_set()
313 bitmask_setbit(cpus_chosen, cpus->cpu); in cmd_freq_set()
314 cpufreq_put_related_cpus(cpus); in cmd_freq_set()
cpufreq-info.c
392 struct cpufreq_affected_cpus *cpus = cpufreq_get_affected_cpus(cpu); in get_affected_cpus() local
395 if (!cpus) { in get_affected_cpus()
400 while (cpus->next) { in get_affected_cpus()
401 printf("%d ", cpus->cpu); in get_affected_cpus()
402 cpus = cpus->next; in get_affected_cpus()
404 printf("%d\n", cpus->cpu); in get_affected_cpus()
405 cpufreq_put_affected_cpus(cpus); in get_affected_cpus()
413 struct cpufreq_affected_cpus *cpus = cpufreq_get_related_cpus(cpu); in get_related_cpus() local
416 if (!cpus) { in get_related_cpus()
421 while (cpus->next) { in get_related_cpus()
[all …]
/tools/lib/perf/Documentation/examples/
sampling.c
28 struct perf_cpu_map *cpus; in main() local
42 cpus = perf_cpu_map__new(NULL); in main()
43 if (!cpus) { in main()
62 perf_evlist__set_maps(evlist, cpus, NULL); in main()
117 perf_cpu_map__put(cpus); in main()
/tools/testing/selftests/rcutorture/formal/srcu-cbmc/tests/
test_script.sh
71 cpus=${NR_CPUS:-${default_cpus}}
74 if test $cpus -lt ${min_cpus_fail:-0}; then
78 cbmc_opts="-DNR_CPUS=${cpus} ${sync_srcu_mode_flags} ${test_cbmc_options} ${CBMC_FLAGS}"
