
Searched refs:nr_cpus (Results 1 – 15 of 15) sorted by relevance

/tools/perf/util/
cpumap.c
23 int nr_cpus; in cpu_map__default_new() local
25 nr_cpus = sysconf(_SC_NPROCESSORS_ONLN); in cpu_map__default_new()
26 if (nr_cpus < 0) in cpu_map__default_new()
29 cpus = malloc(sizeof(*cpus) + nr_cpus * sizeof(int)); in cpu_map__default_new()
32 for (i = 0; i < nr_cpus; ++i) in cpu_map__default_new()
35 cpus->nr = nr_cpus; in cpu_map__default_new()
42 static struct cpu_map *cpu_map__trim_new(int nr_cpus, int *tmp_cpus) in cpu_map__trim_new() argument
44 size_t payload_size = nr_cpus * sizeof(int); in cpu_map__trim_new()
48 cpus->nr = nr_cpus; in cpu_map__trim_new()
59 int nr_cpus = 0; in cpu_map__read() local
[all …]
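
The cpumap.c hits above show perf sizing its default CPU map from the online-CPU count. A minimal sketch of that pattern, assuming an illustrative cpu_list type rather than perf's real struct cpu_map:

    #include <stdio.h>
    #include <stdlib.h>
    #include <unistd.h>

    /* Illustrative stand-in for perf's cpu_map: a count plus a flexible array. */
    struct cpu_list {
        int nr;
        int map[];
    };

    static struct cpu_list *cpu_list__default_new(void)
    {
        long nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);
        struct cpu_list *cpus;
        int i;

        if (nr_cpus < 0)
            return NULL;    /* sysconf() failed */

        cpus = malloc(sizeof(*cpus) + nr_cpus * sizeof(int));
        if (!cpus)
            return NULL;

        for (i = 0; i < nr_cpus; i++)
            cpus->map[i] = i;    /* logical index == CPU id */
        cpus->nr = (int)nr_cpus;
        return cpus;
    }

    int main(void)
    {
        struct cpu_list *cpus = cpu_list__default_new();

        if (!cpus)
            return 1;
        printf("%d online CPUs\n", cpus->nr);
        free(cpus);
        return 0;
    }

Note that _SC_NPROCESSORS_ONLN counts online CPUs only; the trace-agent entry further down uses _SC_NPROCESSORS_CONF to count configured CPUs instead.
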
env.c
66 int cpu, nr_cpus; in perf_env__read_cpu_topology_map() local
74 nr_cpus = env->nr_cpus_avail; in perf_env__read_cpu_topology_map()
75 if (nr_cpus == -1) in perf_env__read_cpu_topology_map()
78 env->cpu = calloc(nr_cpus, sizeof(env->cpu[0])); in perf_env__read_cpu_topology_map()
82 for (cpu = 0; cpu < nr_cpus; ++cpu) { in perf_env__read_cpu_topology_map()
87 env->nr_cpus_avail = nr_cpus; in perf_env__read_cpu_topology_map()
evlist.c
411 int nr_cpus = cpu_map__nr(evlist->cpus); in perf_evlist__enable_event_thread() local
416 for (cpu = 0; cpu < nr_cpus; cpu++) { in perf_evlist__enable_event_thread()
437 int nr_cpus = cpu_map__nr(evlist->cpus); in perf_evlist__alloc_pollfd() local
444 nfds += nr_cpus; in perf_evlist__alloc_pollfd()
446 nfds += nr_cpus * nr_threads; in perf_evlist__alloc_pollfd()
1129 int nr_cpus = cpu_map__nr(evlist->cpus); in perf_evlist__mmap_per_cpu() local
1133 for (cpu = 0; cpu < nr_cpus; cpu++) { in perf_evlist__mmap_per_cpu()
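
The perf_evlist__alloc_pollfd() hit sizes its pollfd array as a CPU x thread matrix. A sketch of the same arithmetic with illustrative parameters; the real code walks the event list and checks a per-event system-wide flag rather than taking counts:

    #include <poll.h>
    #include <stdlib.h>

    /*
     * A system-wide event contributes one fd per CPU; a per-task event
     * contributes one fd per (CPU, thread) pair.
     */
    static struct pollfd *alloc_pollfds(int nr_cpus, int nr_threads,
                                        int nr_system_wide, int nr_per_task)
    {
        int nfds = 0;

        nfds += nr_system_wide * nr_cpus;
        nfds += nr_per_task * nr_cpus * nr_threads;

        return calloc(nfds, sizeof(struct pollfd));
    }

    int main(void)
    {
        /* e.g. 8 CPUs, 4 threads, one system-wide and one per-task event */
        struct pollfd *pfds = alloc_pollfds(8, 4, 1, 1);
        int ok = pfds != NULL;

        free(pfds);
        return ok ? 0 : 1;
    }
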
evsel.c
1624 int nr_cpus, int nr_threads, in perf_evsel__remove_fd() argument
1627 for (int cpu = 0; cpu < nr_cpus; cpu++) in perf_evsel__remove_fd()
1633 int nr_cpus, int cpu_idx, in update_fds() argument
1638 if (cpu_idx >= nr_cpus || thread_idx >= nr_threads) in update_fds()
1642 nr_cpus = pos != evsel ? nr_cpus : cpu_idx; in update_fds()
1644 perf_evsel__remove_fd(pos, nr_cpus, nr_threads, thread_idx); in update_fds()
1657 int nr_cpus, int cpu, in ignore_missing_thread() argument
1682 if (update_fds(evsel, nr_cpus, cpu, threads->nr, thread)) in ignore_missing_thread()
/tools/testing/selftests/bpf/
test_lru_map.c
27 static int nr_cpus; variable
45 unsigned long long value0[nr_cpus], value1[nr_cpus]; in map_subset()
76 while (next < nr_cpus) { in sched_next_online()
99 unsigned long long key, value[nr_cpus]; in test_lru_sanity0()
109 lru_map_fd = create_map(map_type, map_flags, 2 * nr_cpus); in test_lru_sanity0()
187 unsigned long long key, end_key, value[nr_cpus]; in test_lru_sanity1()
263 unsigned long long key, value[nr_cpus]; in test_lru_sanity2()
370 unsigned long long key, end_key, value[nr_cpus]; in test_lru_sanity3()
435 unsigned long long key, value[nr_cpus]; in test_lru_sanity4()
446 3 * tgt_free * nr_cpus); in test_lru_sanity4()
[all …]
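
The value[nr_cpus] buffers in test_lru_map.c exist because per-CPU map types exchange one value slot per possible CPU with userspace, which is also why the selftests size them with their bpf_num_possible_cpus() helper rather than the online count. A minimal sketch of the same sizing using current libbpf calls (bpf_map_create(), libbpf_num_possible_cpus()); the test itself uses its own create_map() wrapper, and this sketch needs root/CAP_BPF and -lbpf to run:

    #include <stdio.h>
    #include <bpf/bpf.h>
    #include <bpf/libbpf.h>

    int main(void)
    {
        /* Per-CPU maps are sized by *possible* CPUs, not online ones. */
        int cpu, fd, nr_cpus = libbpf_num_possible_cpus();

        if (nr_cpus <= 0)
            return 1;

        unsigned long long key = 1, values[nr_cpus];

        fd = bpf_map_create(BPF_MAP_TYPE_PERCPU_HASH, "percpu_demo",
                            sizeof(key), sizeof(values[0]), 16, NULL);
        if (fd < 0)
            return 1;

        for (cpu = 0; cpu < nr_cpus; cpu++)
            values[cpu] = 100 + cpu;    /* one slot per possible CPU */

        /* One update writes every CPU's slot for this key ... */
        if (bpf_map_update_elem(fd, &key, values, BPF_ANY))
            return 1;
        /* ... and one lookup reads them all back. */
        if (bpf_map_lookup_elem(fd, &key, values))
            return 1;

        for (cpu = 0; cpu < nr_cpus; cpu++)
            printf("cpu %d: %llu\n", cpu, values[cpu]);
        return 0;
    }
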
test_maps.c
142 unsigned int nr_cpus = bpf_num_possible_cpus(); in test_hashmap_percpu() local
155 for (i = 0; i < nr_cpus; i++) in test_hashmap_percpu()
217 for (i = 0; i < nr_cpus; i++) in test_hashmap_percpu()
350 unsigned int nr_cpus = bpf_num_possible_cpus(); in test_arraymap_percpu() local
361 for (i = 0; i < nr_cpus; i++) in test_arraymap_percpu()
380 bpf_percpu(values, nr_cpus - 1) == 0); in test_arraymap_percpu()
409 unsigned int nr_cpus = bpf_num_possible_cpus(); in test_arraymap_percpu_many_keys() local
425 for (i = 0; i < nr_cpus; i++) in test_arraymap_percpu_many_keys()
432 for (i = 0; i < nr_cpus; i++) in test_arraymap_percpu_many_keys()
437 for (i = 0; i < nr_cpus; i++) in test_arraymap_percpu_many_keys()
test_progs.c
169 unsigned int nr_cpus = bpf_num_possible_cpus(); in test_l4lb() local
180 } stats[nr_cpus]; in test_l4lb()
235 for (i = 0; i < nr_cpus; i++) { in test_l4lb()
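
The stats[nr_cpus] array in test_l4lb() is the read side of the same idea: each CPU increments only its own slot inside the BPF program, so userspace sums the slots after the lookup. A compile-only sketch of that aggregation step; map_fd and key are assumed to come from an already-loaded program, and the field names follow the test's byte/packet counters:

    #include <bpf/bpf.h>
    #include <bpf/libbpf.h>

    /* One {bytes, pkts} pair per possible CPU, as in the l4lb stats map. */
    struct vip_stats {
        unsigned long long bytes;
        unsigned long long pkts;
    };

    int sum_percpu_stats(int map_fd, const void *key,
                         unsigned long long *bytes, unsigned long long *pkts)
    {
        int cpu, nr_cpus = libbpf_num_possible_cpus();

        if (nr_cpus <= 0)
            return -1;

        struct vip_stats stats[nr_cpus];

        if (bpf_map_lookup_elem(map_fd, key, stats))
            return -1;

        *bytes = *pkts = 0;
        for (cpu = 0; cpu < nr_cpus; cpu++) {
            *bytes += stats[cpu].bytes;    /* each CPU counted its own traffic */
            *pkts += stats[cpu].pkts;
        }
        return 0;
    }
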
/tools/testing/selftests/rcutorture/bin/
functions.sh
76 if test "$3" -gt "$nr_cpus"
78 echo $nr_cpus
/tools/virtio/virtio-trace/
trace-agent.c
33 int nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF); in get_total_cpus() local
35 if (nr_cpus <= 0) { in get_total_cpus()
38 } else if (nr_cpus > MAX_CPUS) { in get_total_cpus()
43 return nr_cpus; in get_total_cpus()
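
Unlike the perf cpumap code above, the trace agent counts configured CPUs (_SC_NPROCESSORS_CONF, which includes offline ones) and bounds the result by its own MAX_CPUS. A minimal sketch of that check; the MAX_CPUS value and the error handling here are illustrative, not the agent's exact behaviour:

    #include <stdio.h>
    #include <unistd.h>

    #define MAX_CPUS 256    /* illustrative; the agent defines its own limit */

    static int get_total_cpus(void)
    {
        int nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF);

        if (nr_cpus <= 0) {
            fprintf(stderr, "could not read the CPU count\n");
            return -1;
        } else if (nr_cpus > MAX_CPUS) {
            nr_cpus = MAX_CPUS;    /* clamp to what the agent supports */
        }
        return nr_cpus;
    }

    int main(void)
    {
        printf("%d CPUs\n", get_total_cpus());
        return 0;
    }
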
/tools/testing/selftests/rcutorture/configs/rcu/
TREE01.boot
1 rcutorture.torture_type=rcu_bh maxcpus=8 nr_cpus=43
/tools/testing/selftests/vm/
userfaultfd.c
76 static unsigned long nr_cpus, nr_pages, nr_pages_per_cpu, page_size; variable
567 pthread_t locking_threads[nr_cpus]; in stress()
568 pthread_t uffd_threads[nr_cpus]; in stress()
569 pthread_t background_threads[nr_cpus]; in stress()
573 for (cpu = 0; cpu < nr_cpus; cpu++) { in stress()
592 for (cpu = 0; cpu < nr_cpus; cpu++) in stress()
608 for (cpu = 0; cpu < nr_cpus; cpu++) { in stress()
626 for (cpu = 0; cpu < nr_cpus; cpu++) in stress()
1021 unsigned long userfaults[nr_cpus]; in userfaultfd_stress()
1053 pipefd = malloc(sizeof(int) * nr_cpus * 2); in userfaultfd_stress()
[all …]
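
The userfaultfd stress test keeps one locking/uffd/background thread per CPU, sizing its pthread_t arrays at run time from nr_cpus. A minimal sketch of that one-thread-per-CPU setup (the worker body is hypothetical; build with -pthread):

    #include <pthread.h>
    #include <stdio.h>
    #include <unistd.h>

    static unsigned long nr_cpus;

    static void *worker(void *arg)
    {
        unsigned long cpu = (unsigned long)arg;

        /* the real test threads fault pages / service the userfaultfd here */
        printf("worker for cpu %lu\n", cpu);
        return NULL;
    }

    int main(void)
    {
        unsigned long cpu;

        nr_cpus = sysconf(_SC_NPROCESSORS_ONLN);
        if ((long)nr_cpus <= 0)
            return 1;

        pthread_t workers[nr_cpus];    /* VLA, as in the selftest */

        for (cpu = 0; cpu < nr_cpus; cpu++)
            if (pthread_create(&workers[cpu], NULL, worker, (void *)cpu))
                return 1;
        for (cpu = 0; cpu < nr_cpus; cpu++)
            pthread_join(workers[cpu], NULL);
        return 0;
    }
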
run_vmtests
26 nr_cpus=$(nproc)
28 half_ufd_size_MB=$((((nr_cpus * hpgsize_MB + 127) / 128) * 128))
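
The run_vmtests arithmetic rounds nr_cpus * hpgsize_MB up to the next multiple of 128 MB before using it as the test's memory size. The same (x + 127) / 128 * 128 idiom in C, with made-up example numbers:

    #include <stdio.h>

    /* Round x up to the next multiple of m (positive integers). */
    #define ROUND_UP(x, m)  ((((x) + (m) - 1) / (m)) * (m))

    int main(void)
    {
        unsigned long nr_cpus = 96, hpgsize_MB = 2;    /* example values */
        unsigned long half_ufd_size_MB = ROUND_UP(nr_cpus * hpgsize_MB, 128);

        /* 96 * 2 = 192 MB rounds up to 256 MB */
        printf("%lu MB\n", half_ufd_size_MB);
        return 0;
    }
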
/tools/perf/bench/
numa.c
126 int nr_cpus; member
277 for (cpu = 0; cpu < g->p.nr_cpus; cpu++) in bind_to_cpu()
280 BUG_ON(target_cpu < 0 || target_cpu >= g->p.nr_cpus); in bind_to_cpu()
292 int cpus_per_node = g->p.nr_cpus / nr_numa_nodes(); in bind_to_node()
297 BUG_ON(cpus_per_node * nr_numa_nodes() != g->p.nr_cpus); in bind_to_node()
306 for (cpu = 0; cpu < g->p.nr_cpus; cpu++) in bind_to_node()
312 BUG_ON(cpu_stop > g->p.nr_cpus); in bind_to_node()
538 BUG_ON(step <= 0 || step >= g->p.nr_cpus); in parse_setup_cpu_list()
550 BUG_ON(bind_len <= 0 || bind_len > g->p.nr_cpus); in parse_setup_cpu_list()
563 if (bind_cpu_0 >= g->p.nr_cpus || bind_cpu_1 >= g->p.nr_cpus) { in parse_setup_cpu_list()
[all …]
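
The numa bench validates every target CPU against g->p.nr_cpus before pinning a worker to it. A sketch of the underlying bind-to-one-CPU step with sched_setaffinity(); how the bench itself obtains nr_cpus is not shown in these hits, so sysconf() stands in here:

    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdio.h>
    #include <unistd.h>

    static int bind_to_cpu(int target_cpu)
    {
        int nr_cpus = (int)sysconf(_SC_NPROCESSORS_CONF);
        cpu_set_t mask;

        /* the bench BUG_ON()s instead of returning an error here */
        if (nr_cpus <= 0 || target_cpu < 0 || target_cpu >= nr_cpus)
            return -1;

        CPU_ZERO(&mask);
        CPU_SET(target_cpu, &mask);
        return sched_setaffinity(0, sizeof(mask), &mask);    /* 0 == calling thread */
    }

    int main(void)
    {
        if (bind_to_cpu(0))
            return 1;
        printf("now running on CPU %d\n", sched_getcpu());
        return 0;
    }
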
/tools/power/cpupower/utils/
cpufreq-info.c
59 unsigned int cpu, nr_cpus; in proc_cpufreq_output() local
67 nr_cpus = count_cpus(); in proc_cpufreq_output()
68 for (cpu = 0; cpu < nr_cpus; cpu++) { in proc_cpufreq_output()
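
proc_cpufreq_output() walks CPUs 0..count_cpus()-1 using cpupower's internal counter. A sketch of the same per-CPU loop using sysconf() and the per-CPU cpufreq sysfs attribute instead; offline CPUs, or systems without a cpufreq driver, simply have no such file:

    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
        long cpu, nr_cpus = sysconf(_SC_NPROCESSORS_CONF);

        for (cpu = 0; cpu < nr_cpus; cpu++) {
            char path[128];
            unsigned long khz;
            FILE *f;

            snprintf(path, sizeof(path),
                     "/sys/devices/system/cpu/cpu%ld/cpufreq/scaling_cur_freq", cpu);
            f = fopen(path, "r");
            if (!f)
                continue;    /* offline CPU or no cpufreq driver */
            if (fscanf(f, "%lu", &khz) == 1)
                printf("cpu %ld: %lu kHz\n", cpu, khz);
            fclose(f);
        }
        return 0;
    }
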
/tools/perf/Documentation/
perf.data-file-format.txt
113 struct nr_cpus {