
Searched refs:nthreads (Results 1 – 16 of 16) sorted by relevance

/tools/perf/bench/
futex-requeue.c
45 static unsigned int threads_starting, nthreads = 0; variable
49 OPT_UINTEGER('t', "threads", &nthreads, "Specify amount of threads"),
69 nthreads, in print_summary()
93 threads_starting = nthreads; in block_threads()
96 for (i = 0; i < nthreads; i++) { in block_threads()
136 if (!nthreads) in bench_futex_requeue()
137 nthreads = cpu->nr; in bench_futex_requeue()
139 worker = calloc(nthreads, sizeof(*worker)); in bench_futex_requeue()
146 if (nrequeue > nthreads) in bench_futex_requeue()
147 nrequeue = nthreads; in bench_futex_requeue()
[all …]
futex-wake.c
46 static unsigned int threads_starting, nthreads = 0; variable
50 OPT_UINTEGER('t', "threads", &nthreads, "Specify amount of threads"),
88 nthreads, in print_summary()
99 threads_starting = nthreads; in block_threads()
102 for (i = 0; i < nthreads; i++) { in block_threads()
144 if (!nthreads) in bench_futex_wake()
145 nthreads = cpu->nr; in bench_futex_wake()
147 worker = calloc(nthreads, sizeof(*worker)); in bench_futex_wake()
156 getpid(), nthreads, fshared ? "shared":"private", &futex1, nwakes); in bench_futex_wake()
183 while (nwoken != nthreads) in bench_futex_wake()
[all …]
futex-lock-pi.c
38 static unsigned int nthreads = 0; variable
46 OPT_UINTEGER('t', "threads", &nthreads, "Specify amount of threads"),
124 threads_starting = nthreads; in create_threads()
126 for (i = 0; i < nthreads; i++) { in create_threads()
168 if (!nthreads) in bench_futex_lock_pi()
169 nthreads = cpu->nr; in bench_futex_lock_pi()
171 worker = calloc(nthreads, sizeof(*worker)); in bench_futex_lock_pi()
179 getpid(), nthreads, nsecs); in bench_futex_lock_pi()
186 threads_starting = nthreads; in bench_futex_lock_pi()
202 for (i = 0; i < nthreads; i++) { in bench_futex_lock_pi()
[all …]
futex-hash.c
33 static unsigned int nthreads = 0; variable
54 OPT_UINTEGER('t', "threads", &nthreads, "Specify amount of threads"),
145 if (!nthreads) /* default to the number of CPUs */ in bench_futex_hash()
146 nthreads = cpu->nr; in bench_futex_hash()
148 worker = calloc(nthreads, sizeof(*worker)); in bench_futex_hash()
156 getpid(), nthreads, nfutexes, fshared ? "shared":"private", nsecs); in bench_futex_hash()
163 threads_starting = nthreads; in bench_futex_hash()
166 for (i = 0; i < nthreads; i++) { in bench_futex_hash()
196 for (i = 0; i < nthreads; i++) { in bench_futex_hash()
207 for (i = 0; i < nthreads; i++) { in bench_futex_hash()
epoll-wait.c
91 static unsigned int nthreads = 0; variable
128 OPT_UINTEGER('t', "threads", &nthreads, "Specify amount of threads"),
310 for (i = 0; i < nthreads; i++) { in do_threads()
381 shuffle((void *)worker, nthreads, sizeof(*worker)); in writerfn()
384 for (i = 0; i < nthreads; i++) { in writerfn()
455 if (!nthreads) in bench_epoll_wait()
456 nthreads = cpu->nr - 1; in bench_epoll_wait()
458 worker = calloc(nthreads, sizeof(*worker)); in bench_epoll_wait()
465 rl.rlim_cur = rl.rlim_max = nfds * nthreads * 2 + 50; in bench_epoll_wait()
473 getpid(), nthreads, oneshot ? " (EPOLLONESHOT semantics)": "", nfds, nsecs); in bench_epoll_wait()
[all …]
epoll-ctl.c
36 static unsigned int nthreads = 0; variable
75 OPT_UINTEGER('t', "threads", &nthreads, "Specify amount of threads"),
233 for (i = 0; i < nthreads; i++) { in do_threads()
336 if (!nthreads) in bench_epoll_ctl()
337 nthreads = cpu->nr; in bench_epoll_ctl()
339 worker = calloc(nthreads, sizeof(*worker)); in bench_epoll_ctl()
345 rl.rlim_cur = rl.rlim_max = nfds * nthreads * 2 + 50; in bench_epoll_ctl()
353 getpid(), nthreads, nfds, nsecs); in bench_epoll_ctl()
362 threads_starting = nthreads; in bench_epoll_ctl()
378 for (i = 0; i < nthreads; i++) { in bench_epoll_ctl()
[all …]
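
Note: the six bench files above share one nthreads convention: -t/--threads is optional, an unset value falls back to the number of online CPUs, and a worker array is calloc'd with one slot per thread. Below is a minimal stand-alone sketch of that pattern; the worker struct, worker_fn and the argv handling are simplifications for illustration, not the actual perf code.

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <pthread.h>

/* Simplified stand-in for the per-thread worker state used by the benchmarks. */
struct worker {
	pthread_t thread;
	unsigned long ops;
};

static unsigned int nthreads;	/* 0 means -t/--threads was not given */

static void *worker_fn(void *arg)
{
	struct worker *w = arg;

	w->ops++;		/* placeholder for the benchmark loop */
	return NULL;
}

int main(int argc, char **argv)
{
	struct worker *worker;
	unsigned int i;

	if (argc > 1)
		nthreads = (unsigned int)strtoul(argv[1], NULL, 0);

	/* Same fallback as the benchmarks: default to the number of CPUs. */
	if (!nthreads)
		nthreads = (unsigned int)sysconf(_SC_NPROCESSORS_ONLN);

	worker = calloc(nthreads, sizeof(*worker));
	if (!worker)
		return EXIT_FAILURE;

	for (i = 0; i < nthreads; i++)
		pthread_create(&worker[i].thread, NULL, worker_fn, &worker[i]);
	for (i = 0; i < nthreads; i++)
		pthread_join(worker[i].thread, NULL);

	printf("ran %u worker threads\n", nthreads);
	free(worker);
	return EXIT_SUCCESS;
}

Build with -pthread; the real benchmarks additionally pin workers to CPUs and synchronize startup via threads_starting.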
/tools/perf/util/
counts.c
9 struct perf_counts *perf_counts__new(int ncpus, int nthreads) in perf_counts__new() argument
16 values = xyarray__new(ncpus, nthreads, sizeof(struct perf_counts_values)); in perf_counts__new()
24 values = xyarray__new(ncpus, nthreads, sizeof(bool)); in perf_counts__new()
58 int evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads) in evsel__alloc_counts() argument
60 evsel->counts = perf_counts__new(ncpus, nthreads); in evsel__alloc_counts()
counts.h
38 struct perf_counts *perf_counts__new(int ncpus, int nthreads);
43 int evsel__alloc_counts(struct evsel *evsel, int ncpus, int nthreads);
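
Note: perf_counts__new() above sizes a 2-D table by (ncpus, nthreads) through libperf's xyarray. A rough stand-alone equivalent follows; counts_table and counts_values are hypothetical names standing in for the real xyarray/perf_counts_values types.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for struct perf_counts_values. */
struct counts_values {
	unsigned long long val, ena, run;
};

/*
 * Hypothetical flat table keyed by (cpu, thread), mimicking what
 * perf_counts__new() builds with xyarray__new(ncpus, nthreads, ...).
 */
struct counts_table {
	int ncpus, nthreads;
	struct counts_values *values;	/* ncpus * nthreads entries */
};

static struct counts_table *counts_table__new(int ncpus, int nthreads)
{
	struct counts_table *t = calloc(1, sizeof(*t));

	if (!t)
		return NULL;
	t->ncpus = ncpus;
	t->nthreads = nthreads;
	t->values = calloc((size_t)ncpus * nthreads, sizeof(*t->values));
	if (!t->values) {
		free(t);
		return NULL;
	}
	return t;
}

static struct counts_values *counts_table__entry(struct counts_table *t,
						 int cpu, int thread)
{
	return &t->values[(size_t)cpu * t->nthreads + thread];
}

int main(void)
{
	struct counts_table *t = counts_table__new(4, 2);

	if (!t)
		return EXIT_FAILURE;
	counts_table__entry(t, 3, 1)->val = 42;
	printf("cpu 3, thread 1: %llu\n", counts_table__entry(t, 3, 1)->val);
	free(t->values);
	free(t);
	return EXIT_SUCCESS;
}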
stat.c
151 static int evsel__alloc_prev_raw_counts(struct evsel *evsel, int ncpus, int nthreads) in evsel__alloc_prev_raw_counts() argument
155 counts = perf_counts__new(ncpus, nthreads); in evsel__alloc_prev_raw_counts()
177 int nthreads = perf_thread_map__nr(evsel->core.threads); in evsel__alloc_stats() local
180 evsel__alloc_counts(evsel, ncpus, nthreads) < 0 || in evsel__alloc_stats()
181 (alloc_raw && evsel__alloc_prev_raw_counts(evsel, ncpus, nthreads) < 0)) in evsel__alloc_stats()
235 int nthreads = perf_thread_map__nr(evsel->core.threads); in perf_evsel__copy_prev_raw_counts() local
237 for (int thread = 0; thread < nthreads; thread++) { in perf_evsel__copy_prev_raw_counts()
380 int nthreads = perf_thread_map__nr(counter->core.threads); in process_counter_maps() local
385 nthreads = 1; in process_counter_maps()
387 for (thread = 0; thread < nthreads; thread++) { in process_counter_maps()
stat-display.c
701 int nthreads, int ncpus, in sort_aggr_thread() argument
709 buf = calloc(nthreads, sizeof(struct perf_aggr_thread_value)); in sort_aggr_thread()
713 for (thread = 0; thread < nthreads; thread++) { in sort_aggr_thread()
753 int nthreads = perf_thread_map__nr(counter->core.threads); in print_aggr_thread() local
758 buf = sort_aggr_thread(counter, nthreads, ncpus, &sorted_threads, _target); in print_aggr_thread()
evsel.c
1706 int cpu, thread, nthreads; in evsel__open_cpu() local
1740 nthreads = 1; in evsel__open_cpu()
1742 nthreads = threads->nr; in evsel__open_cpu()
1745 perf_evsel__alloc_fd(&evsel->core, cpus->nr, nthreads) < 0) in evsel__open_cpu()
1785 for (thread = 0; thread < nthreads; thread++) { in evsel__open_cpu()
1814 nthreads--; in evsel__open_cpu()
1968 thread = nthreads; in evsel__open_cpu()
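
Note: the evsel__open_cpu() hits above iterate a cpu x thread grid, opening one counter fd per pair. A bare-bones illustration of that loop with the raw perf_event_open() syscall is sketched below; the attr setup and the single-entry cpu/tid arrays are placeholders, whereas the real code takes them from the evsel's cpu and thread maps and handles fallback by shrinking nthreads.

#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <asm/unistd.h>
#include <linux/perf_event.h>

static int sys_perf_event_open(struct perf_event_attr *attr, pid_t pid,
			       int cpu, int group_fd, unsigned long flags)
{
	return (int)syscall(__NR_perf_event_open, attr, pid, cpu, group_fd, flags);
}

int main(void)
{
	struct perf_event_attr attr;
	/* Placeholder maps: one CPU slot ("any CPU") and one thread (self). */
	int cpus[1] = { -1 };
	pid_t tids[1] = { 0 };
	int ncpus = 1, nthreads = 1;
	int fd[1][1];
	int cpu, thread;

	memset(&attr, 0, sizeof(attr));
	attr.size = sizeof(attr);
	attr.type = PERF_TYPE_SOFTWARE;
	attr.config = PERF_COUNT_SW_TASK_CLOCK;

	/* One fd per (cpu, thread) pair, as in evsel__open_cpu(). */
	for (cpu = 0; cpu < ncpus; cpu++) {
		for (thread = 0; thread < nthreads; thread++) {
			fd[cpu][thread] = sys_perf_event_open(&attr,
							      tids[thread],
							      cpus[cpu],
							      -1, 0);
			if (fd[cpu][thread] < 0)
				perror("perf_event_open");
		}
	}
	return 0;
}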
/tools/lib/perf/
evsel.c
42 int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_fd() argument
44 evsel->fd = xyarray__new(ncpus, nthreads, sizeof(int)); in perf_evsel__alloc_fd()
49 for (thread = 0; thread < nthreads; thread++) { in perf_evsel__alloc_fd()
273 int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads) in perf_evsel__alloc_id() argument
275 if (ncpus == 0 || nthreads == 0) in perf_evsel__alloc_id()
279 nthreads = 1; in perf_evsel__alloc_id()
281 evsel->sample_id = xyarray__new(ncpus, nthreads, sizeof(struct perf_sample_id)); in perf_evsel__alloc_id()
285 evsel->id = zalloc(ncpus * nthreads * sizeof(u64)); in perf_evsel__alloc_id()
/tools/lib/perf/include/internal/
evsel.h
54 int perf_evsel__alloc_fd(struct perf_evsel *evsel, int ncpus, int nthreads);
60 int perf_evsel__alloc_id(struct perf_evsel *evsel, int ncpus, int nthreads);
/tools/testing/selftests/filesystems/binderfs/
binderfs_test.c
385 int i, j, k, nthreads; in TEST() local
434 nthreads = get_nprocs_conf(); in TEST()
435 if (nthreads > DEFAULT_THREADS) in TEST()
436 nthreads = DEFAULT_THREADS; in TEST()
441 for (i = 0; i < nthreads; i++) { in TEST()
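
Note: binderfs_test.c caps its thread count at the smaller of get_nprocs_conf() and DEFAULT_THREADS. A sketch of just that clamp; DEFAULT_THREADS is chosen arbitrarily here, the selftest defines its own value.

#include <stdio.h>
#include <sys/sysinfo.h>

#define DEFAULT_THREADS 4	/* arbitrary cap for this sketch */

int main(void)
{
	int nthreads = get_nprocs_conf();	/* configured, not just online, CPUs */

	if (nthreads > DEFAULT_THREADS)
		nthreads = DEFAULT_THREADS;

	printf("using %d threads\n", nthreads);
	return 0;
}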
/tools/perf/
builtin-stat.c
335 int nthreads = perf_thread_map__nr(evsel_list->core.threads); in read_counter_cpu() local
342 nthreads = 1; in read_counter_cpu()
344 for (thread = 0; thread < nthreads; thread++) { in read_counter_cpu()
428 static int runtime_stat_new(struct perf_stat_config *config, int nthreads) in runtime_stat_new() argument
432 config->stats = calloc(nthreads, sizeof(struct runtime_stat)); in runtime_stat_new()
436 config->stats_num = nthreads; in runtime_stat_new()
438 for (i = 0; i < nthreads; i++) in runtime_stat_new()
builtin-script.c
2031 int nthreads = perf_thread_map__nr(counter->core.threads); in __process_stat() local
2037 nthreads = 1; in __process_stat()
2045 for (thread = 0; thread < nthreads; thread++) { in __process_stat()