
Lines Matching +full:minimum +full:- +full:bulk

7 //     http://www.apache.org/licenses/LICENSE-2.0
71 // or a minimum number of seconds (specified as `<float>s`). If the latter
78 // For cpu-time based tests, this is the lower bound
80 // real-time based tests, this is the lower bound on the elapsed time of the
84 // Minimum number of seconds a benchmark should be run before results should be
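The fragments at lines 71-84 describe the two forms the minimum-time setting accepts: an iteration count (`<integer>x`) or a duration in seconds (`<float>s`). A minimal sketch of the in-code analogue (BM_StringCopy is a hypothetical benchmark; MinTime mirrors what the flag sets globally):

    #include <benchmark/benchmark.h>
    #include <string>

    static void BM_StringCopy(benchmark::State& state) {
      std::string src = "hello";
      for (auto _ : state) {
        std::string copy(src);           // operation under test
        benchmark::DoNotOptimize(copy);  // keep the copy from being elided
      }
    }
    // Lower bound on accumulated run time, like --benchmark_min_time=0.5s;
    // --benchmark_min_time=1000x would instead pin the iteration count.
    BENCHMARK(BM_StringCopy)->MinTime(0.5);
    BENCHMARK_MAIN();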
133 // information about libpfm: https://man7.org/linux/man-pages/man3/libpfm.3.html
136 // Extra context to include in the output formatted as comma-separated key-value
188 perf_counters_measurement_->names()) { in State()
195 // currently provide well-defined behavior as an extension (which is in State()
204 #pragma GCC diagnostic ignored "-Winvalid-offsetof" in State()
217 offsetof(State, skipped_) <= (cache_line_size - sizeof(skipped_)), ""); in State()
234 timer_->StopTimer(); in PauseTiming()
237 if (!perf_counters_measurement_->Stop(measurements)) { in PauseTiming()
252 timer_->StartTimer(); in ResumeTiming()
254 perf_counters_measurement_->Start(); in ResumeTiming()
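Lines 234-254 are the internals behind State::PauseTiming() and State::ResumeTiming(): they stop and restart the per-thread timer and, when perf counters are configured, stop and restart the counter measurement. A minimal usage sketch (BM_SetInsert and its argument are hypothetical):

    #include <benchmark/benchmark.h>
    #include <set>

    static void BM_SetInsert(benchmark::State& state) {
      for (auto _ : state) {
        state.PauseTiming();   // timer_->StopTimer(): setup below is untimed
        std::set<int> data;
        for (int i = 0; i < state.range(0); ++i) data.insert(i);
        state.ResumeTiming();  // timer_->StartTimer(): measurement resumes
        data.insert(-1);       // only this insert contributes to the timing
      }
    }
    BENCHMARK(BM_SetInsert)->Arg(1 << 10);
    BENCHMARK_MAIN();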
261 MutexLock l(manager_->GetBenchmarkMutex()); in SkipWithMessage()
262 if (internal::NotSkipped == manager_->results.skipped_) { in SkipWithMessage()
263 manager_->results.skip_message_ = msg; in SkipWithMessage()
264 manager_->results.skipped_ = skipped_; in SkipWithMessage()
268 if (timer_->running()) timer_->StopTimer(); in SkipWithMessage()
274 MutexLock l(manager_->GetBenchmarkMutex()); in SkipWithError()
275 if (internal::NotSkipped == manager_->results.skipped_) { in SkipWithError()
276 manager_->results.skip_message_ = msg; in SkipWithError()
277 manager_->results.skipped_ = skipped_; in SkipWithError()
281 if (timer_->running()) timer_->StopTimer(); in SkipWithError()
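Lines 261-281 show that SkipWithMessage() and SkipWithError() share the same shape: under the benchmark mutex, the first skip wins (the NotSkipped check), the message and skip kind are recorded in the shared results, and any running timer is stopped. From user code (the benchmark name and file path are hypothetical):

    #include <benchmark/benchmark.h>
    #include <fstream>
    #include <string>

    static void BM_ParseFile(benchmark::State& state) {
      std::ifstream in("/tmp/input.dat");  // hypothetical input
      if (!in) {
        // Recorded once per run; later skips hit the NotSkipped guard.
        state.SkipWithError("cannot open /tmp/input.dat");
        return;  // return early so the timing loop never starts
      }
      for (auto _ : state) {
        std::string line;
        std::getline(in, line);          // the work under test (sketch only)
        benchmark::DoNotOptimize(line);
      }
    }
    BENCHMARK(BM_ParseFile);
    BENCHMARK_MAIN();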
285 timer_->SetIterationTime(seconds); in SetIterationTime()
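Line 285 forwards a user-measured duration to the timer; it only takes effect when the benchmark is registered with UseManualTime(). A sketch using a wall-clock measurement around a sleep (the benchmark name and sleep duration are hypothetical):

    #include <benchmark/benchmark.h>
    #include <chrono>
    #include <thread>

    static void BM_ManualClock(benchmark::State& state) {
      for (auto _ : state) {
        auto start = std::chrono::high_resolution_clock::now();
        std::this_thread::sleep_for(std::chrono::microseconds(100));  // stand-in work
        auto end = std::chrono::high_resolution_clock::now();
        // Feeds timer_->SetIterationTime(seconds) shown above.
        state.SetIterationTime(std::chrono::duration<double>(end - start).count());
      }
    }
    BENCHMARK(BM_ManualClock)->UseManualTime();
    BENCHMARK_MAIN();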
289 MutexLock l(manager_->GetBenchmarkMutex()); in SetLabel()
290 manager_->results.report_label_ = label; in SetLabel()
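Lines 289-290: SetLabel() stores a free-form string in the shared results under the mutex, and it is later printed alongside the timing columns. For example (the compression ratio is a hypothetical stand-in):

    #include <benchmark/benchmark.h>
    #include <string>

    static void BM_Compress(benchmark::State& state) {
      const double in_bytes = 1 << 20;
      double out_bytes = in_bytes;
      for (auto _ : state) {
        out_bytes = in_bytes / 2.0;          // stand-in for real compression
        benchmark::DoNotOptimize(out_bytes);
      }
      // Ends up in manager_->results.report_label_ and in the report output.
      state.SetLabel("ratio=" + std::to_string(in_bytes / out_bytes));
    }
    BENCHMARK(BM_Compress);
    BENCHMARK_MAIN();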
297 manager_->StartStopBarrier(); in StartKeepRunning()
309 manager_->StartStopBarrier(); in FinishKeepRunning()
316 // ensures users get timely updates even when streams are not line-buffered.
319 std::flush(reporter->GetOutputStream()); in FlushStreams()
320 std::flush(reporter->GetErrorStream()); in FlushStreams()
329 // If there are no aggregates, do output non-aggregates. in Report()
331 if (!aggregates_only) reporter->ReportRuns(results.non_aggregates); in Report()
333 reporter->ReportRuns(results.aggregates_only); in Report()
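Lines 329-333 implement the aggregates-only switch in Report(): non-aggregate runs are emitted unless suppressed, then any aggregates follow. The per-benchmark knob that drives this, mirroring --benchmark_report_aggregates_only (BM_Spin is a hypothetical workload):

    #include <benchmark/benchmark.h>

    static void BM_Spin(benchmark::State& state) {
      int x = 0;
      for (auto _ : state) {
        benchmark::DoNotOptimize(x += 1);  // trivial stand-in workload
      }
    }
    // Ten repetitions, but only the aggregate rows (mean/median/stddev) are
    // reported; the individual runs take the `aggregates_only` branch above.
    BENCHMARK(BM_Spin)->Repetitions(10)->ReportAggregatesOnly(true);
    BENCHMARK_MAIN();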
352 // Determine the width of the name field using a minimum width of 10. in RunBenchmarks()
374 if (display_reporter->ReportContext(context) && in RunBenchmarks()
375 (!file_reporter || file_reporter->ReportContext(context))) { in RunBenchmarks()
404 reports_for_family->num_runs_total += num_repeats_of_this_instance; in RunBenchmarks()
440 // FIXME: report each repetition separately, not all of them in bulk. in RunBenchmarks()
442 display_reporter->ReportRunsConfig( in RunBenchmarks()
445 file_reporter->ReportRunsConfig( in RunBenchmarks()
452 if (reports_for_family->num_runs_done == in RunBenchmarks()
453 reports_for_family->num_runs_total) { in RunBenchmarks()
454 auto additional_run_stats = ComputeBigO(reports_for_family->Runs); in RunBenchmarks()
459 static_cast<int>(reports_for_family->Runs.front().family_index)); in RunBenchmarks()
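Lines 452-459: once every repetition in a family has finished (num_runs_done == num_runs_total), ComputeBigO fits the family's timings and the extra complexity rows are reported. That path is reached from user code along these lines (the benchmark and its size range are hypothetical):

    #include <benchmark/benchmark.h>
    #include <numeric>
    #include <vector>

    static void BM_LinearScan(benchmark::State& state) {
      std::vector<int> v(static_cast<size_t>(state.range(0)), 1);
      for (auto _ : state) {
        long sum = std::accumulate(v.begin(), v.end(), 0L);
        benchmark::DoNotOptimize(sum);
      }
      state.SetComplexityN(state.range(0));  // the N that ComputeBigO fits against
    }
    // Several sizes form one family; oAuto lets the library pick the best fit.
    BENCHMARK(BM_LinearScan)->Range(1 << 10, 1 << 18)->Complexity(benchmark::oAuto);
    BENCHMARK_MAIN();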
466 display_reporter->Finalize(); in RunBenchmarks()
467 if (file_reporter) file_reporter->Finalize(); in RunBenchmarks()
473 // CSVReporter but don't want to trigger -Werror=-Wdeprecated-declarations
502 auto is_benchmark_color = [force_no_color]() -> bool { in GetOutputOptions()
572 auto& Out = display_reporter->GetOutputStream(); in RunSpecifiedBenchmarks()
573 auto& Err = display_reporter->GetErrorStream(); in RunSpecifiedBenchmarks()
578 "--benchmark_out=<file> was not specified." in RunSpecifiedBenchmarks()
595 file_reporter->SetOutputStream(&output_file); in RunSpecifiedBenchmarks()
596 file_reporter->SetErrorStream(&output_file); in RunSpecifiedBenchmarks()
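Lines 572-596 wire the display and file reporters to their streams; the file reporter only exists when --benchmark_out=<file> is given. The same pairing can be set up programmatically via the two-reporter overload of RunSpecifiedBenchmarks (results.json is a hypothetical path):

    #include <benchmark/benchmark.h>
    #include <fstream>

    int main(int argc, char** argv) {
      benchmark::Initialize(&argc, argv);
      benchmark::ConsoleReporter display;
      benchmark::JSONReporter json;
      std::ofstream out("results.json");  // hypothetical output path
      json.SetOutputStream(&out);         // mirrors file_reporter->SetOutputStream
      json.SetErrorStream(&out);
      benchmark::RunSpecifiedBenchmarks(&display, &json);
      benchmark::Shutdown();
      return 0;
    }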
642 if (!internal::global_context->emplace(key, value).second) { in AddCustomContext()
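Line 642: AddCustomContext() inserts into the global context map, and the emplace(...).second check rejects duplicate keys; this backs the --benchmark_context=<key>=<value>,... flag described at line 136. A sketch (the key/value strings are hypothetical):

    #include <benchmark/benchmark.h>

    int main(int argc, char** argv) {
      benchmark::Initialize(&argc, argv);
      // A duplicate key would make emplace(...).second false and be rejected.
      benchmark::AddCustomContext("config", "release");  // hypothetical pair
      benchmark::AddCustomContext("host", "ci-runner");  // hypothetical pair
      benchmark::RunSpecifiedBenchmarks();
      benchmark::Shutdown();
      return 0;
    }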
709 for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1]; in ParseCommandLineFlags()
711 --(*argc); in ParseCommandLineFlags()
712 --i; in ParseCommandLineFlags()
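Lines 709-712 are the flag-consumption idiom in ParseCommandLineFlags(): a recognized flag is removed by shifting the remaining argv entries left one slot, shrinking argc, and re-examining the current index. Isolated as a standalone helper (RemoveArg is a hypothetical name):

    // Remove argv[i] in place so later parsing never sees the consumed flag.
    void RemoveArg(int* argc, char** argv, int i) {
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];  // shift left
      --(*argc);  // the array is now one entry shorter
    }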
742 " [--benchmark_list_tests={true|false}]\n" in PrintDefaultHelp()
743 " [--benchmark_filter=<regex>]\n" in PrintDefaultHelp()
744 " [--benchmark_min_time=`<integer>x` OR `<float>s` ]\n" in PrintDefaultHelp()
745 " [--benchmark_min_warmup_time=<min_warmup_time>]\n" in PrintDefaultHelp()
746 " [--benchmark_repetitions=<num_repetitions>]\n" in PrintDefaultHelp()
747 " [--benchmark_enable_random_interleaving={true|false}]\n" in PrintDefaultHelp()
748 " [--benchmark_report_aggregates_only={true|false}]\n" in PrintDefaultHelp()
749 " [--benchmark_display_aggregates_only={true|false}]\n" in PrintDefaultHelp()
750 " [--benchmark_format=<console|json|csv>]\n" in PrintDefaultHelp()
751 " [--benchmark_out=<filename>]\n" in PrintDefaultHelp()
752 " [--benchmark_out_format=<json|console|csv>]\n" in PrintDefaultHelp()
753 " [--benchmark_color={auto|true|false}]\n" in PrintDefaultHelp()
754 " [--benchmark_counters_tabular={true|false}]\n" in PrintDefaultHelp()
756 " [--benchmark_perf_counters=<counter>,...]\n" in PrintDefaultHelp()
758 " [--benchmark_context=<key>=<value>,...]\n" in PrintDefaultHelp()
759 " [--benchmark_time_unit={ns|us|ms|s}]\n" in PrintDefaultHelp()
760 " [--v=<verbosity>]\n"); in PrintDefaultHelp()
773 fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0], in ReportUnrecognizedArguments()
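Lines 742-760 list the flags PrintDefaultHelp() documents, and line 773 is the error emitted for anything left over after parsing. The canonical main() ties these together; this is essentially what BENCHMARK_MAIN() expands to:

    #include <benchmark/benchmark.h>

    int main(int argc, char** argv) {
      benchmark::Initialize(&argc, argv);  // parses the flags listed above
      if (benchmark::ReportUnrecognizedArguments(argc, argv))
        return 1;  // prints the "unrecognized command-line flag" error
      benchmark::RunSpecifiedBenchmarks();
      benchmark::Shutdown();
      return 0;
    }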