// Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "benchmark/benchmark.h"
#include "benchmark_api_internal.h"
#include "benchmark_runner.h"
#include "internal_macros.h"

#ifndef BENCHMARK_OS_WINDOWS
#ifndef BENCHMARK_OS_FUCHSIA
#include <sys/resource.h>
#endif
#include <sys/time.h>
#include <unistd.h>
#endif

#include <algorithm>
#include <atomic>
#include <condition_variable>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <utility>

#include "check.h"
#include "colorprint.h"
#include "commandlineflags.h"
#include "complexity.h"
#include "counter.h"
#include "log.h"
#include "mutex.h"
#include "re.h"
#include "statistics.h"
#include "string_util.h"
#include "thread_manager.h"
#include "thread_timer.h"

// Print a list of benchmarks. This option overrides all other options.
DEFINE_bool(benchmark_list_tests, false);

// A regular expression that specifies the set of benchmarks to execute.  If
// this flag is empty, or if this flag is the string "all", all benchmarks
// linked into the binary are run.
DEFINE_string(benchmark_filter, ".");

// Minimum number of seconds we should run a benchmark before results are
// considered significant.  For CPU-time based tests, this is the lower bound
// on the total CPU time used by all threads that make up the test.  For
// real-time based tests, this is the lower bound on the elapsed time of the
// benchmark execution, regardless of the number of threads.
DEFINE_double(benchmark_min_time, 0.5);

// The number of runs of each benchmark. If greater than 1, the mean and
// standard deviation of the runs will be reported.
DEFINE_int32(benchmark_repetitions, 1);

// If 'true', only the aggregate statistics (mean, standard deviation, etc.)
// are reported for repeated benchmarks; the individual repetition results are
// suppressed. Affects all reporters.
DEFINE_bool(benchmark_report_aggregates_only, false);

// If 'true', only the aggregate statistics are displayed for repeated
// benchmarks; the individual repetition results are suppressed. Unlike
// benchmark_report_aggregates_only, this affects only the display reporter,
// *not* the file reporter, which will still contain all the output.
DEFINE_bool(benchmark_display_aggregates_only, false);

// The format to use for console output.
// Valid values are 'console', 'json', or 'csv'.
DEFINE_string(benchmark_format, "console");

// The format to use for file output.
// Valid values are 'console', 'json', or 'csv'.
DEFINE_string(benchmark_out_format, "json");

// The file to write additional output to.
DEFINE_string(benchmark_out, "");

// Whether to use colors in the output.  Valid values:
// 'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use colors if
// the output is being sent to a terminal and the TERM environment variable is
// set to a terminal type that supports colors.
DEFINE_string(benchmark_color, "auto");

// Whether to use tabular format when printing user counters to the console.
// Valid values: 'true'/'yes'/1, 'false'/'no'/0.  Defaults to false.
DEFINE_bool(benchmark_counters_tabular, false);

// The level of verbose logging to output.
DEFINE_int32(v, 0);

namespace benchmark {

namespace internal {

// FIXME: wouldn't LTO mess this up?
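// Out-of-line sink for DoNotOptimize() (see benchmark.h): routing the value
// through a volatile pointer in a separate translation unit keeps the
// compiler from proving the value unused and optimizing it away.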
void UseCharPointer(char const volatile*) {}

}  // namespace internal

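// A State object is constructed by the framework for every thread of a
// benchmark run and passed to the user's benchmark function.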
State::State(IterationCount max_iters, const std::vector<int64_t>& ranges,
             int thread_i, int n_threads, internal::ThreadTimer* timer,
             internal::ThreadManager* manager)
    : total_iterations_(0),
      batch_leftover_(0),
      max_iterations(max_iters),
      started_(false),
      finished_(false),
      error_occurred_(false),
      range_(ranges),
      complexity_n_(0),
      counters(),
      thread_index(thread_i),
      threads(n_threads),
      timer_(timer),
      manager_(manager) {
  CHECK(max_iterations != 0) << "At least one iteration must be run";
  CHECK_LT(thread_index, threads) << "thread_index must be less than threads";

  // Note: The use of offsetof below is technically undefined until C++17
  // because State is not a standard-layout type. However, all compilers
  // currently provide well-defined behavior as an extension (demonstrated by
  // the fact that constexpr evaluation must diagnose all undefined behavior).
  // GCC and Clang nevertheless warn about this use of offsetof, so the
  // warning must be suppressed.
#if defined(__INTEL_COMPILER)
#pragma warning push
#pragma warning(disable : 1875)
#elif defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Winvalid-offsetof"
#endif
  // Offset tests to ensure commonly accessed data is on the first cache line.
  const int cache_line_size = 64;
  static_assert(offsetof(State, error_occurred_) <=
                    (cache_line_size - sizeof(error_occurred_)),
                "");
#if defined(__INTEL_COMPILER)
#pragma warning pop
#elif defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
}

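// Stops the per-thread timer. Only legal while the benchmark is running
// (started, not finished, and no error), which the CHECK below enforces.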
void State::PauseTiming() {
  // Add in time accumulated so far
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StopTimer();
}

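// Restarts the per-thread timer; the counterpart to PauseTiming().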
void State::ResumeTiming() {
  CHECK(started_ && !finished_ && !error_occurred_);
  timer_->StartTimer();
}

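// Records msg as the benchmark's error (only the first error reported by any
// thread is kept) and zeroes total_iterations_ so the measurement loop exits.
// Typical usage in a benchmark function (a minimal sketch; resource_ok is a
// placeholder):
//
//   if (!resource_ok) {
//     state.SkipWithError("resource unavailable");
//     return;
//   }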
void State::SkipWithError(const char* msg) {
  CHECK(msg);
  error_occurred_ = true;
  {
    MutexLock l(manager_->GetBenchmarkMutex());
    if (manager_->results.has_error_ == false) {
      manager_->results.error_message_ = msg;
      manager_->results.has_error_ = true;
    }
  }
  total_iterations_ = 0;
  if (timer_->running()) timer_->StopTimer();
}

void State::SetIterationTime(double seconds) {
  timer_->SetIterationTime(seconds);
}

void State::SetLabel(const char* label) {
  MutexLock l(manager_->GetBenchmarkMutex());
  manager_->results.report_label_ = label;
}

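// Arms the iteration countdown and synchronizes all threads of the benchmark
// at the start barrier. On error the countdown starts at zero so the
// KeepRunning loop exits immediately; otherwise timing begins here.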
void State::StartKeepRunning() {
  CHECK(!started_ && !finished_);
  started_ = true;
  total_iterations_ = error_occurred_ ? 0 : max_iterations;
  manager_->StartStopBarrier();
  if (!error_occurred_) ResumeTiming();
}

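// Stops timing, repairs the iteration counter (which the KeepRunning loop has
// decremented past zero), and waits at the stop barrier so all threads finish
// together.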
void State::FinishKeepRunning() {
  CHECK(started_ && (!finished_ || error_occurred_));
  if (!error_occurred_) {
    PauseTiming();
  }
  // Total iterations has now wrapped around past 0. Fix this.
  total_iterations_ = 0;
  finished_ = true;
  manager_->StartStopBarrier();
}

namespace internal {
namespace {

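// Runs every benchmark in |benchmarks|, sending results to display_reporter
// and, when one is given, to file_reporter as well.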
void RunBenchmarks(const std::vector<BenchmarkInstance>& benchmarks,
                   BenchmarkReporter* display_reporter,
                   BenchmarkReporter* file_reporter) {
  // Note the file_reporter can be null.
  CHECK(display_reporter != nullptr);

  // Determine the width of the name field using a minimum width of 10.
  bool might_have_aggregates = FLAGS_benchmark_repetitions > 1;
  size_t name_field_width = 10;
  size_t stat_field_width = 0;
  for (const BenchmarkInstance& benchmark : benchmarks) {
    name_field_width =
        std::max<size_t>(name_field_width, benchmark.name.str().size());
    might_have_aggregates |= benchmark.repetitions > 1;

    for (const auto& Stat : *benchmark.statistics)
      stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size());
  }
  if (might_have_aggregates) name_field_width += 1 + stat_field_width;

  // Print header here
  BenchmarkReporter::Context context;
  context.name_field_width = name_field_width;

  // Keep track of running times of all instances of the current benchmark
  std::vector<BenchmarkReporter::Run> complexity_reports;

  // We flush streams after invoking reporter methods that write to them. This
  // ensures users get timely updates even when streams are not line-buffered.
  auto flushStreams = [](BenchmarkReporter* reporter) {
    if (!reporter) return;
    std::flush(reporter->GetOutputStream());
    std::flush(reporter->GetErrorStream());
  };

  if (display_reporter->ReportContext(context) &&
      (!file_reporter || file_reporter->ReportContext(context))) {
    flushStreams(display_reporter);
    flushStreams(file_reporter);

    for (const auto& benchmark : benchmarks) {
      RunResults run_results = RunBenchmark(benchmark, &complexity_reports);

      auto report = [&run_results](BenchmarkReporter* reporter,
                                   bool report_aggregates_only) {
        assert(reporter);
        // If there are no aggregates, always output the non-aggregates.
        report_aggregates_only &= !run_results.aggregates_only.empty();
        if (!report_aggregates_only)
          reporter->ReportRuns(run_results.non_aggregates);
        if (!run_results.aggregates_only.empty())
          reporter->ReportRuns(run_results.aggregates_only);
      };

      report(display_reporter, run_results.display_report_aggregates_only);
      if (file_reporter)
        report(file_reporter, run_results.file_report_aggregates_only);

      flushStreams(display_reporter);
      flushStreams(file_reporter);
    }
  }
  display_reporter->Finalize();
  if (file_reporter) file_reporter->Finalize();
  flushStreams(display_reporter);
  flushStreams(file_reporter);
}

// Disable deprecation warnings temporarily because we need to reference
// CSVReporter but don't want to trigger -Werror=deprecated-declarations.
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif

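// Constructs one of the built-in reporters ('console', 'json', or 'csv') by
// name, exiting with an error message for any other name.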
std::unique_ptr<BenchmarkReporter> CreateReporter(
    std::string const& name, ConsoleReporter::OutputOptions output_opts) {
  typedef std::unique_ptr<BenchmarkReporter> PtrType;
  if (name == "console") {
    return PtrType(new ConsoleReporter(output_opts));
  } else if (name == "json") {
    return PtrType(new JSONReporter);
  } else if (name == "csv") {
    return PtrType(new CSVReporter);
  } else {
    std::cerr << "Unexpected format: '" << name << "'\n";
    std::exit(1);
  }
}

#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif

}  // end namespace

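// Treats a double as zero if it is within one machine epsilon of zero.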
bool IsZero(double n) {
  return std::abs(n) < std::numeric_limits<double>::epsilon();
}

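// Derives the ConsoleReporter options from the --benchmark_color and
// --benchmark_counters_tabular flags; force_no_color disables color output
// regardless of the flag value.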
ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) {
  int output_opts = ConsoleReporter::OO_Defaults;
  auto is_benchmark_color = [force_no_color]() -> bool {
    if (force_no_color) {
      return false;
    }
    if (FLAGS_benchmark_color == "auto") {
      return IsColorTerminal();
    }
    return IsTruthyFlagValue(FLAGS_benchmark_color);
  };
  if (is_benchmark_color()) {
    output_opts |= ConsoleReporter::OO_Color;
  } else {
    output_opts &= ~ConsoleReporter::OO_Color;
  }
  if (FLAGS_benchmark_counters_tabular) {
    output_opts |= ConsoleReporter::OO_Tabular;
  } else {
    output_opts &= ~ConsoleReporter::OO_Tabular;
  }
  return static_cast<ConsoleReporter::OutputOptions>(output_opts);
}

}  // end namespace internal

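// Runs the benchmarks matching --benchmark_filter and returns how many
// matched. The overloads below fall back to reporters built from the
// command-line flags when none are supplied. A typical main() wired to this
// entry point (a minimal sketch; BM_Example is a placeholder):
//
//   static void BM_Example(benchmark::State& state) {
//     for (auto _ : state) {
//       // code under test
//     }
//   }
//   BENCHMARK(BM_Example);
//
//   int main(int argc, char** argv) {
//     benchmark::Initialize(&argc, argv);
//     if (benchmark::ReportUnrecognizedArguments(argc, argv)) return 1;
//     benchmark::RunSpecifiedBenchmarks();
//   }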
size_t RunSpecifiedBenchmarks() {
  return RunSpecifiedBenchmarks(nullptr, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter) {
  return RunSpecifiedBenchmarks(display_reporter, nullptr);
}

size_t RunSpecifiedBenchmarks(BenchmarkReporter* display_reporter,
                              BenchmarkReporter* file_reporter) {
  std::string spec = FLAGS_benchmark_filter;
  if (spec.empty() || spec == "all")
    spec = ".";  // Regexp that matches all benchmarks

  // Setup the reporters
  std::ofstream output_file;
  std::unique_ptr<BenchmarkReporter> default_display_reporter;
  std::unique_ptr<BenchmarkReporter> default_file_reporter;
  if (!display_reporter) {
    default_display_reporter = internal::CreateReporter(
        FLAGS_benchmark_format, internal::GetOutputOptions());
    display_reporter = default_display_reporter.get();
  }
  auto& Out = display_reporter->GetOutputStream();
  auto& Err = display_reporter->GetErrorStream();

  std::string const& fname = FLAGS_benchmark_out;
  if (fname.empty() && file_reporter) {
    Err << "A custom file reporter was provided but "
           "--benchmark_out=<file> was not specified."
        << std::endl;
    std::exit(1);
  }
  if (!fname.empty()) {
    output_file.open(fname);
    if (!output_file.is_open()) {
      Err << "invalid file name: '" << fname << "'" << std::endl;
      std::exit(1);
    }
    if (!file_reporter) {
      default_file_reporter = internal::CreateReporter(
          FLAGS_benchmark_out_format, ConsoleReporter::OO_None);
      file_reporter = default_file_reporter.get();
    }
    file_reporter->SetOutputStream(&output_file);
    file_reporter->SetErrorStream(&output_file);
  }

  std::vector<internal::BenchmarkInstance> benchmarks;
  if (!FindBenchmarksInternal(spec, &benchmarks, &Err)) return 0;

  if (benchmarks.empty()) {
    Err << "Failed to match any benchmarks against regex: " << spec << "\n";
    return 0;
  }

  if (FLAGS_benchmark_list_tests) {
    for (auto const& benchmark : benchmarks)
      Out << benchmark.name.str() << "\n";
  } else {
    internal::RunBenchmarks(benchmarks, display_reporter, file_reporter);
  }

  return benchmarks.size();
}

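// Installs a MemoryManager so benchmark runs can collect memory metrics in
// addition to timing; passing nullptr removes a previously registered
// manager.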
void RegisterMemoryManager(MemoryManager* manager) {
  internal::memory_manager = manager;
}

namespace internal {

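// Prints the recognized command-line flags and terminates the process.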
void PrintUsageAndExit() {
  fprintf(stdout,
          "benchmark"
          " [--benchmark_list_tests={true|false}]\n"
          "          [--benchmark_filter=<regex>]\n"
          "          [--benchmark_min_time=<min_time>]\n"
          "          [--benchmark_repetitions=<num_repetitions>]\n"
          "          [--benchmark_report_aggregates_only={true|false}]\n"
          "          [--benchmark_display_aggregates_only={true|false}]\n"
          "          [--benchmark_format=<console|json|csv>]\n"
          "          [--benchmark_out=<filename>]\n"
          "          [--benchmark_out_format=<json|console|csv>]\n"
          "          [--benchmark_color={auto|true|false}]\n"
          "          [--benchmark_counters_tabular={true|false}]\n"
          "          [--v=<verbosity>]\n");
  exit(0);
}

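// Consumes the benchmark flags from argv, shifting the remaining arguments
// down so callers can detect leftovers, and exits with usage information on
// --help or an invalid flag value.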
void ParseCommandLineFlags(int* argc, char** argv) {
  using namespace benchmark;
  BenchmarkReporter::Context::executable_name =
      (argc && *argc > 0) ? argv[0] : "unknown";
  for (int i = 1; argc && i < *argc; ++i) {
    if (ParseBoolFlag(argv[i], "benchmark_list_tests",
                      &FLAGS_benchmark_list_tests) ||
        ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) ||
        ParseDoubleFlag(argv[i], "benchmark_min_time",
                        &FLAGS_benchmark_min_time) ||
        ParseInt32Flag(argv[i], "benchmark_repetitions",
                       &FLAGS_benchmark_repetitions) ||
        ParseBoolFlag(argv[i], "benchmark_report_aggregates_only",
                      &FLAGS_benchmark_report_aggregates_only) ||
        ParseBoolFlag(argv[i], "benchmark_display_aggregates_only",
                      &FLAGS_benchmark_display_aggregates_only) ||
        ParseStringFlag(argv[i], "benchmark_format", &FLAGS_benchmark_format) ||
        ParseStringFlag(argv[i], "benchmark_out", &FLAGS_benchmark_out) ||
        ParseStringFlag(argv[i], "benchmark_out_format",
                        &FLAGS_benchmark_out_format) ||
        ParseStringFlag(argv[i], "benchmark_color", &FLAGS_benchmark_color) ||
        // "color_print" is the deprecated name for "benchmark_color".
        // TODO: Remove this.
        ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) ||
        ParseBoolFlag(argv[i], "benchmark_counters_tabular",
                      &FLAGS_benchmark_counters_tabular) ||
        ParseInt32Flag(argv[i], "v", &FLAGS_v)) {
      for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1];

      --(*argc);
      --i;
    } else if (IsFlag(argv[i], "help")) {
      PrintUsageAndExit();
    }
  }
  for (auto const* flag :
       {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format})
    if (*flag != "console" && *flag != "json" && *flag != "csv") {
      PrintUsageAndExit();
    }
  if (FLAGS_benchmark_color.empty()) {
    PrintUsageAndExit();
  }
}

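// Constructing a function-local static std::ios_base::Init guarantees the
// standard C++ streams are initialized before any caller uses them; this
// function exists so other translation units can force that initialization
// from a static initializer.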
int InitializeStreams() {
  static std::ios_base::Init init;
  return 0;
}

}  // end namespace internal

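// Library entry point: parses benchmark flags out of argc/argv and sets the
// logging verbosity. Must be called before RunSpecifiedBenchmarks().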
void Initialize(int* argc, char** argv) {
  internal::ParseCommandLineFlags(argc, argv);
  internal::LogLevel() = FLAGS_v;
}

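// Reports every argument left in argv after Initialize() has consumed the
// flags it recognizes; returns true if any remain.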
bool ReportUnrecognizedArguments(int argc, char** argv) {
  for (int i = 1; i < argc; ++i) {
    fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0],
            argv[i]);
  }
  return argc > 1;
}

}  // end namespace benchmark