// Copyright 2016 Ismael Jimenez Martinez. All rights reserved.
// Copyright 2017 Roman Lebedev. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "statistics.h"

#include <algorithm>
#include <cmath>
#include <map>
#include <numeric>
#include <string>
#include <vector>

#include "benchmark/benchmark.h"
#include "check.h"
namespace benchmark {

auto StatisticsSum = [](const std::vector<double>& v) {
  return std::accumulate(v.begin(), v.end(), 0.0);
};

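// Arithmetic mean of the sample set; returns 0 for an empty set.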
double StatisticsMean(const std::vector<double>& v) {
  if (v.empty()) return 0.0;
  return StatisticsSum(v) * (1.0 / v.size());
}

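// Median of the sample set, located with a partial sort (std::nth_element).
// Falls back to the mean for fewer than three samples.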
double StatisticsMedian(const std::vector<double>& v) {
  if (v.size() < 3) return StatisticsMean(v);
  std::vector<double> copy(v);

  auto center = copy.begin() + v.size() / 2;
  std::nth_element(copy.begin(), center, copy.end());

  // Did we have an odd number of samples? If yes, then center is the median.
  // If not, then we are looking for the average between center and the value
  // before it. Instead of resorting, we just look for the max value before
  // center, which is not necessarily the element immediately preceding
  // `center`, since `copy` is only partially sorted by `nth_element`.
  if (v.size() % 2 == 1) return *center;
  auto center2 = std::max_element(copy.begin(), center);
  return (*center + *center2) / 2.0;
}

// Return the sum of the squares of this sample set.
auto SumSquares = [](const std::vector<double>& v) {
  return std::inner_product(v.begin(), v.end(), v.begin(), 0.0);
};

auto Sqr = [](const double dat) { return dat * dat; };
auto Sqrt = [](const double dat) {
  // Avoid NaN due to imprecision in the calculations.
  if (dat < 0.0) return 0.0;
  return std::sqrt(dat);
};

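// Sample standard deviation, computed as
// sqrt(n / (n - 1) * (E[x^2] - E[x]^2)).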
double StatisticsStdDev(const std::vector<double>& v) {
  const auto mean = StatisticsMean(v);
  if (v.empty()) return mean;

  // Sample standard deviation is undefined for n = 1.
  if (v.size() == 1) return 0.0;

  const double avg_squares = SumSquares(v) * (1.0 / v.size());
  return Sqrt(v.size() / (v.size() - 1.0) * (avg_squares - Sqr(mean)));
}

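// Coefficient of variation: the sample standard deviation divided by the
// mean. Returns 0 when there are fewer than two samples.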
double StatisticsCV(const std::vector<double>& v) {
  if (v.size() < 2) return 0.0;

  const auto stddev = StatisticsStdDev(v);
  const auto mean = StatisticsMean(v);

  return stddev / mean;
}

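// Computes one aggregate Run per statistic configured on the benchmark
// (e.g. mean, median, stddev, cv) across all non-skipped repetitions.
// Returns no aggregates if fewer than two repetitions completed.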
std::vector<BenchmarkReporter::Run> ComputeStats(
    const std::vector<BenchmarkReporter::Run>& reports) {
  typedef BenchmarkReporter::Run Run;
  std::vector<Run> results;

  auto error_count = std::count_if(reports.begin(), reports.end(),
                                   [](Run const& run) { return run.skipped; });

  if (reports.size() - error_count < 2) {
    // We don't report aggregated data if there were fewer than two
    // non-skipped runs.
    return results;
  }

  // Accumulators.
  std::vector<double> real_accumulated_time_stat;
  std::vector<double> cpu_accumulated_time_stat;

  real_accumulated_time_stat.reserve(reports.size());
  cpu_accumulated_time_stat.reserve(reports.size());

  // All repetitions should be run with the same number of iterations, so we
  // can take this information from the first benchmark.
  const IterationCount run_iterations = reports.front().iterations;
  // Create stats for user counters.
  struct CounterStat {
    Counter c;
    std::vector<double> s;
  };
  std::map<std::string, CounterStat> counter_stats;
  for (Run const& r : reports) {
    for (auto const& cnt : r.counters) {
      auto it = counter_stats.find(cnt.first);
      if (it == counter_stats.end()) {
        it = counter_stats
                 .emplace(cnt.first,
                          CounterStat{cnt.second, std::vector<double>{}})
                 .first;
        it->second.s.reserve(reports.size());
      } else {
        BM_CHECK_EQ(it->second.c.flags, cnt.second.flags);
      }
    }
  }

  // Populate the accumulators.
  for (Run const& run : reports) {
    BM_CHECK_EQ(reports[0].benchmark_name(), run.benchmark_name());
    BM_CHECK_EQ(run_iterations, run.iterations);
    if (run.skipped) continue;
    real_accumulated_time_stat.emplace_back(run.real_accumulated_time);
    cpu_accumulated_time_stat.emplace_back(run.cpu_accumulated_time);
    // User counters.
    for (auto const& cnt : run.counters) {
      auto it = counter_stats.find(cnt.first);
      BM_CHECK_NE(it, counter_stats.end());
      it->second.s.emplace_back(cnt.second);
    }
  }

  // Only add the label if it is the same for all runs.
  std::string report_label = reports[0].report_label;
  for (std::size_t i = 1; i < reports.size(); i++) {
    if (reports[i].report_label != report_label) {
      report_label = "";
      break;
    }
  }

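  // Time aggregates are rescaled by this factor; see the comment in the
  // StatisticUnit::kTime branch below.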
  const double iteration_rescale_factor =
      double(reports.size()) / double(run_iterations);

  for (const auto& Stat : *reports[0].statistics) {
    // Get the data from the accumulators into BenchmarkReporter::Run's.
    Run data;
    data.run_name = reports[0].run_name;
    data.family_index = reports[0].family_index;
    data.per_family_instance_index = reports[0].per_family_instance_index;
    data.run_type = BenchmarkReporter::Run::RT_Aggregate;
    data.threads = reports[0].threads;
    data.repetitions = reports[0].repetitions;
    data.repetition_index = Run::no_repetition_index;
    data.aggregate_name = Stat.name_;
    data.aggregate_unit = Stat.unit_;
    data.report_label = report_label;

    // It is incorrect to say that an aggregate is computed over a run's
    // iterations, because those iterations were already averaged.
    // Similarly, if there are N repetitions with one iteration each,
    // an aggregate is computed over N measurements, not one.
    // Thus it is best to simply use the count of separate reports.
    data.iterations = reports.size();

    data.real_accumulated_time = Stat.compute_(real_accumulated_time_stat);
    data.cpu_accumulated_time = Stat.compute_(cpu_accumulated_time_stat);

    if (data.aggregate_unit == StatisticUnit::kTime) {
      // We will divide these times by data.iterations when reporting, but
      // data.iterations is not necessarily the scale of these measurements,
      // because in each repetition these timers are summed over all the
      // iterations. And if we want to say that the stats are over N
      // repetitions and not M iterations, we need to multiply them by (N/M).
      data.real_accumulated_time *= iteration_rescale_factor;
      data.cpu_accumulated_time *= iteration_rescale_factor;
    }

    data.time_unit = reports[0].time_unit;

    // User counters.
    for (auto const& kv : counter_stats) {
      // Do *NOT* rescale the custom counters. They are already properly
      // scaled.
      const auto uc_stat = Stat.compute_(kv.second.s);
      auto c = Counter(uc_stat, counter_stats[kv.first].c.flags,
                       counter_stats[kv.first].c.oneK);
      data.counters[kv.first] = c;
    }

    results.push_back(data);
  }

  return results;
}

}  // end namespace benchmark