1 // Copyright 2012 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/metrics/histogram_samples.h"
6
7 #include <limits>
8
9 #include "base/compiler_specific.h"
10 #include "base/memory/raw_ptr.h"
11 #include "base/metrics/histogram_functions.h"
12 #include "base/metrics/histogram_macros.h"
13 #include "base/numerics/safe_conversions.h"
14 #include "base/numerics/safe_math.h"
15 #include "base/pickle.h"
16 #include "base/strings/stringprintf.h"
17
18 namespace base {
19
20 namespace {
21
// A shorthand constant for the max value of size_t. Used by iterators to
// signal "no bucket index available".
constexpr size_t kSizeMax = std::numeric_limits<size_t>::max();

// A constant stored in an AtomicSingleSample (as_atomic) to indicate that the
// sample is "disabled" and no further accumulation should be done with it. The
// value is chosen such that it will be MAX_UINT16 for both |bucket| & |count|
// (-1 as int32 is 0xFFFFFFFF, i.e. 0xFFFF in each 16-bit half), and thus less
// likely to conflict with real use. Conflicts are explicitly handled in the
// code but it's worth making them as unlikely as possible.
constexpr int32_t kDisabledSingleSample = -1;
31
// Iterates over (min, max, count) triples previously written to a Pickle by
// HistogramSamples::Serialize(), exposing them through the common
// SampleCountIterator interface. Used by AddFromPickle().
class SampleCountPickleIterator : public SampleCountIterator {
 public:
  explicit SampleCountPickleIterator(PickleIterator* iter);

  bool Done() const override;
  void Next() override;
  void Get(HistogramBase::Sample* min,
           int64_t* max,
           HistogramBase::Count* count) override;

 private:
  // Borrowed pickle reader; must outlive this iterator.
  const raw_ptr<PickleIterator> iter_;

  // The current entry's values, pre-fetched by Next().
  HistogramBase::Sample min_;
  int64_t max_;
  HistogramBase::Count count_;
  bool is_done_;
};
50
SampleCountPickleIterator(PickleIterator * iter)51 SampleCountPickleIterator::SampleCountPickleIterator(PickleIterator* iter)
52 : iter_(iter),
53 is_done_(false) {
54 Next();
55 }
56
// Returns true once the underlying pickle has been exhausted.
bool SampleCountPickleIterator::Done() const {
  return is_done_;
}
60
Next()61 void SampleCountPickleIterator::Next() {
62 DCHECK(!Done());
63 if (!iter_->ReadInt(&min_) || !iter_->ReadInt64(&max_) ||
64 !iter_->ReadInt(&count_)) {
65 is_done_ = true;
66 }
67 }
68
// Returns the entry that Next() pre-fetched. Valid only while !Done().
void SampleCountPickleIterator::Get(HistogramBase::Sample* min,
                                    int64_t* max,
                                    HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}
77
78 } // namespace
79
// The union's atomic view and its (bucket, count) parts view must occupy
// exactly the same 32 bits for the aliasing below to be valid.
static_assert(sizeof(HistogramSamples::AtomicSingleSample) ==
                  sizeof(subtle::Atomic32),
              "AtomicSingleSample isn't 32 bits");
83
Load() const84 HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Load()
85 const {
86 AtomicSingleSample single_sample(subtle::Acquire_Load(&as_atomic));
87
88 // If the sample was extracted/disabled, it's still zero to the outside.
89 if (single_sample.as_atomic == kDisabledSingleSample)
90 single_sample.as_atomic = 0;
91
92 return single_sample.as_parts;
93 }
94
Extract(AtomicSingleSample new_value)95 HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Extract(
96 AtomicSingleSample new_value) {
97 DCHECK(new_value.as_atomic != kDisabledSingleSample)
98 << "Disabling an AtomicSingleSample should be done through "
99 "ExtractAndDisable().";
100
101 AtomicSingleSample old_value;
102
103 // Because a concurrent call may modify and/or disable this object as we are
104 // trying to extract its value, a compare-and-swap loop must be done to ensure
105 // that the value was not changed between the reading and writing (and to
106 // prevent accidentally re-enabling this object).
107 while (true) {
108 old_value.as_atomic = subtle::Acquire_Load(&as_atomic);
109
110 // If this object was already disabled, return an empty sample and keep it
111 // disabled.
112 if (old_value.as_atomic == kDisabledSingleSample) {
113 old_value.as_atomic = 0;
114 return old_value.as_parts;
115 }
116
117 // Extract the single-sample from memory. |existing| is what was in that
118 // memory location at the time of the call; if it doesn't match |original|
119 // (i.e., the single-sample was concurrently modified during this
120 // iteration), then the swap did not happen, so try again.
121 subtle::Atomic32 existing = subtle::Release_CompareAndSwap(
122 &as_atomic, old_value.as_atomic, new_value.as_atomic);
123 if (existing == old_value.as_atomic) {
124 return old_value.as_parts;
125 }
126 }
127 }
128
129 HistogramSamples::SingleSample
ExtractAndDisable()130 HistogramSamples::AtomicSingleSample::ExtractAndDisable() {
131 AtomicSingleSample old_value(
132 subtle::NoBarrier_AtomicExchange(&as_atomic, kDisabledSingleSample));
133 // If this object was already disabled, return an empty sample.
134 if (old_value.as_atomic == kDisabledSingleSample) {
135 old_value.as_atomic = 0;
136 }
137 return old_value.as_parts;
138 }
139
// Adds |count| samples to |bucket|, returning false when the accumulation
// cannot be represented in the 16+16-bit single-sample (different bucket,
// out-of-range values, count overflow, or a disabled sample) so the caller
// can fall back to a full bucket vector.
bool HistogramSamples::AtomicSingleSample::Accumulate(
    size_t bucket,
    HistogramBase::Count count) {
  if (count == 0)
    return true;

  // Convert the parameters to 16-bit variables because it's all 16-bit below.
  // To support decrements/subtractions, divide the |count| into sign/value and
  // do the proper operation below. The alternative is to change the single-
  // sample's count to be a signed integer (int16_t) and just add an int16_t
  // |count16| but that is somewhat wasteful given that the single-sample is
  // never expected to have a count less than zero.
  if (count < -std::numeric_limits<uint16_t>::max() ||
      count > std::numeric_limits<uint16_t>::max() ||
      bucket > std::numeric_limits<uint16_t>::max()) {
    return false;
  }
  bool count_is_negative = count < 0;
  uint16_t count16 = static_cast<uint16_t>(count_is_negative ? -count : count);
  uint16_t bucket16 = static_cast<uint16_t>(bucket);

  // A local, unshared copy of the single-sample is necessary so the parts
  // can be manipulated without worrying about atomicity.
  AtomicSingleSample single_sample;

  // Compare-and-swap loop: recompute the updated value from a fresh load on
  // every iteration until the store lands without a concurrent modification.
  bool sample_updated;
  do {
    subtle::Atomic32 original = subtle::Acquire_Load(&as_atomic);
    if (original == kDisabledSingleSample)
      return false;
    single_sample.as_atomic = original;
    if (single_sample.as_atomic != 0) {
      // Only the same bucket (parameter and stored) can be counted multiple
      // times.
      if (single_sample.as_parts.bucket != bucket16)
        return false;
    } else {
      // The |single_sample| was zero so becomes the |bucket| parameter, the
      // contents of which were checked above to fit in 16 bits.
      single_sample.as_parts.bucket = bucket16;
    }

    // Update count, making sure that it doesn't overflow.
    CheckedNumeric<uint16_t> new_count(single_sample.as_parts.count);
    if (count_is_negative)
      new_count -= count16;
    else
      new_count += count16;
    if (!new_count.AssignIfValid(&single_sample.as_parts.count))
      return false;

    // Don't let this become equivalent to the "disabled" value.
    if (single_sample.as_atomic == kDisabledSingleSample)
      return false;

    // Store the updated single-sample back into memory. |existing| is what
    // was in that memory location at the time of the call; if it doesn't
    // match |original| then the swap didn't happen so loop again.
    subtle::Atomic32 existing = subtle::Release_CompareAndSwap(
        &as_atomic, original, single_sample.as_atomic);
    sample_updated = (existing == original);
  } while (!sample_updated);

  return true;
}
205
// Returns true if the sample currently holds the "disabled" sentinel.
bool HistogramSamples::AtomicSingleSample::IsDisabled() const {
  return subtle::Acquire_Load(&as_atomic) == kDisabledSingleSample;
}
209
// Zero-fills all members in one shot.
HistogramSamples::LocalMetadata::LocalMetadata() {
  // This is the same way it's done for persistent metadata since no ctor
  // is called for the data members in that case.
  memset(this, 0, sizeof(*this));
}
215
HistogramSamples(uint64_t id,Metadata * meta)216 HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta)
217 : meta_(meta) {
218 DCHECK(meta_->id == 0 || meta_->id == id);
219
220 // It's possible that |meta| is contained in initialized, read-only memory
221 // so it's essential that no write be done in that case.
222 if (!meta_->id)
223 meta_->id = id;
224 }
225
// Same as above but takes ownership of the metadata, keeping it alive for
// the lifetime of this object.
HistogramSamples::HistogramSamples(uint64_t id, std::unique_ptr<Metadata> meta)
    : HistogramSamples(id, meta.get()) {
  meta_owned_ = std::move(meta);
}
230
// This mustn't do anything with |meta_|. It was passed to the ctor and may
// be invalid by the time this dtor gets called.
HistogramSamples::~HistogramSamples() = default;
234
Add(const HistogramSamples & other)235 void HistogramSamples::Add(const HistogramSamples& other) {
236 IncreaseSumAndCount(other.sum(), other.redundant_count());
237 std::unique_ptr<SampleCountIterator> it = other.Iterator();
238 bool success = AddSubtractImpl(it.get(), ADD);
239 DCHECK(success);
240 }
241
AddFromPickle(PickleIterator * iter)242 bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
243 int64_t sum;
244 HistogramBase::Count redundant_count;
245
246 if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count))
247 return false;
248
249 IncreaseSumAndCount(sum, redundant_count);
250
251 SampleCountPickleIterator pickle_iter(iter);
252 return AddSubtractImpl(&pickle_iter, ADD);
253 }
254
Subtract(const HistogramSamples & other)255 void HistogramSamples::Subtract(const HistogramSamples& other) {
256 IncreaseSumAndCount(-other.sum(), -other.redundant_count());
257 std::unique_ptr<SampleCountIterator> it = other.Iterator();
258 bool success = AddSubtractImpl(it.get(), SUBTRACT);
259 DCHECK(success);
260 }
261
// Moves all of |other|'s samples into this object, zeroing |other| as it
// goes. The transfer is best-effort atomic: see the 32-bit caveat below.
void HistogramSamples::Extract(HistogramSamples& other) {
  static_assert(sizeof(other.meta_->sum) == 8);

#ifdef ARCH_CPU_64_BITS
  // NoBarrier_AtomicExchange() is only defined for 64-bit types if
  // the ARCH_CPU_64_BITS macro is set.
  subtle::Atomic64 other_sum =
      subtle::NoBarrier_AtomicExchange(&other.meta_->sum, 0);
#else
  // |sum| is only atomic on 64 bit archs. Make |other_sum| volatile so that
  // the following code is not optimized or rearranged to be something like:
  //     IncreaseSumAndCount(other.meta_->sum, ...);
  //     other.meta_->sum = 0;
  // Or:
  //     int64_t other_sum = other.meta_->sum;
  //     other.meta_->sum = 0;
  //     IncreaseSumAndCount(other_sum, ...);
  // Which do not guarantee eventual consistency anymore (other.meta_->sum may
  // be modified concurrently at any time). However, despite this, eventual
  // consistency is still not guaranteed here because performing 64-bit
  // operations (loading, storing, adding, etc.) on a 32-bit machine cannot be
  // done atomically, but this at least reduces the odds of inconsistencies, at
  // the cost of a few extra instructions.
  volatile int64_t other_sum = other.meta_->sum;
  other.meta_->sum -= other_sum;
#endif  // ARCH_CPU_64_BITS
  // Atomically drain |other|'s redundant count, then fold both totals and the
  // per-bucket counts (via an extracting iterator) into this object.
  HistogramBase::AtomicCount other_redundant_count =
      subtle::NoBarrier_AtomicExchange(&other.meta_->redundant_count, 0);
  IncreaseSumAndCount(other_sum, other_redundant_count);
  std::unique_ptr<SampleCountIterator> it = other.ExtractingIterator();
  bool success = AddSubtractImpl(it.get(), ADD);
  DCHECK(success);
}
295
Serialize(Pickle * pickle) const296 void HistogramSamples::Serialize(Pickle* pickle) const {
297 pickle->WriteInt64(sum());
298 pickle->WriteInt(redundant_count());
299
300 HistogramBase::Sample min;
301 int64_t max;
302 HistogramBase::Count count;
303 for (std::unique_ptr<SampleCountIterator> it = Iterator(); !it->Done();
304 it->Next()) {
305 it->Get(&min, &max, &count);
306 pickle->WriteInt(min);
307 pickle->WriteInt64(max);
308 pickle->WriteInt(count);
309 }
310 }
311
AccumulateSingleSample(HistogramBase::Sample value,HistogramBase::Count count,size_t bucket)312 bool HistogramSamples::AccumulateSingleSample(HistogramBase::Sample value,
313 HistogramBase::Count count,
314 size_t bucket) {
315 if (single_sample().Accumulate(bucket, count)) {
316 // Success. Update the (separate) sum and redundant-count.
317 IncreaseSumAndCount(strict_cast<int64_t>(value) * count, count);
318 return true;
319 }
320 return false;
321 }
322
// Adds |sum| and |count| to the shared metadata totals. The 64-bit sum is
// only updated atomically on 64-bit architectures; on 32-bit the plain +=
// is a best-effort, non-atomic update.
void HistogramSamples::IncreaseSumAndCount(int64_t sum,
                                           HistogramBase::Count count) {
#ifdef ARCH_CPU_64_BITS
  subtle::NoBarrier_AtomicIncrement(&meta_->sum, sum);
#else
  meta_->sum += sum;
#endif
  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, count);
}
332
// Reports (to UMA meta-histograms) that a negative sample count was seen:
// the reason, the magnitude of the decrement, and which histogram (by id)
// was affected.
void HistogramSamples::RecordNegativeSample(NegativeSampleReason reason,
                                            HistogramBase::Count increment) {
  UMA_HISTOGRAM_ENUMERATION("UMA.NegativeSamples.Reason", reason,
                            MAX_NEGATIVE_SAMPLE_REASONS);
  UMA_HISTOGRAM_CUSTOM_COUNTS("UMA.NegativeSamples.Increment", increment, 1,
                              1 << 30, 100);
  UmaHistogramSparse("UMA.NegativeSamples.Histogram",
                     static_cast<int32_t>(id()));
}
342
ToGraphDict(StringPiece histogram_name,int32_t flags) const343 base::Value::Dict HistogramSamples::ToGraphDict(StringPiece histogram_name,
344 int32_t flags) const {
345 base::Value::Dict dict;
346 dict.Set("name", histogram_name);
347 dict.Set("header", GetAsciiHeader(histogram_name, flags));
348 dict.Set("body", GetAsciiBody());
349 return dict;
350 }
351
GetAsciiHeader(StringPiece histogram_name,int32_t flags) const352 std::string HistogramSamples::GetAsciiHeader(StringPiece histogram_name,
353 int32_t flags) const {
354 std::string output;
355 StringAppendF(&output, "Histogram: %.*s recorded %d samples",
356 static_cast<int>(histogram_name.size()), histogram_name.data(),
357 TotalCount());
358 if (flags)
359 StringAppendF(&output, " (flags = 0x%x)", flags);
360 return output;
361 }
362
GetAsciiBody() const363 std::string HistogramSamples::GetAsciiBody() const {
364 HistogramBase::Count total_count = TotalCount();
365 double scaled_total_count = total_count / 100.0;
366
367 // Determine how wide the largest bucket range is (how many digits to print),
368 // so that we'll be able to right-align starts for the graphical bars.
369 // Determine which bucket has the largest sample count so that we can
370 // normalize the graphical bar-width relative to that sample count.
371 HistogramBase::Count largest_count = 0;
372 HistogramBase::Sample largest_sample = 0;
373 std::unique_ptr<SampleCountIterator> it = Iterator();
374 while (!it->Done()) {
375 HistogramBase::Sample min;
376 int64_t max;
377 HistogramBase::Count count;
378 it->Get(&min, &max, &count);
379 if (min > largest_sample)
380 largest_sample = min;
381 if (count > largest_count)
382 largest_count = count;
383 it->Next();
384 }
385 // Scale histogram bucket counts to take at most 72 characters.
386 // Note: Keep in sync w/ kLineLength sample_vector.cc
387 const double kLineLength = 72;
388 double scaling_factor = 1;
389 if (largest_count > kLineLength)
390 scaling_factor = kLineLength / largest_count;
391 size_t print_width = GetSimpleAsciiBucketRange(largest_sample).size() + 1;
392
393 // iterate over each item and display them
394 it = Iterator();
395 std::string output;
396 while (!it->Done()) {
397 HistogramBase::Sample min;
398 int64_t max;
399 HistogramBase::Count count;
400 it->Get(&min, &max, &count);
401
402 // value is min, so display it
403 std::string range = GetSimpleAsciiBucketRange(min);
404 output.append(range);
405 for (size_t j = 0; range.size() + j < print_width + 1; ++j)
406 output.push_back(' ');
407 HistogramBase::Count current_size = round(count * scaling_factor);
408 WriteAsciiBucketGraph(current_size, kLineLength, &output);
409 WriteAsciiBucketValue(count, scaled_total_count, &output);
410 StringAppendF(&output, "\n");
411 it->Next();
412 }
413 return output;
414 }
415
// Appends a fixed-width bar to |output|: |x_count| dashes, an 'O' marker,
// then spaces to fill out |line_length| columns.
void HistogramSamples::WriteAsciiBucketGraph(double x_count,
                                             int line_length,
                                             std::string* output) const {
  // Computed before the loops below consume |x_count|.
  int x_remainder = line_length - x_count;

  // Note: |x_count| is a double, so a fractional value (e.g. 3.5) yields one
  // extra dash — the loop runs while the pre-decrement value is positive.
  while (0 < x_count--)
    output->append("-");
  output->append("O");
  while (0 < x_remainder--)
    output->append(" ");
}
427
// Appends " (<count> = <pct>%)" to |output|. |scaled_sum| is the total count
// divided by 100, so |current| / |scaled_sum| yields a percentage.
void HistogramSamples::WriteAsciiBucketValue(HistogramBase::Count current,
                                             double scaled_sum,
                                             std::string* output) const {
  StringAppendF(output, " (%d = %3.1f%%)", current, current / scaled_sum);
}
433
// Formats a bucket label from its minimum value only (subclasses may render
// richer ranges).
const std::string HistogramSamples::GetSimpleAsciiBucketRange(
    HistogramBase::Sample sample) const {
  return StringPrintf("%d", sample);
}
438
SampleCountIterator::~SampleCountIterator() = default;
440
// Default implementation: no bucket index is available, so |index| is left
// untouched and false is returned. Subclasses with real bucket storage
// override this.
bool SampleCountIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  return false;
}
445
// An iterator over exactly one (min, max, count) entry. |bucket_index| may
// be kSizeMax when unknown; |value_was_extracted| indicates the sample was
// removed from its source and thus must be consumed (see the dtor).
SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
                                           int64_t max,
                                           HistogramBase::Count count,
                                           size_t bucket_index,
                                           bool value_was_extracted)
    : min_(min),
      max_(max),
      bucket_index_(bucket_index),
      count_(count),
      value_was_extracted_(value_was_extracted) {}
456
SingleSampleIterator::~SingleSampleIterator() {
  // Because this object may have been instantiated in such a way that the
  // samples it is holding were already extracted from the underlying data, we
  // add a DCHECK to ensure that in those cases, users of this iterator read the
  // samples, otherwise they may be lost.
  DCHECK(!value_was_extracted_ || Done());
}
464
// The single entry has been consumed once its count drops to zero.
bool SingleSampleIterator::Done() const {
  return count_ == 0;
}
468
// Consumes the single entry by zeroing its count, which makes Done() true.
void SingleSampleIterator::Next() {
  DCHECK(!Done());
  count_ = 0;
}
473
// Returns the single entry's values. Valid only while !Done().
void SingleSampleIterator::Get(HistogramBase::Sample* min,
                               int64_t* max,
                               HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}
482
GetBucketIndex(size_t * index) const483 bool SingleSampleIterator::GetBucketIndex(size_t* index) const {
484 DCHECK(!Done());
485 if (bucket_index_ == kSizeMax)
486 return false;
487 *index = bucket_index_;
488 return true;
489 }
490
491 } // namespace base
492