• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "base/metrics/histogram_samples.h"
6 
7 #include <limits>
8 
9 #include "base/compiler_specific.h"
10 #include "base/memory/raw_ptr.h"
11 #include "base/metrics/histogram_functions.h"
12 #include "base/metrics/histogram_macros.h"
13 #include "base/numerics/safe_conversions.h"
14 #include "base/numerics/safe_math.h"
15 #include "base/pickle.h"
16 #include "base/strings/stringprintf.h"
17 
18 namespace base {
19 
20 namespace {
21 
// A shorthand constant for the max value of size_t, used as a sentinel for
// "no bucket index" (see SingleSampleIterator::GetBucketIndex).
constexpr size_t kSizeMax = std::numeric_limits<size_t>::max();

// A constant stored in an AtomicSingleSample (as_atomic) to indicate that the
// sample is "disabled" and no further accumulation should be done with it. The
// value is chosen such that it will be MAX_UINT16 for both |bucket| & |count|
// (all bits set, since -1 is 0xFFFFFFFF as an int32_t), and thus less likely
// to conflict with real use. Conflicts are explicitly handled in the code but
// it's worth making them as unlikely as possible.
constexpr int32_t kDisabledSingleSample = -1;
// Iterates over sample triples previously serialized into a Pickle by
// HistogramSamples::Serialize(). Each entry is a (min, max, count) triple;
// iteration ends when the pickle has no more data to read.
class SampleCountPickleIterator : public SampleCountIterator {
 public:
  explicit SampleCountPickleIterator(PickleIterator* iter);

  // SampleCountIterator:
  bool Done() const override;
  void Next() override;
  void Get(HistogramBase::Sample* min,
           int64_t* max,
           HistogramBase::Count* count) override;

 private:
  // The underlying pickle reader; not owned by this iterator.
  const raw_ptr<PickleIterator> iter_;

  // The most recently read triple; only valid while |is_done_| is false.
  HistogramBase::Sample min_;
  int64_t max_;
  HistogramBase::Count count_;
  bool is_done_;
};
50 
// Eagerly reads the first triple so that Done()/Get() are meaningful
// immediately after construction (an empty pickle yields Done() == true).
SampleCountPickleIterator::SampleCountPickleIterator(PickleIterator* iter)
    : iter_(iter),
      is_done_(false) {
  Next();
}
56 
// Returns true once a read from the pickle has failed (end of data).
bool SampleCountPickleIterator::Done() const {
  return is_done_;
}
60 
Next()61 void SampleCountPickleIterator::Next() {
62   DCHECK(!Done());
63   if (!iter_->ReadInt(&min_) || !iter_->ReadInt64(&max_) ||
64       !iter_->ReadInt(&count_)) {
65     is_done_ = true;
66   }
67 }
68 
// Copies the current triple into the caller-provided out-params. Only valid
// while !Done(), since the members are stale after a failed read.
void SampleCountPickleIterator::Get(HistogramBase::Sample* min,
                                    int64_t* max,
                                    HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}
77 
78 }  // namespace
79 
// The single-sample union must fit in one Atomic32 so that Load/Extract/
// Accumulate below can operate on it atomically.
static_assert(sizeof(HistogramSamples::AtomicSingleSample) ==
                  sizeof(subtle::Atomic32),
              "AtomicSingleSample isn't 32 bits");
83 
// Atomically reads the current single-sample. A "disabled" value is reported
// as an empty (zero) sample so callers never observe the sentinel.
HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Load()
    const {
  // Take a local copy so the parts can be inspected without a data race.
  AtomicSingleSample single_sample(subtle::Acquire_Load(&as_atomic));

  // If the sample was extracted/disabled, it's still zero to the outside.
  if (single_sample.as_atomic == kDisabledSingleSample)
    single_sample.as_atomic = 0;

  return single_sample.as_parts;
}
94 
// Atomically swaps in |new_value| and returns the previous single-sample.
// If the sample is currently disabled it stays disabled and an empty sample
// is returned. |new_value| must not itself be the disabled sentinel.
HistogramSamples::SingleSample HistogramSamples::AtomicSingleSample::Extract(
    AtomicSingleSample new_value) {
  DCHECK(new_value.as_atomic != kDisabledSingleSample)
      << "Disabling an AtomicSingleSample should be done through "
         "ExtractAndDisable().";

  AtomicSingleSample old_value;

  // Because a concurrent call may modify and/or disable this object as we are
  // trying to extract its value, a compare-and-swap loop must be done to ensure
  // that the value was not changed between the reading and writing (and to
  // prevent accidentally re-enabling this object).
  while (true) {
    old_value.as_atomic = subtle::Acquire_Load(&as_atomic);

    // If this object was already disabled, return an empty sample and keep it
    // disabled.
    if (old_value.as_atomic == kDisabledSingleSample) {
      old_value.as_atomic = 0;
      return old_value.as_parts;
    }

    // Extract the single-sample from memory. |existing| is what was in that
    // memory location at the time of the call; if it doesn't match |original|
    // (i.e., the single-sample was concurrently modified during this
    // iteration), then the swap did not happen, so try again.
    subtle::Atomic32 existing = subtle::Release_CompareAndSwap(
        &as_atomic, old_value.as_atomic, new_value.as_atomic);
    if (existing == old_value.as_atomic) {
      return old_value.as_parts;
    }
  }
}
128 
// Atomically replaces the single-sample with the disabled sentinel and
// returns what was there. Unlike Extract(), no CAS loop is needed because
// the exchange unconditionally installs the sentinel.
HistogramSamples::SingleSample
HistogramSamples::AtomicSingleSample::ExtractAndDisable() {
  AtomicSingleSample old_value(
      subtle::NoBarrier_AtomicExchange(&as_atomic, kDisabledSingleSample));
  // If this object was already disabled, return an empty sample.
  if (old_value.as_atomic == kDisabledSingleSample) {
    old_value.as_atomic = 0;
  }
  return old_value.as_parts;
}
139 
// Attempts to add |count| to the single-sample for |bucket|. Returns false
// when the accumulation cannot be represented here — wrong bucket, values
// out of 16-bit range, count overflow/underflow, sample disabled, or the
// result colliding with the disabled sentinel — in which case the caller is
// expected to fall back to a full bucket array.
bool HistogramSamples::AtomicSingleSample::Accumulate(
    size_t bucket,
    HistogramBase::Count count) {
  if (count == 0)
    return true;

  // Convert the parameters to 16-bit variables because it's all 16-bit below.
  // To support decrements/subtractions, divide the |count| into sign/value and
  // do the proper operation below. The alternative is to change the single-
  // sample's count to be a signed integer (int16_t) and just add an int16_t
  // |count16| but that is somewhat wasteful given that the single-sample is
  // never expected to have a count less than zero.
  if (count < -std::numeric_limits<uint16_t>::max() ||
      count > std::numeric_limits<uint16_t>::max() ||
      bucket > std::numeric_limits<uint16_t>::max()) {
    return false;
  }
  bool count_is_negative = count < 0;
  uint16_t count16 = static_cast<uint16_t>(count_is_negative ? -count : count);
  uint16_t bucket16 = static_cast<uint16_t>(bucket);

  // A local, unshared copy of the single-sample is necessary so the parts
  // can be manipulated without worrying about atomicity.
  AtomicSingleSample single_sample;

  bool sample_updated;
  do {
    subtle::Atomic32 original = subtle::Acquire_Load(&as_atomic);
    if (original == kDisabledSingleSample)
      return false;
    single_sample.as_atomic = original;
    if (single_sample.as_atomic != 0) {
      // Only the same bucket (parameter and stored) can be counted multiple
      // times.
      if (single_sample.as_parts.bucket != bucket16)
        return false;
    } else {
      // The |single_sample| was zero so becomes the |bucket| parameter, the
      // contents of which were checked above to fit in 16 bits.
      single_sample.as_parts.bucket = bucket16;
    }

    // Update count, making sure that it doesn't overflow.
    CheckedNumeric<uint16_t> new_count(single_sample.as_parts.count);
    if (count_is_negative)
      new_count -= count16;
    else
      new_count += count16;
    if (!new_count.AssignIfValid(&single_sample.as_parts.count))
      return false;

    // Don't let this become equivalent to the "disabled" value.
    if (single_sample.as_atomic == kDisabledSingleSample)
      return false;

    // Store the updated single-sample back into memory. |existing| is what
    // was in that memory location at the time of the call; if it doesn't
    // match |original| then the swap didn't happen so loop again.
    subtle::Atomic32 existing = subtle::Release_CompareAndSwap(
        &as_atomic, original, single_sample.as_atomic);
    sample_updated = (existing == original);
  } while (!sample_updated);

  return true;
}
205 
// Returns true if the sample holds the "disabled" sentinel installed by
// ExtractAndDisable().
bool HistogramSamples::AtomicSingleSample::IsDisabled() const {
  return subtle::Acquire_Load(&as_atomic) == kDisabledSingleSample;
}
209 
// Zero-fills all metadata fields in one shot.
// NOTE(review): memset(this, ...) assumes LocalMetadata is trivially
// copyable with no vtable — appears to hold here, but confirm if fields
// are ever added with non-trivial types.
HistogramSamples::LocalMetadata::LocalMetadata() {
  // This is the same way it's done for persistent metadata since no ctor
  // is called for the data members in that case.
  memset(this, 0, sizeof(*this));
}
215 
// Binds this object to externally-owned |meta|. The metadata's id must be
// unset (0) or already match |id|; it is stamped with |id| only when unset.
HistogramSamples::HistogramSamples(uint64_t id, Metadata* meta)
    : meta_(meta) {
  DCHECK(meta_->id == 0 || meta_->id == id);

  // It's possible that |meta| is contained in initialized, read-only memory
  // so it's essential that no write be done in that case.
  if (!meta_->id)
    meta_->id = id;
}
225 
// As above, but takes ownership of the metadata. Delegates to the raw-pointer
// ctor first (which validates/stamps the id), then stores the owner.
HistogramSamples::HistogramSamples(uint64_t id, std::unique_ptr<Metadata> meta)
    : HistogramSamples(id, meta.get()) {
  meta_owned_ = std::move(meta);
}
230 
// This mustn't do anything with |meta_|. It was passed to the ctor and may
// be invalid by the time this dtor gets called. (|meta_owned_|, if set, is
// released by its own destructor.)
HistogramSamples::~HistogramSamples() = default;
234 
Add(const HistogramSamples & other)235 void HistogramSamples::Add(const HistogramSamples& other) {
236   IncreaseSumAndCount(other.sum(), other.redundant_count());
237   std::unique_ptr<SampleCountIterator> it = other.Iterator();
238   bool success = AddSubtractImpl(it.get(), ADD);
239   DCHECK(success);
240 }
241 
AddFromPickle(PickleIterator * iter)242 bool HistogramSamples::AddFromPickle(PickleIterator* iter) {
243   int64_t sum;
244   HistogramBase::Count redundant_count;
245 
246   if (!iter->ReadInt64(&sum) || !iter->ReadInt(&redundant_count))
247     return false;
248 
249   IncreaseSumAndCount(sum, redundant_count);
250 
251   SampleCountPickleIterator pickle_iter(iter);
252   return AddSubtractImpl(&pickle_iter, ADD);
253 }
254 
Subtract(const HistogramSamples & other)255 void HistogramSamples::Subtract(const HistogramSamples& other) {
256   IncreaseSumAndCount(-other.sum(), -other.redundant_count());
257   std::unique_ptr<SampleCountIterator> it = other.Iterator();
258   bool success = AddSubtractImpl(it.get(), SUBTRACT);
259   DCHECK(success);
260 }
261 
// Moves all of |other|'s samples into this set, leaving |other| empty (on a
// best-effort basis under concurrency — see the 32-bit caveat below).
void HistogramSamples::Extract(HistogramSamples& other) {
  static_assert(sizeof(other.meta_->sum) == 8);

#ifdef ARCH_CPU_64_BITS
  // NoBarrier_AtomicExchange() is only defined for 64-bit types if
  // the ARCH_CPU_64_BITS macro is set.
  subtle::Atomic64 other_sum =
      subtle::NoBarrier_AtomicExchange(&other.meta_->sum, 0);
#else
  // |sum| is only atomic on 64 bit archs. Make |other_sum| volatile so that
  // the following code is not optimized or rearranged to be something like:
  //     IncreaseSumAndCount(other.meta_->sum, ...);
  //     other.meta_->sum = 0;
  // Or:
  //     int64_t other_sum = other.meta_->sum;
  //     other.meta_->sum = 0;
  //     IncreaseSumAndCount(other_sum, ...);
  // Which do not guarantee eventual consistency anymore (other.meta_->sum may
  // be modified concurrently at any time). However, despite this, eventual
  // consistency is still not guaranteed here because performing 64-bit
  // operations (loading, storing, adding, etc.) on a 32-bit machine cannot be
  // done atomically, but this at least reduces the odds of inconsistencies, at
  // the cost of a few extra instructions.
  volatile int64_t other_sum = other.meta_->sum;
  other.meta_->sum -= other_sum;
#endif  // ARCH_CPU_64_BITS
  // The redundant count is always 32-bit atomic, so a plain exchange works.
  HistogramBase::AtomicCount other_redundant_count =
      subtle::NoBarrier_AtomicExchange(&other.meta_->redundant_count, 0);
  IncreaseSumAndCount(other_sum, other_redundant_count);
  // The extracting iterator zeroes each bucket of |other| as it is visited.
  std::unique_ptr<SampleCountIterator> it = other.ExtractingIterator();
  bool success = AddSubtractImpl(it.get(), ADD);
  DCHECK(success);
}
295 
IsDefinitelyEmpty() const296 bool HistogramSamples::IsDefinitelyEmpty() const {
297   return sum() == 0 && redundant_count() == 0;
298 }
299 
Serialize(Pickle * pickle) const300 void HistogramSamples::Serialize(Pickle* pickle) const {
301   pickle->WriteInt64(sum());
302   pickle->WriteInt(redundant_count());
303 
304   HistogramBase::Sample min;
305   int64_t max;
306   HistogramBase::Count count;
307   for (std::unique_ptr<SampleCountIterator> it = Iterator(); !it->Done();
308        it->Next()) {
309     it->Get(&min, &max, &count);
310     pickle->WriteInt(min);
311     pickle->WriteInt64(max);
312     pickle->WriteInt(count);
313   }
314 }
315 
AccumulateSingleSample(HistogramBase::Sample value,HistogramBase::Count count,size_t bucket)316 bool HistogramSamples::AccumulateSingleSample(HistogramBase::Sample value,
317                                               HistogramBase::Count count,
318                                               size_t bucket) {
319   if (single_sample().Accumulate(bucket, count)) {
320     // Success. Update the (separate) sum and redundant-count.
321     IncreaseSumAndCount(strict_cast<int64_t>(value) * count, count);
322     return true;
323   }
324   return false;
325 }
326 
// Adds |sum| and |count| to the shared metadata. The 64-bit sum is only
// updated atomically on 64-bit architectures; on 32-bit the plain += is a
// known (accepted) race — see the comments in Extract().
void HistogramSamples::IncreaseSumAndCount(int64_t sum,
                                           HistogramBase::Count count) {
#ifdef ARCH_CPU_64_BITS
  subtle::NoBarrier_AtomicIncrement(&meta_->sum, sum);
#else
  meta_->sum += sum;
#endif
  subtle::NoBarrier_AtomicIncrement(&meta_->redundant_count, count);
}
336 
// Reports (via UMA meta-histograms) that a negative sample count was
// detected: why it happened, by how much, and in which histogram (by id).
void HistogramSamples::RecordNegativeSample(NegativeSampleReason reason,
                                            HistogramBase::Count increment) {
  UMA_HISTOGRAM_ENUMERATION("UMA.NegativeSamples.Reason", reason,
                            MAX_NEGATIVE_SAMPLE_REASONS);
  UMA_HISTOGRAM_CUSTOM_COUNTS("UMA.NegativeSamples.Increment", increment, 1,
                              1 << 30, 100);
  UmaHistogramSparse("UMA.NegativeSamples.Histogram",
                     static_cast<int32_t>(id()));
}
346 
ToGraphDict(StringPiece histogram_name,int32_t flags) const347 base::Value::Dict HistogramSamples::ToGraphDict(StringPiece histogram_name,
348                                                 int32_t flags) const {
349   base::Value::Dict dict;
350   dict.Set("name", histogram_name);
351   dict.Set("header", GetAsciiHeader(histogram_name, flags));
352   dict.Set("body", GetAsciiBody());
353   return dict;
354 }
355 
GetAsciiHeader(StringPiece histogram_name,int32_t flags) const356 std::string HistogramSamples::GetAsciiHeader(StringPiece histogram_name,
357                                              int32_t flags) const {
358   std::string output;
359   StringAppendF(&output, "Histogram: %.*s recorded %d samples",
360                 static_cast<int>(histogram_name.size()), histogram_name.data(),
361                 TotalCount());
362   if (flags)
363     StringAppendF(&output, " (flags = 0x%x)", flags);
364   return output;
365 }
366 
GetAsciiBody() const367 std::string HistogramSamples::GetAsciiBody() const {
368   HistogramBase::Count total_count = TotalCount();
369   double scaled_total_count = total_count / 100.0;
370 
371   // Determine how wide the largest bucket range is (how many digits to print),
372   // so that we'll be able to right-align starts for the graphical bars.
373   // Determine which bucket has the largest sample count so that we can
374   // normalize the graphical bar-width relative to that sample count.
375   HistogramBase::Count largest_count = 0;
376   HistogramBase::Sample largest_sample = 0;
377   std::unique_ptr<SampleCountIterator> it = Iterator();
378   while (!it->Done()) {
379     HistogramBase::Sample min;
380     int64_t max;
381     HistogramBase::Count count;
382     it->Get(&min, &max, &count);
383     if (min > largest_sample)
384       largest_sample = min;
385     if (count > largest_count)
386       largest_count = count;
387     it->Next();
388   }
389   // Scale histogram bucket counts to take at most 72 characters.
390   // Note: Keep in sync w/ kLineLength sample_vector.cc
391   const double kLineLength = 72;
392   double scaling_factor = 1;
393   if (largest_count > kLineLength)
394     scaling_factor = kLineLength / largest_count;
395   size_t print_width = GetSimpleAsciiBucketRange(largest_sample).size() + 1;
396 
397   // iterate over each item and display them
398   it = Iterator();
399   std::string output;
400   while (!it->Done()) {
401     HistogramBase::Sample min;
402     int64_t max;
403     HistogramBase::Count count;
404     it->Get(&min, &max, &count);
405 
406     // value is min, so display it
407     std::string range = GetSimpleAsciiBucketRange(min);
408     output.append(range);
409     for (size_t j = 0; range.size() + j < print_width + 1; ++j)
410       output.push_back(' ');
411     HistogramBase::Count current_size = round(count * scaling_factor);
412     WriteAsciiBucketGraph(current_size, kLineLength, &output);
413     WriteAsciiBucketValue(count, scaled_total_count, &output);
414     StringAppendF(&output, "\n");
415     it->Next();
416   }
417   return output;
418 }
419 
WriteAsciiBucketGraph(double x_count,int line_length,std::string * output) const420 void HistogramSamples::WriteAsciiBucketGraph(double x_count,
421                                              int line_length,
422                                              std::string* output) const {
423   int x_remainder = line_length - x_count;
424 
425   while (0 < x_count--)
426     output->append("-");
427   output->append("O");
428   while (0 < x_remainder--)
429     output->append(" ");
430 }
431 
WriteAsciiBucketValue(HistogramBase::Count current,double scaled_sum,std::string * output) const432 void HistogramSamples::WriteAsciiBucketValue(HistogramBase::Count current,
433                                              double scaled_sum,
434                                              std::string* output) const {
435   StringAppendF(output, " (%d = %3.1f%%)", current, current / scaled_sum);
436 }
437 
// Formats a bucket's value for display; subclasses may present richer range
// text. (The const-value return type is part of the declared interface and
// is kept for compatibility.)
const std::string HistogramSamples::GetSimpleAsciiBucketRange(
    HistogramBase::Sample sample) const {
  return StringPrintf("%d", sample);
}
442 
// Out-of-line virtual dtor anchors the vtable for the abstract iterator.
SampleCountIterator::~SampleCountIterator() = default;
444 
// Default implementation: bucket indexes are not available for this
// iterator type, so |index| is left untouched and false is returned.
bool SampleCountIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  return false;
}
449 
// Captures the one (min, max, count) entry this iterator will yield.
// |value_was_extracted| records whether the sample was removed from its
// backing store, which the dtor uses to verify the value was consumed.
SingleSampleIterator::SingleSampleIterator(HistogramBase::Sample min,
                                           int64_t max,
                                           HistogramBase::Count count,
                                           size_t bucket_index,
                                           bool value_was_extracted)
    : min_(min),
      max_(max),
      bucket_index_(bucket_index),
      count_(count),
      value_was_extracted_(value_was_extracted) {}
460 
SingleSampleIterator::~SingleSampleIterator() {
  // Because this object may have been instantiated in such a way that the
  // samples it is holding were already extracted from the underlying data, we
  // add a DCHECK to ensure that in those cases, users of this iterator read the
  // samples, otherwise they may be lost.
  DCHECK(!value_was_extracted_ || Done());
}
468 
// The single entry has been consumed once its count is zeroed by Next().
bool SingleSampleIterator::Done() const {
  return count_ == 0;
}
472 
// Consumes the single entry; zeroing the count makes Done() return true.
void SingleSampleIterator::Next() {
  DCHECK(!Done());
  count_ = 0;
}
477 
// Copies the single (min, max, count) entry into the out-params. Only valid
// while !Done().
void SingleSampleIterator::Get(HistogramBase::Sample* min,
                               int64_t* max,
                               HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = min_;
  *max = max_;
  *count = count_;
}
486 
// Reports the bucket index of the single entry, if one was provided at
// construction. kSizeMax is the "no index" sentinel, in which case |index|
// is left untouched and false is returned.
bool SingleSampleIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  if (bucket_index_ == kSizeMax)
    return false;
  *index = bucket_index_;
  return true;
}
494 
495 }  // namespace base
496