// Copyright 2012 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/metrics/sample_vector.h"

#include <ostream>

#include "base/check_op.h"
#include "base/debug/crash_logging.h"
#include "base/lazy_instance.h"
#include "base/memory/ptr_util.h"
#include "base/memory/raw_span.h"
#include "base/metrics/persistent_memory_allocator.h"
#include "base/notreached.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/stringprintf.h"
#include "base/synchronization/lock.h"
#include "base/threading/platform_thread.h"

// This SampleVector makes use of the single-sample embedded in the base
// HistogramSamples class. If the count is non-zero then there is guaranteed
// (within the bounds of "eventual consistency") to be no allocated external
// storage. Once the full counts storage is allocated, the single-sample must
// be extracted and disabled.

namespace base {

typedef HistogramBase::Count Count;
typedef HistogramBase::Sample Sample;

namespace {

// An iterator for sample vectors.
template <typename T>
class IteratorTemplate : public SampleCountIterator {
 public:
  IteratorTemplate(base::span<T> counts, const BucketRanges* bucket_ranges)
      : counts_(counts), bucket_ranges_(bucket_ranges) {
    DCHECK_GE(bucket_ranges_->bucket_count(), counts_.size());
    SkipEmptyBuckets();
  }

  ~IteratorTemplate() override;

  // SampleCountIterator:
  bool Done() const override { return index_ >= counts_.size(); }
  void Next() override {
    DCHECK(!Done());
    index_++;
    SkipEmptyBuckets();
  }
  void Get(HistogramBase::Sample* min,
           int64_t* max,
           HistogramBase::Count* count) override;

  // SampleVector uses predefined buckets, so iterator can return bucket index.
  bool GetBucketIndex(size_t* index) const override {
    DCHECK(!Done());
    if (index != nullptr) {
      *index = index_;
    }
    return true;
  }

 private:
  void SkipEmptyBuckets() {
    if (Done()) {
      return;
    }

    while (index_ < counts_.size()) {
      if (subtle::NoBarrier_Load(&counts_[index_]) != 0) {
        return;
      }
      index_++;
    }
  }

  raw_span<T> counts_;
  raw_ptr<const BucketRanges> bucket_ranges_;
  size_t index_ = 0;
};

using SampleVectorIterator = IteratorTemplate<const HistogramBase::AtomicCount>;

template <>
SampleVectorIterator::~IteratorTemplate() = default;

// Get() for an iterator of a SampleVector.
template <>
void SampleVectorIterator::Get(HistogramBase::Sample* min,
                               int64_t* max,
                               HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = bucket_ranges_->range(index_);
  *max = strict_cast<int64_t>(bucket_ranges_->range(index_ + 1));
  *count = subtle::NoBarrier_Load(&counts_[index_]);
}

using ExtractingSampleVectorIterator =
    IteratorTemplate<HistogramBase::AtomicCount>;

template <>
ExtractingSampleVectorIterator::~IteratorTemplate() {
  // Ensure that the user has consumed all the samples so that none are lost.
  DCHECK(Done());
}

// Get() for an extracting iterator of a SampleVector.
template <>
void ExtractingSampleVectorIterator::Get(HistogramBase::Sample* min,
                                         int64_t* max,
                                         HistogramBase::Count* count) {
  DCHECK(!Done());
  *min = bucket_ranges_->range(index_);
  *max = strict_cast<int64_t>(bucket_ranges_->range(index_ + 1));
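  // Atomically swap the bucket's count with zero so that each sample is
  // reported at most once by this extracting iterator.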
  *count = subtle::NoBarrier_AtomicExchange(&counts_[index_], 0);
}

}  // namespace

SampleVectorBase::SampleVectorBase(uint64_t id,
                                   Metadata* meta,
                                   const BucketRanges* bucket_ranges)
    : HistogramSamples(id, meta), bucket_ranges_(bucket_ranges) {
  CHECK_GE(bucket_ranges_->bucket_count(), 1u);
}

SampleVectorBase::SampleVectorBase(uint64_t id,
                                   std::unique_ptr<Metadata> meta,
                                   const BucketRanges* bucket_ranges)
    : HistogramSamples(id, std::move(meta)), bucket_ranges_(bucket_ranges) {
  CHECK_GE(bucket_ranges_->bucket_count(), 1u);
}

SampleVectorBase::~SampleVectorBase() = default;

void SampleVectorBase::Accumulate(Sample value, Count count) {
  const size_t bucket_index = GetBucketIndex(value);

  // Handle the single-sample case.
  if (!counts()) {
    // Try to accumulate the parameters into the single-count entry.
    if (AccumulateSingleSample(value, count, bucket_index)) {
      // A race condition could lead to a new single-sample being accumulated
      // above just after another thread executed the MountCountsStorage below.
      // Since it is mounted, it could be mounted elsewhere and have values
      // written to it. It's not allowed to have both a single-sample and
      // entries in the counts array so move the single-sample.
      if (counts())
        MoveSingleSampleToCounts();
      return;
    }

    // Need real storage to store both what was in the single-sample plus the
    // parameter information.
    MountCountsStorageAndMoveSingleSample();
  }

  // Handle the multi-sample case.
  Count new_value =
      subtle::NoBarrier_AtomicIncrement(&counts()[bucket_index], count);
  IncreaseSumAndCount(strict_cast<int64_t>(count) * value, count);

  // TODO(bcwhite) Remove after crbug.com/682680.
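  // A sign flip between the value before and after the increment, while adding
  // a positive count, indicates that the bucket's counter wrapped around.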
  Count old_value = new_value - count;
  if ((new_value >= 0) != (old_value >= 0) && count > 0)
    RecordNegativeSample(SAMPLES_ACCUMULATE_OVERFLOW, count);
}

Count SampleVectorBase::GetCount(Sample value) const {
  return GetCountAtIndex(GetBucketIndex(value));
}

Count SampleVectorBase::TotalCount() const {
  // Handle the single-sample case.
  SingleSample sample = single_sample().Load();
  if (sample.count != 0)
    return sample.count;

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage()) {
    Count count = 0;
    size_t size = counts_size();
    const HistogramBase::AtomicCount* counts_array = counts();
    for (size_t i = 0; i < size; ++i) {
      count += subtle::NoBarrier_Load(&counts_array[i]);
    }
    return count;
  }

  // And the no-value case.
  return 0;
}

Count SampleVectorBase::GetCountAtIndex(size_t bucket_index) const {
  DCHECK(bucket_index < counts_size());

  // Handle the single-sample case.
  SingleSample sample = single_sample().Load();
  if (sample.count != 0)
    return sample.bucket == bucket_index ? sample.count : 0;

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage())
    return subtle::NoBarrier_Load(&counts()[bucket_index]);

  // And the no-value case.
  return 0;
}

std::unique_ptr<SampleCountIterator> SampleVectorBase::Iterator() const {
  // Handle the single-sample case.
  SingleSample sample = single_sample().Load();
  if (sample.count != 0) {
    return std::make_unique<SingleSampleIterator>(
        bucket_ranges_->range(sample.bucket),
        bucket_ranges_->range(sample.bucket + 1), sample.count, sample.bucket,
        /*value_was_extracted=*/false);
  }

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage()) {
    return std::make_unique<SampleVectorIterator>(
        base::make_span(counts(), counts_size()), bucket_ranges_);
  }

  // And the no-value case.
  return std::make_unique<SampleVectorIterator>(
      base::span<const HistogramBase::AtomicCount>(), bucket_ranges_);
}

std::unique_ptr<SampleCountIterator> SampleVectorBase::ExtractingIterator() {
  // Handle the single-sample case.
  SingleSample sample = single_sample().Extract();
  if (sample.count != 0) {
    // Note that we have already extracted the samples (i.e., reset the
    // underlying data back to 0 samples), even before the iterator has been
    // used. This means that the caller needs to ensure that this value is
    // eventually consumed, otherwise the sample is lost. There is no iterator
    // that simply points to the underlying SingleSample and extracts its value
    // on-demand because there are tricky edge cases when the SingleSample is
    // disabled between the creation of the iterator and the actual call to
    // Get() (for example, due to histogram changing to use a vector to store
    // its samples).
    return std::make_unique<SingleSampleIterator>(
        bucket_ranges_->range(sample.bucket),
        bucket_ranges_->range(sample.bucket + 1), sample.count, sample.bucket,
        /*value_was_extracted=*/true);
  }

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage()) {
    return std::make_unique<ExtractingSampleVectorIterator>(
        base::make_span(counts(), counts_size()), bucket_ranges_);
  }

  // And the no-value case.
  return std::make_unique<ExtractingSampleVectorIterator>(
      base::span<HistogramBase::AtomicCount>(), bucket_ranges_);
}

bool SampleVectorBase::AddSubtractImpl(SampleCountIterator* iter,
                                       HistogramSamples::Operator op) {
  // Stop now if there's nothing to do.
  if (iter->Done())
    return true;

  // Get the first value and its index.
  HistogramBase::Sample min;
  int64_t max;
  HistogramBase::Count count;
  iter->Get(&min, &max, &count);
  size_t dest_index = GetBucketIndex(min);

  // The destination must be a superset of the source, meaning that though the
  // incoming ranges will find an exact match, the incoming bucket-index, if
  // it exists, may be offset from the destination bucket-index. Calculate
  // that offset of the passed iterator; there are no overflow checks because
  // 2's complement math will work it out in the end.
  //
  // Because GetBucketIndex() always returns the same true or false result for
  // a given iterator object, |index_offset| is either set here and used below,
  // or never set and never used. The compiler doesn't know this, though, which
  // is why it's necessary to initialize it to something.
  size_t index_offset = 0;
  size_t iter_index;
  if (iter->GetBucketIndex(&iter_index))
    index_offset = dest_index - iter_index;
  if (dest_index >= counts_size())
    return false;

  // Post-increment. Information about the current sample is not available
  // after this point.
  iter->Next();

  // Single-value storage is possible if there is no counts storage and the
  // retrieved entry is the only one in the iterator.
  if (!counts()) {
    if (iter->Done()) {
      // Don't call AccumulateSingleSample because that updates sum and count
      // which was already done by the caller of this method.
      if (single_sample().Accumulate(
              dest_index, op == HistogramSamples::ADD ? count : -count)) {
        // Handle race-condition that mounted counts storage between above and
        // here.
        if (counts())
          MoveSingleSampleToCounts();
        return true;
      }
    }

    // The counts storage will be needed to hold the multiple incoming values.
    MountCountsStorageAndMoveSingleSample();
  }

  // Go through the iterator and add the counts into the correct bucket.
  while (true) {
    // Ensure that the sample's min/max match the range's min/max.
    if (min != bucket_ranges_->range(dest_index) ||
        max != bucket_ranges_->range(dest_index + 1)) {
#if !BUILDFLAG(IS_NACL)
      // TODO(crbug/1432981): Remove these. They are used to investigate
      // unexpected failures.
      SCOPED_CRASH_KEY_NUMBER("SampleVector", "min", min);
      SCOPED_CRASH_KEY_NUMBER("SampleVector", "max", max);
      SCOPED_CRASH_KEY_NUMBER("SampleVector", "range_min",
                              bucket_ranges_->range(dest_index));
      SCOPED_CRASH_KEY_NUMBER("SampleVector", "range_max",
                              bucket_ranges_->range(dest_index + 1));
#endif  // !BUILDFLAG(IS_NACL)
      NOTREACHED() << "sample=" << min << "," << max
                   << "; range=" << bucket_ranges_->range(dest_index) << ","
                   << bucket_ranges_->range(dest_index + 1);
      return false;
    }

    // Sample's bucket matches exactly. Adjust count.
    subtle::NoBarrier_AtomicIncrement(
        &counts()[dest_index], op == HistogramSamples::ADD ? count : -count);

    // Advance to the next iterable sample. See comments above for how
    // everything works.
    if (iter->Done())
      return true;
    iter->Get(&min, &max, &count);
    if (iter->GetBucketIndex(&iter_index)) {
      // Destination bucket is a known offset from the source bucket.
      dest_index = iter_index + index_offset;
    } else {
      // Destination bucket has to be determined anew each time.
      dest_index = GetBucketIndex(min);
    }
    if (dest_index >= counts_size())
      return false;
    iter->Next();
  }
}

// Uses simple binary search or calculates the index directly if it's an "exact"
// linear histogram. This is very general, but there are better approaches if we
// knew that the buckets were linearly distributed.
size_t SampleVectorBase::GetBucketIndex(Sample value) const {
  size_t bucket_count = bucket_ranges_->bucket_count();
  CHECK_GE(bucket_count, 1u);
  CHECK_GE(value, bucket_ranges_->range(0));
  CHECK_LT(value, bucket_ranges_->range(bucket_count));

  // For "exact" linear histograms (i.e. bucket_count = maximum + 1), the
  // minimum is 1 and every bucket has size 1, so there is no need to binary
  // search for the bucket index: the bucket index for |value| is just |value|.
  Sample maximum = bucket_ranges_->range(bucket_count - 1);
  if (maximum == static_cast<Sample>(bucket_count - 1)) {
    // |value| is in the underflow bucket.
    if (value < 1)
      return 0;
    // |value| is in the overflow bucket.
    if (value > maximum)
      return bucket_count - 1;
    return static_cast<size_t>(value);
  }

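  // Binary search over the bucket boundaries, maintaining the invariant
  // range(under) <= value < range(over) until the window narrows to a single
  // bucket.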
  size_t under = 0;
  size_t over = bucket_count;
  size_t mid;
  do {
    DCHECK_GE(over, under);
    mid = under + (over - under) / 2;
    if (mid == under)
      break;
    if (bucket_ranges_->range(mid) <= value)
      under = mid;
    else
      over = mid;
  } while (true);

  DCHECK_LE(bucket_ranges_->range(mid), value);
  CHECK_GT(bucket_ranges_->range(mid + 1), value);
  return mid;
}

void SampleVectorBase::MoveSingleSampleToCounts() {
  DCHECK(counts());

  // Disable the single-sample since there is now counts storage for the data.
  SingleSample sample = single_sample().ExtractAndDisable();

  // Stop here if there is no "count" as trying to find the bucket index of
  // an invalid (including zero) "value" will crash.
  if (sample.count == 0)
    return;

  // Stop here if the sample bucket would be out of range for the AtomicCount
  // array.
  if (sample.bucket >= counts_size()) {
    return;
  }

  // Move the value into storage. Sum and redundant-count already account
  // for this entry so no need to call IncreaseSumAndCount().
  subtle::NoBarrier_AtomicIncrement(&counts()[sample.bucket], sample.count);
}

void SampleVectorBase::MountCountsStorageAndMoveSingleSample() {
  // There are many SampleVector objects and the lock is needed very
  // infrequently (just when advancing from single-sample to multi-sample) so
  // define a single, global lock that all can use. This lock only prevents
  // concurrent entry into the code below; access and updates to |counts_|
  // still require atomic operations.
  static LazyInstance<Lock>::Leaky counts_lock = LAZY_INSTANCE_INITIALIZER;
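  // Double-checked locking: the relaxed load outside the lock keeps the common
  // case, where storage is already mounted, lock-free.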
  if (!counts_.load(std::memory_order_relaxed)) {
    AutoLock lock(counts_lock.Get());
    if (!counts_.load(std::memory_order_relaxed)) {
      // Create the actual counts storage while the above lock is acquired.
      HistogramBase::Count* counts = CreateCountsStorageWhileLocked();
      DCHECK(counts);

      // Point |counts_| to the newly created storage. This is done while
      // locked to prevent possible concurrent calls to CreateCountsStorage
      // but, between that call and here, other threads could notice the
      // existence of the storage and race with this to set_counts(). That's
      // okay because (a) it's atomic and (b) it always writes the same value.
      set_counts(counts);
    }
  }

  // Move any single-sample into the newly mounted storage.
  MoveSingleSampleToCounts();
}

SampleVector::SampleVector(const BucketRanges* bucket_ranges)
    : SampleVector(0, bucket_ranges) {}

SampleVector::SampleVector(uint64_t id, const BucketRanges* bucket_ranges)
    : SampleVectorBase(id, std::make_unique<LocalMetadata>(), bucket_ranges) {}

SampleVector::~SampleVector() = default;

bool SampleVector::IsDefinitelyEmpty() const {
  // If we are still using SingleSample, and it has a count of 0, then |this|
  // has no samples. If we are not using SingleSample, always return false, even
  // though it is possible that |this| has no samples (e.g. we are using a
  // counts array and all the bucket counts are 0). If we are wrong, this will
  // just make the caller perform some extra work thinking that |this| is
  // non-empty.
  AtomicSingleSample sample = single_sample();
  return HistogramSamples::IsDefinitelyEmpty() && !sample.IsDisabled() &&
         sample.Load().count == 0;
}

bool SampleVector::MountExistingCountsStorage() const {
  // There is never any existing storage other than what is already in use.
  return counts() != nullptr;
}

std::string SampleVector::GetAsciiHeader(StringPiece histogram_name,
                                         int32_t flags) const {
  Count sample_count = TotalCount();
  std::string output;
  StringAppendF(&output, "Histogram: %.*s recorded %d samples",
                static_cast<int>(histogram_name.size()), histogram_name.data(),
                sample_count);
  if (sample_count == 0) {
    DCHECK_EQ(sum(), 0);
  } else {
    double mean = static_cast<float>(sum()) / sample_count;
    StringAppendF(&output, ", mean = %.1f", mean);
  }
  if (flags)
    StringAppendF(&output, " (flags = 0x%x)", flags);
  return output;
}

std::string SampleVector::GetAsciiBody() const {
  Count sample_count = TotalCount();

  // Prepare to normalize graphical rendering of bucket contents.
  double max_size = 0;
  double scaling_factor = 1;
  max_size = GetPeakBucketSize();
  // Scale histogram bucket counts to take at most 72 characters.
  // Note: Keep in sync with kLineLength in histogram_samples.cc.
  const double kLineLength = 72;
  if (max_size > kLineLength)
    scaling_factor = kLineLength / max_size;

  // Calculate largest print width needed for any of our bucket range displays.
  size_t print_width = 1;
  for (uint32_t i = 0; i < bucket_count(); ++i) {
    if (GetCountAtIndex(i)) {
      size_t width =
          GetSimpleAsciiBucketRange(bucket_ranges()->range(i)).size() + 1;
      if (width > print_width)
        print_width = width;
    }
  }

  int64_t remaining = sample_count;
  int64_t past = 0;
  std::string output;
  // Output the actual histogram graph.
  for (uint32_t i = 0; i < bucket_count(); ++i) {
    Count current = GetCountAtIndex(i);
    remaining -= current;
    std::string range = GetSimpleAsciiBucketRange(bucket_ranges()->range(i));
    output.append(range);
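    // Pad the range label to |print_width| + 1 characters so the graph bars
    // line up in a single column.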
    for (size_t j = 0; range.size() + j < print_width + 1; ++j)
      output.push_back(' ');
    if (0 == current && i < bucket_count() - 1 && 0 == GetCountAtIndex(i + 1)) {
      while (i < bucket_count() - 1 && 0 == GetCountAtIndex(i + 1)) {
        ++i;
      }
      output.append("... \n");
      continue;  // No reason to plot emptiness.
    }
    Count current_size = round(current * scaling_factor);
    WriteAsciiBucketGraph(current_size, kLineLength, &output);
    WriteAsciiBucketContext(past, current, remaining, i, &output);
    output.append("\n");
    past += current;
  }
  DCHECK_EQ(sample_count, past);
  return output;
}

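// Returns the largest single-bucket count. GetAsciiBody() uses this to scale
// the ASCII bar graph so that the tallest bucket fits within one line.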
double SampleVector::GetPeakBucketSize() const {
  Count max = 0;
  for (uint32_t i = 0; i < bucket_count(); ++i) {
    Count current = GetCountAtIndex(i);
    if (current > max)
      max = current;
  }
  return max;
}

void SampleVector::WriteAsciiBucketContext(int64_t past,
                                           Count current,
                                           int64_t remaining,
                                           uint32_t current_bucket_index,
                                           std::string* output) const {
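  // |scaled_sum| is 1% of the total sample count, so dividing a count by it
  // yields that count as a percentage of the total.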
  double scaled_sum = (past + current + remaining) / 100.0;
  WriteAsciiBucketValue(current, scaled_sum, output);
  if (0 < current_bucket_index) {
    double percentage = past / scaled_sum;
    StringAppendF(output, " {%3.1f%%}", percentage);
  }
}

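// In-memory storage: the counts live in |local_counts_|, which is owned by
// this object; no persistent memory is involved.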
HistogramBase::AtomicCount* SampleVector::CreateCountsStorageWhileLocked() {
  local_counts_.resize(counts_size());
  return &local_counts_[0];
}

PersistentSampleVector::PersistentSampleVector(
    uint64_t id,
    const BucketRanges* bucket_ranges,
    Metadata* meta,
    const DelayedPersistentAllocation& counts)
    : SampleVectorBase(id, meta, bucket_ranges), persistent_counts_(counts) {
  // Only mount the full storage if the single-sample has been disabled.
  // Otherwise, it is possible for this object instance to start using (empty)
  // storage that was created incidentally while another instance continues to
  // update to the single sample. This "incidental creation" can happen because
  // the memory is a DelayedPersistentAllocation which allows multiple memory
  // blocks within it and applies an all-or-nothing approach to the allocation.
  // Thus, a request elsewhere for one of the _other_ blocks would make _this_
  // block available even though nothing has explicitly requested it.
  //
  // Note that it's not possible for the ctor to mount existing storage and
  // move any single-sample to it because sometimes the persistent memory is
  // read-only. Only non-const methods (which assume that memory is read/write)
  // can do that.
  if (single_sample().IsDisabled()) {
    bool success = MountExistingCountsStorage();
    DCHECK(success);
  }
}

PersistentSampleVector::~PersistentSampleVector() = default;

bool PersistentSampleVector::IsDefinitelyEmpty() const {
  // Not implemented.
  NOTREACHED();

  // Always return false. If we are wrong, this will just make the caller
  // perform some extra work thinking that |this| is non-empty.
  return false;
}

bool PersistentSampleVector::MountExistingCountsStorage() const {
  // There is no early exit if counts is not yet mounted because, given that
  // this is a virtual function, it's more efficient to do that at the call-
  // site. There is no danger, however, should this get called anyway (perhaps
  // because of a race condition) because at worst the |counts_| value would
  // be over-written (in an atomic manner) with the exact same address.

  if (!persistent_counts_.reference())
    return false;  // Nothing to mount.

  // Mount the counts array in position.
  set_counts(
      static_cast<HistogramBase::AtomicCount*>(persistent_counts_.Get()));

  // The above shouldn't fail but can if the data is corrupt or incomplete.
  return counts() != nullptr;
}

HistogramBase::AtomicCount*
PersistentSampleVector::CreateCountsStorageWhileLocked() {
  void* mem = persistent_counts_.Get();
  if (!mem) {
    // The above shouldn't fail but can if Bad Things(tm) are occurring in the
    // persistent allocator. Crashing isn't a good option so instead just
    // allocate something from the heap and return that. There will be no
    // sharing or persistence but worse things are already happening.
    return new HistogramBase::AtomicCount[counts_size()];
  }

  return static_cast<HistogramBase::AtomicCount*>(mem);
}

}  // namespace base