// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/metrics/sample_vector.h"

#include "base/lazy_instance.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/persistent_memory_allocator.h"
#include "base/numerics/safe_conversions.h"
#include "base/synchronization/lock.h"
#include "base/threading/platform_thread.h"

// This SampleVector makes use of the single-sample embedded in the base
// HistogramSamples class. If the count is non-zero then there is guaranteed
// (within the bounds of "eventual consistency") to be no allocated external
// storage. Once the full counts storage is allocated, the single-sample must
// be extracted and disabled.
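//
// For example (an illustrative sketch, not additional API): the first
// Accumulate() call can be held entirely in the embedded single-sample; a
// later call that lands in a different bucket cannot, so the full counts
// storage is mounted and the pending single-sample is moved into it.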

namespace base {

typedef HistogramBase::Count Count;
typedef HistogramBase::Sample Sample;

SampleVectorBase::SampleVectorBase(uint64_t id,
                                   Metadata* meta,
                                   const BucketRanges* bucket_ranges)
    : HistogramSamples(id, meta), bucket_ranges_(bucket_ranges) {
  CHECK_GE(bucket_ranges_->bucket_count(), 1u);
}

SampleVectorBase::~SampleVectorBase() = default;

void SampleVectorBase::Accumulate(Sample value, Count count) {
  const size_t bucket_index = GetBucketIndex(value);

  // Handle the single-sample case.
  if (!counts()) {
    // Try to accumulate the parameters into the single-count entry.
    if (AccumulateSingleSample(value, count, bucket_index)) {
      // A race condition could lead to a new single-sample being accumulated
      // above just after another thread executed the MountCountsStorage below.
      // Since it is mounted, it could be mounted elsewhere and have values
      // written to it. It's not allowed to have both a single-sample and
      // entries in the counts array so move the single-sample.
      if (counts())
        MoveSingleSampleToCounts();
      return;
    }

    // Need real storage to hold both what was in the single-sample and the
    // parameter information.
    MountCountsStorageAndMoveSingleSample();
  }

  // Handle the multi-sample case.
  Count new_value =
      subtle::NoBarrier_AtomicIncrement(&counts()[bucket_index], count);
  IncreaseSumAndCount(strict_cast<int64_t>(count) * value, count);

  // TODO(bcwhite) Remove after crbug.com/682680.
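  // A sign flip between the old and new bucket values indicates the count
  // either overflowed past INT_MAX or was already negative; record it for
  // that investigation.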
  Count old_value = new_value - count;
  if ((new_value >= 0) != (old_value >= 0) && count > 0)
    RecordNegativeSample(SAMPLES_ACCUMULATE_OVERFLOW, count);
}

Count SampleVectorBase::GetCount(Sample value) const {
  return GetCountAtIndex(GetBucketIndex(value));
}

Count SampleVectorBase::TotalCount() const {
  // Handle the single-sample case.
  SingleSample sample = single_sample().Load();
  if (sample.count != 0)
    return sample.count;

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage()) {
    Count count = 0;
    size_t size = counts_size();
    const HistogramBase::AtomicCount* counts_array = counts();
    for (size_t i = 0; i < size; ++i) {
      count += subtle::NoBarrier_Load(&counts_array[i]);
    }
    return count;
  }

  // And the no-value case.
  return 0;
}

Count SampleVectorBase::GetCountAtIndex(size_t bucket_index) const {
  DCHECK(bucket_index < counts_size());

  // Handle the single-sample case.
  SingleSample sample = single_sample().Load();
  if (sample.count != 0)
    return sample.bucket == bucket_index ? sample.count : 0;

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage())
    return subtle::NoBarrier_Load(&counts()[bucket_index]);

  // And the no-value case.
  return 0;
}

std::unique_ptr<SampleCountIterator> SampleVectorBase::Iterator() const {
  // Handle the single-sample case.
  SingleSample sample = single_sample().Load();
  if (sample.count != 0) {
    return std::make_unique<SingleSampleIterator>(
        bucket_ranges_->range(sample.bucket),
        bucket_ranges_->range(sample.bucket + 1), sample.count, sample.bucket);
  }

  // Handle the multi-sample case.
  if (counts() || MountExistingCountsStorage()) {
    return std::make_unique<SampleVectorIterator>(counts(), counts_size(),
                                                  bucket_ranges_);
  }

  // And the no-value case.
  return std::make_unique<SampleVectorIterator>(nullptr, 0, bucket_ranges_);
}

bool SampleVectorBase::AddSubtractImpl(SampleCountIterator* iter,
                                       HistogramSamples::Operator op) {
  // Stop now if there's nothing to do.
  if (iter->Done())
    return true;

  // Get the first value and its index.
  HistogramBase::Sample min;
  int64_t max;
  HistogramBase::Count count;
  iter->Get(&min, &max, &count);
  size_t dest_index = GetBucketIndex(min);

  // The destination must be a superset of the source, meaning that though the
  // incoming ranges will find an exact match, the incoming bucket-index, if
  // it exists, may be offset from the destination bucket-index. Calculate
  // that offset for the passed iterator; there are no overflow checks
  // because 2's complement math will work it out in the end.
  //
  // Because the iterator's GetBucketIndex() always returns the same true or
  // false result for a given iterator object, |index_offset| is either set
  // here and used below, or never set and never used. The compiler doesn't
  // know this, though, which is why it's necessary to initialize it to
  // something.
  size_t index_offset = 0;
  size_t iter_index;
  if (iter->GetBucketIndex(&iter_index))
    index_offset = dest_index - iter_index;
  if (dest_index >= counts_size())
    return false;

  // Post-increment. Information about the current sample is not available
  // after this point.
  iter->Next();

  // Single-value storage is possible if there is no counts storage and the
  // retrieved entry is the only one in the iterator.
  if (!counts()) {
    if (iter->Done()) {
      // Don't call AccumulateSingleSample because that updates sum and count
      // which was already done by the caller of this method.
      if (single_sample().Accumulate(
              dest_index, op == HistogramSamples::ADD ? count : -count)) {
        // Handle race-condition that mounted counts storage between above and
        // here.
        if (counts())
          MoveSingleSampleToCounts();
        return true;
      }
    }

    // The counts storage will be needed to hold the multiple incoming values.
    MountCountsStorageAndMoveSingleSample();
  }

  // Go through the iterator and add the counts into the correct bucket.
  while (true) {
    // Ensure that the sample's min/max match the ranges' min/max.
    if (min != bucket_ranges_->range(dest_index) ||
        max != bucket_ranges_->range(dest_index + 1)) {
      NOTREACHED() << "sample=" << min << "," << max
                   << "; range=" << bucket_ranges_->range(dest_index) << ","
                   << bucket_ranges_->range(dest_index + 1);
      return false;
    }

    // Sample's bucket matches exactly. Adjust count.
    subtle::NoBarrier_AtomicIncrement(
        &counts()[dest_index], op == HistogramSamples::ADD ? count : -count);

    // Advance to the next iterable sample. See comments above for how
    // everything works.
    if (iter->Done())
      return true;
    iter->Get(&min, &max, &count);
    if (iter->GetBucketIndex(&iter_index)) {
      // Destination bucket is a known offset from the source bucket.
      dest_index = iter_index + index_offset;
    } else {
      // Destination bucket has to be determined anew each time.
      dest_index = GetBucketIndex(min);
    }
    if (dest_index >= counts_size())
      return false;
    iter->Next();
  }
}

// Use simple binary search. This is very general, but there would be better
// approaches if we knew that the buckets were linearly distributed.
size_t SampleVectorBase::GetBucketIndex(Sample value) const {
  size_t bucket_count = bucket_ranges_->bucket_count();
  CHECK_GE(bucket_count, 1u);
  CHECK_GE(value, bucket_ranges_->range(0));
  CHECK_LT(value, bucket_ranges_->range(bucket_count));

  size_t under = 0;
  size_t over = bucket_count;
  size_t mid;
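  // Loop invariant: range(under) <= value < range(over), established by the
  // CHECKs above and preserved by each step of the binary search below.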
  do {
    DCHECK_GE(over, under);
    mid = under + (over - under)/2;
    if (mid == under)
      break;
    if (bucket_ranges_->range(mid) <= value)
      under = mid;
    else
      over = mid;
  } while (true);

  DCHECK_LE(bucket_ranges_->range(mid), value);
  CHECK_GT(bucket_ranges_->range(mid + 1), value);
  return mid;
}

void SampleVectorBase::MoveSingleSampleToCounts() {
  DCHECK(counts());

  // Disable the single-sample since there is now counts storage for the data.
  SingleSample sample = single_sample().Extract(/*disable=*/true);

  // Stop here if there is no "count" as trying to find the bucket index of
  // an invalid (including zero) "value" will crash.
  if (sample.count == 0)
    return;

  // Move the value into storage. Sum and redundant-count already account
  // for this entry so no need to call IncreaseSumAndCount().
  subtle::NoBarrier_AtomicIncrement(&counts()[sample.bucket], sample.count);
}

void SampleVectorBase::MountCountsStorageAndMoveSingleSample() {
  // There are many SampleVector objects and the lock is needed very
  // infrequently (just when advancing from single-sample to multi-sample) so
  // define a single, global lock that all can use. This lock only prevents
  // concurrent entry into the code below; access and updates to |counts_|
  // still require atomic operations.
  static LazyInstance<Lock>::Leaky counts_lock = LAZY_INSTANCE_INITIALIZER;
  if (subtle::NoBarrier_Load(&counts_) == 0) {
    AutoLock lock(counts_lock.Get());
    if (subtle::NoBarrier_Load(&counts_) == 0) {
      // Create the actual counts storage while the above lock is acquired.
      HistogramBase::Count* counts = CreateCountsStorageWhileLocked();
      DCHECK(counts);

      // Point |counts_| to the newly created storage. This is done while
      // locked to prevent possible concurrent calls to CreateCountsStorage
      // but, between that call and here, other threads could notice the
      // existence of the storage and race with this to set_counts(). That's
      // okay because (a) it's atomic and (b) it always writes the same value.
      set_counts(counts);
    }
  }

  // Move any single-sample into the newly mounted storage.
  MoveSingleSampleToCounts();
}

SampleVector::SampleVector(const BucketRanges* bucket_ranges)
    : SampleVector(0, bucket_ranges) {}

SampleVector::SampleVector(uint64_t id, const BucketRanges* bucket_ranges)
    : SampleVectorBase(id, new LocalMetadata(), bucket_ranges) {}

SampleVector::~SampleVector() {
  delete static_cast<LocalMetadata*>(meta());
}

bool SampleVector::MountExistingCountsStorage() const {
  // There is never any existing storage other than what is already in use.
  return counts() != nullptr;
}

HistogramBase::AtomicCount* SampleVector::CreateCountsStorageWhileLocked() {
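  // Local (non-persistent) storage: a heap-backed vector holding one count per
  // bucket. This is only called under the lock taken in
  // MountCountsStorageAndMoveSingleSample(), so the vector is sized just once.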
  local_counts_.resize(counts_size());
  return &local_counts_[0];
}

PersistentSampleVector::PersistentSampleVector(
    uint64_t id,
    const BucketRanges* bucket_ranges,
    Metadata* meta,
    const DelayedPersistentAllocation& counts)
    : SampleVectorBase(id, meta, bucket_ranges), persistent_counts_(counts) {
  // Only mount the full storage if the single-sample has been disabled.
  // Otherwise, it is possible for this object instance to start using (empty)
  // storage that was created incidentally while another instance continues to
  // update the single sample. This "incidental creation" can happen because
  // the memory is a DelayedPersistentAllocation which allows multiple memory
  // blocks within it and applies an all-or-nothing approach to the allocation.
  // Thus, a request elsewhere for one of the _other_ blocks would make _this_
  // block available even though nothing has explicitly requested it.
  //
  // Note that it's not possible for the ctor to mount existing storage and
  // move any single-sample to it because sometimes the persistent memory is
  // read-only. Only non-const methods (which assume that memory is read/write)
  // can do that.
  if (single_sample().IsDisabled()) {
    bool success = MountExistingCountsStorage();
    DCHECK(success);
  }
}

PersistentSampleVector::~PersistentSampleVector() = default;

bool PersistentSampleVector::MountExistingCountsStorage() const {
  // There is no early exit if counts is not yet mounted because, given that
  // this is a virtual function, it's more efficient to do that at the call-
  // site. There is no danger, however, should this get called anyway (perhaps
  // because of a race condition) because at worst the |counts_| value would
  // be over-written (in an atomic manner) with the exact same address.

  if (!persistent_counts_.reference())
    return false;  // Nothing to mount.

  // Mount the counts array in position.
  set_counts(
      static_cast<HistogramBase::AtomicCount*>(persistent_counts_.Get()));

  // The above shouldn't fail but can if the data is corrupt or incomplete.
  return counts() != nullptr;
}

HistogramBase::AtomicCount*
PersistentSampleVector::CreateCountsStorageWhileLocked() {
  void* mem = persistent_counts_.Get();
  if (!mem) {
    // The above shouldn't fail but can if Bad Things(tm) are occurring in the
    // persistent allocator. Crashing isn't a good option so instead just
    // allocate something from the heap and return that. There will be no
    // sharing or persistence but worse things are already happening.
    return new HistogramBase::AtomicCount[counts_size()];
  }

  return static_cast<HistogramBase::AtomicCount*>(mem);
}

SampleVectorIterator::SampleVectorIterator(
    const std::vector<HistogramBase::AtomicCount>* counts,
    const BucketRanges* bucket_ranges)
    : counts_(&(*counts)[0]),
      counts_size_(counts->size()),
      bucket_ranges_(bucket_ranges),
      index_(0) {
  DCHECK_GE(bucket_ranges_->bucket_count(), counts_size_);
  SkipEmptyBuckets();
}

SampleVectorIterator::SampleVectorIterator(
    const HistogramBase::AtomicCount* counts,
    size_t counts_size,
    const BucketRanges* bucket_ranges)
    : counts_(counts),
      counts_size_(counts_size),
      bucket_ranges_(bucket_ranges),
      index_(0) {
  DCHECK_GE(bucket_ranges_->bucket_count(), counts_size_);
  SkipEmptyBuckets();
}

SampleVectorIterator::~SampleVectorIterator() = default;

bool SampleVectorIterator::Done() const {
  return index_ >= counts_size_;
}

void SampleVectorIterator::Next() {
  DCHECK(!Done());
  index_++;
  SkipEmptyBuckets();
}

void SampleVectorIterator::Get(HistogramBase::Sample* min,
                               int64_t* max,
                               HistogramBase::Count* count) const {
  DCHECK(!Done());
  if (min != nullptr)
    *min = bucket_ranges_->range(index_);
  if (max != nullptr)
    *max = strict_cast<int64_t>(bucket_ranges_->range(index_ + 1));
  if (count != nullptr)
    *count = subtle::NoBarrier_Load(&counts_[index_]);
}

bool SampleVectorIterator::GetBucketIndex(size_t* index) const {
  DCHECK(!Done());
  if (index != nullptr)
    *index = index_;
  return true;
}

void SampleVectorIterator::SkipEmptyBuckets() {
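  // Advance |index_| past buckets whose count is zero so that iteration only
  // stops at buckets actually containing samples.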
  if (Done())
    return;

  while (index_ < counts_size_) {
    if (subtle::NoBarrier_Load(&counts_[index_]) != 0)
      return;
    index_++;
  }
}

}  // namespace base