1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_COUNTERS_H_
6 #define V8_COUNTERS_H_
7
8 #include "include/v8.h"
9 #include "src/allocation.h"
10 #include "src/base/atomic-utils.h"
11 #include "src/base/platform/elapsed-timer.h"
12 #include "src/base/platform/time.h"
13 #include "src/globals.h"
14 #include "src/heap-symbols.h"
15 #include "src/isolate.h"
16 #include "src/objects.h"
17 #include "src/runtime/runtime.h"
18 #include "src/tracing/trace-event.h"
19 #include "src/tracing/traced-value.h"
20 #include "src/tracing/tracing-category-observer.h"
21
22 namespace v8 {
23 namespace internal {
24
25 // StatsCounters is an interface for plugging into external
26 // counters for monitoring. Counters can be looked up and
27 // manipulated by name.
28
29 class Counters;
30
31 class StatsTable {
32 public:
33 // Register an application-defined function for recording
34 // subsequent counter statistics.
35 void SetCounterFunction(CounterLookupCallback f);
36
37 // Register an application-defined function to create histograms for
38 // recording subsequent histogram samples.
SetCreateHistogramFunction(CreateHistogramCallback f)39 void SetCreateHistogramFunction(CreateHistogramCallback f) {
40 create_histogram_function_ = f;
41 }
42
43 // Register an application-defined function to add a sample
44 // to a histogram created with CreateHistogram function.
SetAddHistogramSampleFunction(AddHistogramSampleCallback f)45 void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
46 add_histogram_sample_function_ = f;
47 }
48
HasCounterFunction()49 bool HasCounterFunction() const { return lookup_function_ != nullptr; }
50
51 // Lookup the location of a counter by name. If the lookup
52 // is successful, returns a non-nullptr pointer for writing the
53 // value of the counter. Each thread calling this function
54 // may receive a different location to store it's counter.
55 // The return value must not be cached and re-used across
56 // threads, although a single thread is free to cache it.
FindLocation(const char * name)57 int* FindLocation(const char* name) {
58 if (!lookup_function_) return nullptr;
59 return lookup_function_(name);
60 }
61
62 // Create a histogram by name. If the create is successful,
63 // returns a non-nullptr pointer for use with AddHistogramSample
64 // function. min and max define the expected minimum and maximum
65 // sample values. buckets is the maximum number of buckets
66 // that the samples will be grouped into.
CreateHistogram(const char * name,int min,int max,size_t buckets)67 void* CreateHistogram(const char* name,
68 int min,
69 int max,
70 size_t buckets) {
71 if (!create_histogram_function_) return nullptr;
72 return create_histogram_function_(name, min, max, buckets);
73 }
74
75 // Add a sample to a histogram created with the CreateHistogram
76 // function.
AddHistogramSample(void * histogram,int sample)77 void AddHistogramSample(void* histogram, int sample) {
78 if (!add_histogram_sample_function_) return;
79 return add_histogram_sample_function_(histogram, sample);
80 }
81
82 private:
83 friend class Counters;
84
85 explicit StatsTable(Counters* counters);
86
87 CounterLookupCallback lookup_function_;
88 CreateHistogramCallback create_histogram_function_;
89 AddHistogramSampleCallback add_histogram_sample_function_;
90
91 DISALLOW_COPY_AND_ASSIGN(StatsTable);
92 };
93
94 // Base class for stats counters.
95 class StatsCounterBase {
96 protected:
97 Counters* counters_;
98 const char* name_;
99 int* ptr_;
100
StatsCounterBase()101 StatsCounterBase() {}
StatsCounterBase(Counters * counters,const char * name)102 StatsCounterBase(Counters* counters, const char* name)
103 : counters_(counters), name_(name), ptr_(nullptr) {}
104
SetLoc(int * loc,int value)105 void SetLoc(int* loc, int value) { *loc = value; }
IncrementLoc(int * loc)106 void IncrementLoc(int* loc) { (*loc)++; }
IncrementLoc(int * loc,int value)107 void IncrementLoc(int* loc, int value) { (*loc) += value; }
DecrementLoc(int * loc)108 void DecrementLoc(int* loc) { (*loc)--; }
DecrementLoc(int * loc,int value)109 void DecrementLoc(int* loc, int value) { (*loc) -= value; }
110
111 int* FindLocationInStatsTable() const;
112 };
113
114 // StatsCounters are dynamically created values which can be tracked in
115 // the StatsTable. They are designed to be lightweight to create and
116 // easy to use.
117 //
118 // Internally, a counter represents a value in a row of a StatsTable.
119 // The row has a 32bit value for each process/thread in the table and also
120 // a name (stored in the table metadata). Since the storage location can be
121 // thread-specific, this class cannot be shared across threads. Note: This
122 // class is not thread safe.
123 class StatsCounter : public StatsCounterBase {
124 public:
125 // Sets the counter to a specific value.
Set(int value)126 void Set(int value) {
127 if (int* loc = GetPtr()) SetLoc(loc, value);
128 }
129
130 // Increments the counter.
Increment()131 void Increment() {
132 if (int* loc = GetPtr()) IncrementLoc(loc);
133 }
134
Increment(int value)135 void Increment(int value) {
136 if (int* loc = GetPtr()) IncrementLoc(loc, value);
137 }
138
139 // Decrements the counter.
Decrement()140 void Decrement() {
141 if (int* loc = GetPtr()) DecrementLoc(loc);
142 }
143
Decrement(int value)144 void Decrement(int value) {
145 if (int* loc = GetPtr()) DecrementLoc(loc, value);
146 }
147
148 // Is this counter enabled?
149 // Returns false if table is full.
Enabled()150 bool Enabled() { return GetPtr() != nullptr; }
151
152 // Get the internal pointer to the counter. This is used
153 // by the code generator to emit code that manipulates a
154 // given counter without calling the runtime system.
GetInternalPointer()155 int* GetInternalPointer() {
156 int* loc = GetPtr();
157 DCHECK_NOT_NULL(loc);
158 return loc;
159 }
160
161 private:
162 friend class Counters;
163
StatsCounter()164 StatsCounter() {}
StatsCounter(Counters * counters,const char * name)165 StatsCounter(Counters* counters, const char* name)
166 : StatsCounterBase(counters, name), lookup_done_(false) {}
167
168 // Reset the cached internal pointer.
Reset()169 void Reset() { lookup_done_ = false; }
170
171 // Returns the cached address of this counter location.
GetPtr()172 int* GetPtr() {
173 if (lookup_done_) return ptr_;
174 lookup_done_ = true;
175 ptr_ = FindLocationInStatsTable();
176 return ptr_;
177 }
178
179 bool lookup_done_;
180 };
181
182 // Thread safe version of StatsCounter.
// Thread safe version of StatsCounter.
// Mutating operations are declared here and defined out-of-line; presumably
// they synchronize on mutex_ — confirm against the .cc file.
class StatsCounterThreadSafe : public StatsCounterBase {
 public:
  void Set(int Value);
  void Increment();
  void Increment(int value);
  void Decrement();
  void Decrement(int value);
  // Unlike StatsCounter, this checks the already-resolved ptr_ directly;
  // Reset() below performs the table lookup eagerly.
  bool Enabled() { return ptr_ != nullptr; }
  int* GetInternalPointer() {
    DCHECK_NOT_NULL(ptr_);
    return ptr_;
  }

 private:
  friend class Counters;

  // Construction is reserved for the owning Counters object.
  StatsCounterThreadSafe(Counters* counters, const char* name);
  // Re-resolve the StatsTable slot for this counter's name.
  void Reset() { ptr_ = FindLocationInStatsTable(); }

  base::Mutex mutex_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(StatsCounterThreadSafe);
};
206
207 // A Histogram represents a dynamically created histogram in the
208 // StatsTable. Note: This class is thread safe.
// A Histogram represents a dynamically created histogram in the
// StatsTable. Note: This class is thread safe.
class Histogram {
 public:
  // Add a single sample to this histogram (defined out-of-line).
  void AddSample(int sample);

  // Returns true if this histogram is enabled, i.e. the embedder-created
  // histogram handle has been resolved.
  bool Enabled() { return histogram_ != nullptr; }

  const char* name() { return name_; }

  // Configured sample range and bucket count, as passed at construction.
  int min() const { return min_; }
  int max() const { return max_; }
  int num_buckets() const { return num_buckets_; }

  // Asserts that |expected_counters| are the same as the Counters this
  // Histogram reports to.
  void AssertReportsToCounters(Counters* expected_counters) {
    DCHECK_EQ(counters_, expected_counters);
  }

 protected:
  // Default constructor leaves members indeterminate; only the owning
  // Counters object should use it before calling Reset().
  Histogram() {}
  Histogram(const char* name, int min, int max, int num_buckets,
            Counters* counters)
      : name_(name),
        min_(min),
        max_(max),
        num_buckets_(num_buckets),
        histogram_(nullptr),
        counters_(counters) {
    DCHECK(counters_);
  }

  Counters* counters() const { return counters_; }

  // Reset the cached internal pointer by re-creating the embedder histogram.
  void Reset() { histogram_ = CreateHistogram(); }

 private:
  friend class Counters;

  // Creates the embedder-side histogram handle via the StatsTable.
  void* CreateHistogram() const;

  const char* name_;
  int min_;
  int max_;
  int num_buckets_;
  void* histogram_;  // Opaque embedder histogram handle.
  Counters* counters_;
};
259
// Time unit in which a TimedHistogram records its samples.
enum class HistogramTimerResolution { MILLISECOND, MICROSECOND };
261
262 // A thread safe histogram timer. It also allows distributions of
263 // nested timed results.
// A thread safe histogram timer. It also allows distributions of
// nested timed results. All Start/Stop/RecordAbandon methods are defined
// out-of-line.
class TimedHistogram : public Histogram {
 public:
  // Start the timer. Log if isolate non-null.
  void Start(base::ElapsedTimer* timer, Isolate* isolate);

  // Stop the timer and record the results. Log if isolate non-null.
  void Stop(base::ElapsedTimer* timer, Isolate* isolate);

  // Records a TimeDelta::Max() result. Useful to record percentage of tasks
  // that never got to run in a given scenario. Log if isolate non-null.
  void RecordAbandon(base::ElapsedTimer* timer, Isolate* isolate);

 protected:
  friend class Counters;
  // Unit used when converting elapsed time into histogram samples.
  HistogramTimerResolution resolution_;

  // Default constructor for Counters-managed storage; resolution_ left
  // unset until properly constructed.
  TimedHistogram() {}
  TimedHistogram(const char* name, int min, int max,
                 HistogramTimerResolution resolution, int num_buckets,
                 Counters* counters)
      : Histogram(name, min, max, num_buckets, counters),
        resolution_(resolution) {}
  void AddTimeSample();
};
288
289 // Helper class for scoping a TimedHistogram.
// Helper class for scoping a TimedHistogram: starts the histogram's timer on
// construction and stops/records it on destruction (RAII).
class TimedHistogramScope {
 public:
  // |isolate| may be null; it is forwarded to Start/Stop for logging.
  explicit TimedHistogramScope(TimedHistogram* histogram,
                               Isolate* isolate = nullptr)
      : histogram_(histogram), isolate_(isolate) {
    histogram_->Start(&timer_, isolate);
  }

  ~TimedHistogramScope() { histogram_->Stop(&timer_, isolate_); }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* histogram_;
  Isolate* isolate_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(TimedHistogramScope);
};
307
308 // Helper class for recording a TimedHistogram asynchronously with manual
309 // controls (it will not generate a report if destroyed without explicitly
// triggering a report). |async_counters| should be a shared_ptr to
// |histogram->counters()|, making it safe to report to an
// AsyncTimedHistogram after the associated isolate has been destroyed.
313 // AsyncTimedHistogram can be moved/copied to avoid computing Now() multiple
314 // times when the times of multiple tasks are identical; each copy will generate
315 // its own report.
// Helper class for recording a TimedHistogram asynchronously with manual
// controls (it will not generate a report if destroyed without explicitly
// triggering a report). |async_counters| keeps the Counters (and hence the
// histogram) alive after the associated isolate is destroyed. Copies each
// generate their own report from the same start time.
class AsyncTimedHistogram {
 public:
  // Starts the timer immediately; asserts |async_counters| matches the
  // Counters the histogram reports to.
  explicit AsyncTimedHistogram(TimedHistogram* histogram,
                               std::shared_ptr<Counters> async_counters)
      : histogram_(histogram), async_counters_(std::move(async_counters)) {
    histogram_->AssertReportsToCounters(async_counters_.get());
    histogram_->Start(&timer_, nullptr);
  }

  // Destruction without RecordDone/RecordAbandon intentionally reports
  // nothing.
  ~AsyncTimedHistogram() = default;

  AsyncTimedHistogram(const AsyncTimedHistogram& other) = default;
  AsyncTimedHistogram& operator=(const AsyncTimedHistogram& other) = default;
  AsyncTimedHistogram(AsyncTimedHistogram&& other) = default;
  AsyncTimedHistogram& operator=(AsyncTimedHistogram&& other) = default;

  // Records the time elapsed to |histogram_| and stops |timer_|.
  void RecordDone() { histogram_->Stop(&timer_, nullptr); }

  // Records TimeDelta::Max() to |histogram_| and stops |timer_|.
  void RecordAbandon() { histogram_->RecordAbandon(&timer_, nullptr); }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* histogram_;
  std::shared_ptr<Counters> async_counters_;
};
343
344 // Helper class for scoping a TimedHistogram, where the histogram is selected at
345 // stop time rather than start time.
346 // TODO(leszeks): This is heavily reliant on TimedHistogram::Start() doing
347 // nothing but starting the timer, and TimedHistogram::Stop() logging the sample
348 // correctly even if Start() was not called. This happens to be true iff Stop()
349 // is passed a null isolate, but that's an implementation detail of
350 // TimedHistogram, and we shouldn't rely on it.
// Helper class for scoping a TimedHistogram where the histogram is selected
// at stop time rather than start time; see the TODO above about the reliance
// on Stop()-with-null-isolate behavior.
class LazyTimedHistogramScope {
 public:
  // Starts timing immediately even though no histogram is attached yet.
  LazyTimedHistogramScope() : histogram_(nullptr) { timer_.Start(); }
  ~LazyTimedHistogramScope() {
    // We should set the histogram before this scope exits.
    DCHECK_NOT_NULL(histogram_);
    histogram_->Stop(&timer_, nullptr);
  }

  // Attach the histogram that will receive the sample at scope exit.
  void set_histogram(TimedHistogram* histogram) { histogram_ = histogram; }

 private:
  base::ElapsedTimer timer_;
  TimedHistogram* histogram_;
};
366
367 // A HistogramTimer allows distributions of non-nested timed results
368 // to be created. WARNING: This class is not thread safe and can only
369 // be run on the foreground thread.
// A HistogramTimer allows distributions of non-nested timed results
// to be created. WARNING: This class is not thread safe and can only
// be run on the foreground thread. It carries its own ElapsedTimer, unlike
// TimedHistogram where the caller supplies one.
class HistogramTimer : public TimedHistogram {
 public:
  // Note: public for testing purposes only.
  HistogramTimer(const char* name, int min, int max,
                 HistogramTimerResolution resolution, int num_buckets,
                 Counters* counters)
      : TimedHistogram(name, min, max, resolution, num_buckets, counters) {}

  // Defined inline elsewhere (likely counters-inl or similar — not visible
  // in this header chunk).
  inline void Start();
  inline void Stop();

  // Returns true if the timer is running.
  bool Running() {
    return Enabled() && timer_.IsStarted();
  }

  // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
#ifdef DEBUG
  base::ElapsedTimer* timer() { return &timer_; }
#endif

 private:
  friend class Counters;

  base::ElapsedTimer timer_;

  // Default constructor for Counters-managed storage only.
  HistogramTimer() {}
};
398
399 // Helper class for scoping a HistogramTimer.
400 // TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
401 // Parser is currently reentrant (when it throws an error, we call back
402 // into JavaScript and all bets are off), but ElapsedTimer is not
403 // reentry-safe. Fix this properly and remove |allow_nesting|.
// Helper class for scoping a HistogramTimer.
// TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope BASE_EMBEDDED {
 public:
  // In DEBUG builds, a nested use with |allow_nesting| skips starting the
  // timer (and later skips stopping it); in release builds |allow_nesting|
  // is ignored and the timer is always started.
  explicit HistogramTimerScope(HistogramTimer* timer,
                               bool allow_nesting = false)
#ifdef DEBUG
      : timer_(timer), skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
#else
      : timer_(timer) {
    timer_->Start();
  }
#endif
  ~HistogramTimerScope() {
#ifdef DEBUG
    // Only the scope that actually started the timer stops it.
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }

 private:
  HistogramTimer* timer_;
#ifdef DEBUG
  bool skipped_timer_start_;
#endif
};
437
// Controls whether an OptionalHistogramTimerScope actually measures time.
enum class OptionalHistogramTimerScopeMode { TAKE_TIME, DONT_TAKE_TIME };
439
440 // Helper class for scoping a HistogramTimer.
441 // It will not take time if take_time is set to false.
442 class OptionalHistogramTimerScope BASE_EMBEDDED {
443 public:
OptionalHistogramTimerScope(HistogramTimer * timer,OptionalHistogramTimerScopeMode mode)444 OptionalHistogramTimerScope(HistogramTimer* timer,
445 OptionalHistogramTimerScopeMode mode)
446 : timer_(timer), mode_(mode) {
447 if (mode == OptionalHistogramTimerScopeMode::TAKE_TIME) timer_->Start();
448 }
449
~OptionalHistogramTimerScope()450 ~OptionalHistogramTimerScope() {
451 if (mode_ == OptionalHistogramTimerScopeMode::TAKE_TIME) timer_->Stop();
452 }
453
454 private:
455 HistogramTimer* timer_;
456 OptionalHistogramTimerScopeMode mode_;
457 };
458
459 // A histogram timer that can aggregate events within a larger scope.
460 //
// Intended use of this timer is to have an outer (aggregating) and an inner
// (to be aggregated) scope, where the inner scope measures the time of
// events, and all those inner scope measurements will be summed up by the
// outer scope.
464 // An example use might be to aggregate the time spent in lazy compilation
465 // while running a script.
466 //
467 // Helpers:
468 // - AggregatingHistogramTimerScope, the "outer" scope within which
469 // times will be summed up.
470 // - AggregatedHistogramTimerScope, the "inner" scope which defines the
471 // events to be timed.
472 class AggregatableHistogramTimer : public Histogram {
473 public:
474 // Start/stop the "outer" scope.
Start()475 void Start() { time_ = base::TimeDelta(); }
Stop()476 void Stop() {
477 if (time_ != base::TimeDelta()) {
478 // Only add non-zero samples, since zero samples represent situations
479 // where there were no aggregated samples added.
480 AddSample(static_cast<int>(time_.InMicroseconds()));
481 }
482 }
483
484 // Add a time value ("inner" scope).
Add(base::TimeDelta other)485 void Add(base::TimeDelta other) { time_ += other; }
486
487 private:
488 friend class Counters;
489
AggregatableHistogramTimer()490 AggregatableHistogramTimer() {}
AggregatableHistogramTimer(const char * name,int min,int max,int num_buckets,Counters * counters)491 AggregatableHistogramTimer(const char* name, int min, int max,
492 int num_buckets, Counters* counters)
493 : Histogram(name, min, max, num_buckets, counters) {}
494
495 base::TimeDelta time_;
496 };
497
// A helper class for use with AggregatableHistogramTimer. This is the
// outer-most timer scope used with an AggregatableHistogramTimer. It will
// aggregate the information from the inner AggregatedHistogramTimerScope.
// RAII outer scope: calls Start() on construction and Stop() (which reports
// the aggregated sample) on destruction.
class AggregatingHistogramTimerScope {
 public:
  explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    histogram_->Start();
  }
  ~AggregatingHistogramTimerScope() { histogram_->Stop(); }

 private:
  AggregatableHistogramTimer* histogram_;
};
512
// A helper class for use with AggregatableHistogramTimer, the "inner" scope
// which defines the events to be timed.
// RAII inner scope: measures its own lifetime with a local ElapsedTimer and
// adds the elapsed time to the aggregating histogram on destruction.
class AggregatedHistogramTimerScope {
 public:
  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    timer_.Start();
  }
  ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }

 private:
  base::ElapsedTimer timer_;
  AggregatableHistogramTimer* histogram_;
};
527
528
// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where T is controlled
// by FLAG_histogram_interval.
//
// More formally: let F be a real-valued function that maps time to sample
// values. We define F as a linear interpolation between adjacent samples. For
// each time interval [x; x + T) the backing histogram gets one sample value
// that is the average of F(t) in the interval.
template <typename Histogram>
class AggregatedMemoryHistogram {
 public:
  // Note: public for testing purposes only.
  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
      : AggregatedMemoryHistogram() {
    backing_histogram_ = backing_histogram;
  }

  // Invariants that hold before and after AddSample if
  // is_initialized_ is true:
  //
  // 1) We have processed all samples that came in before start_ms_ and sent
  //    the corresponding aggregated samples to the backing histogram.
  // 2) (last_ms_, last_value_) is the last received sample.
  // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
  // 4) aggregate_value_ is the average of the function that is constructed by
  //    linearly interpolating samples received between start_ms_ and last_ms_.
  void AddSample(double current_ms, double current_value);

 private:
  friend class Counters;

  // Zero-initialized delegation target; the first AddSample establishes the
  // real starting state (is_initialized_ flips to true there).
  AggregatedMemoryHistogram()
      : is_initialized_(false),
        start_ms_(0.0),
        last_ms_(0.0),
        aggregate_value_(0.0),
        last_value_(0.0),
        backing_histogram_(nullptr) {}
  // Weighted average of aggregate_value_ and the new segment; see definition
  // below.
  double Aggregate(double current_ms, double current_value);

  bool is_initialized_;
  double start_ms_;        // Start of the current aggregation interval.
  double last_ms_;         // Time of the last received sample.
  double aggregate_value_; // Running average over [start_ms_; last_ms_].
  double last_value_;      // Value of the last received sample.
  Histogram* backing_histogram_;
};
577
578
template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
  if (!is_initialized_) {
    // First sample: establish the invariants with a degenerate interval.
    aggregate_value_ = current_value;
    start_ms_ = current_ms;
    last_value_ = current_value;
    last_ms_ = current_ms;
    is_initialized_ = true;
  } else {
    const double kEpsilon = 1e-6;
    // Cap on how many backing-histogram samples a single call may emit,
    // bounding work after a long gap between samples.
    const int kMaxSamples = 1000;
    if (current_ms < last_ms_ + kEpsilon) {
      // Two samples have the same time, remember the last one.
      last_value_ = current_value;
    } else {
      double sample_interval_ms = FLAG_histogram_interval;
      double end_ms = start_ms_ + sample_interval_ms;
      if (end_ms <= current_ms + kEpsilon) {
        // Linearly interpolate between the last_ms_ and the current_ms.
        double slope = (current_value - last_value_) / (current_ms - last_ms_);
        int i;
        // Send aggregated samples to the backing histogram from the start_ms
        // to the current_ms.
        for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
          double end_value = last_value_ + (end_ms - last_ms_) * slope;
          double sample_value;
          if (i == 0) {
            // Take aggregate_value_ into account.
            sample_value = Aggregate(end_ms, end_value);
          } else {
            // There is no aggregate_value_ for i > 0.
            sample_value = (last_value_ + end_value) / 2;
          }
          // Round to nearest int before handing to the backing histogram.
          backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
          last_value_ = end_value;
          last_ms_ = end_ms;
          end_ms += sample_interval_ms;
        }
        if (i == kMaxSamples) {
          // We hit the sample limit, ignore the remaining samples.
          aggregate_value_ = current_value;
          start_ms_ = current_ms;
        } else {
          aggregate_value_ = last_value_;
          start_ms_ = last_ms_;
        }
      }
      // Fold the tail segment (start_ms_ .. current_ms) into the running
      // average, unless the interval is effectively empty.
      aggregate_value_ = current_ms > start_ms_ + kEpsilon
                             ? Aggregate(current_ms, current_value)
                             : aggregate_value_;
      last_value_ = current_value;
      last_ms_ = current_ms;
    }
  }
}
635
636
637 template <typename Histogram>
Aggregate(double current_ms,double current_value)638 double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
639 double current_value) {
640 double interval_ms = current_ms - start_ms_;
641 double value = (current_value + last_value_) / 2;
642 // The aggregate_value_ is the average for [start_ms_; last_ms_].
643 // The value is the average for [last_ms_; current_ms].
644 // Return the weighted average of the aggregate_value_ and the value.
645 return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
646 value * ((current_ms - last_ms_) / interval_ms);
647 }
648
// A single named counter recording how many times a runtime call occurred
// (count_) and how much time it consumed in microseconds (time_).
class RuntimeCallCounter final {
 public:
  RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
  explicit RuntimeCallCounter(const char* name)
      : name_(name), count_(0), time_(0) {}
  // Zero the counter (defined out-of-line).
  V8_NOINLINE void Reset();
  // Emit this counter into a trace value (defined out-of-line).
  V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
  // Merge another counter's totals into this one (defined out-of-line).
  void Add(RuntimeCallCounter* other);

  const char* name() const { return name_; }
  int64_t count() const { return count_; }
  base::TimeDelta time() const {
    return base::TimeDelta::FromMicroseconds(time_);
  }
  void Increment() { count_++; }
  void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }

 private:
  friend class RuntimeCallStats;

  const char* name_;
  int64_t count_;
  // Stored as int64_t so that its initialization can be deferred.
  int64_t time_;
};
674
675 // RuntimeCallTimer is used to keep track of the stack of currently active
676 // timers used for properly measuring the own time of a RuntimeCallCounter.
// RuntimeCallTimer is used to keep track of the stack of currently active
// timers used for properly measuring the own time of a RuntimeCallCounter.
class RuntimeCallTimer final {
 public:
  RuntimeCallCounter* counter() { return counter_; }
  void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
  // Parent link forms the timer stack; stored atomically.
  RuntimeCallTimer* parent() const { return parent_.Value(); }
  void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
  const char* name() const { return counter_->name(); }

  inline bool IsStarted();

  inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent);
  void Snapshot();
  // Stops this timer; returns the parent (new top of the timer stack).
  inline RuntimeCallTimer* Stop();

  // Make the time source configurable for testing purposes.
  V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();

 private:
  inline void Pause(base::TimeTicks now);
  inline void Resume(base::TimeTicks now);
  inline void CommitTimeToCounter();

  RuntimeCallCounter* counter_ = nullptr;
  base::AtomicValue<RuntimeCallTimer*> parent_;
  base::TimeTicks start_ticks_;
  base::TimeDelta elapsed_;
};
704
// Applies V to every GC tracer scope name (foreground and background),
// as defined by the TRACER_* list macros.
#define FOR_EACH_GC_COUNTER(V) \
  TRACER_SCOPES(V)             \
  TRACER_BACKGROUND_SCOPES(V)
708
// Applies V to every public-API entry point tracked as a runtime call
// counter. Keep entries sorted by API class for readability.
#define FOR_EACH_API_COUNTER(V) \
  V(ArrayBuffer_Cast) \
  V(ArrayBuffer_Neuter) \
  V(ArrayBuffer_New) \
  V(Array_CloneElementAt) \
  V(Array_New) \
  V(BigInt_NewFromWords) \
  V(BigInt64Array_New) \
  V(BigUint64Array_New) \
  V(BigIntObject_New) \
  V(BigIntObject_BigIntValue) \
  V(BooleanObject_BooleanValue) \
  V(BooleanObject_New) \
  V(Context_New) \
  V(Context_NewRemoteContext) \
  V(DataView_New) \
  V(Date_DateTimeConfigurationChangeNotification) \
  V(Date_New) \
  V(Date_NumberValue) \
  V(Debug_Call) \
  V(Error_New) \
  V(External_New) \
  V(Float32Array_New) \
  V(Float64Array_New) \
  V(Function_Call) \
  V(Function_New) \
  V(Function_NewInstance) \
  V(FunctionTemplate_GetFunction) \
  V(FunctionTemplate_New) \
  V(FunctionTemplate_NewRemoteInstance) \
  V(FunctionTemplate_NewWithCache) \
  V(FunctionTemplate_NewWithFastHandler) \
  V(Int16Array_New) \
  V(Int32Array_New) \
  V(Int8Array_New) \
  V(JSON_Parse) \
  V(JSON_Stringify) \
  V(Map_AsArray) \
  V(Map_Clear) \
  V(Map_Delete) \
  V(Map_Get) \
  V(Map_Has) \
  V(Map_New) \
  V(Map_Set) \
  V(Message_GetEndColumn) \
  V(Message_GetLineNumber) \
  V(Message_GetSourceLine) \
  V(Message_GetStartColumn) \
  V(Module_Evaluate) \
  V(Module_InstantiateModule) \
  V(NumberObject_New) \
  V(NumberObject_NumberValue) \
  V(Object_CallAsConstructor) \
  V(Object_CallAsFunction) \
  V(Object_CreateDataProperty) \
  V(Object_DefineOwnProperty) \
  V(Object_DefineProperty) \
  V(Object_Delete) \
  V(Object_DeleteProperty) \
  V(Object_ForceSet) \
  V(Object_Get) \
  V(Object_GetOwnPropertyDescriptor) \
  V(Object_GetOwnPropertyNames) \
  V(Object_GetPropertyAttributes) \
  V(Object_GetPropertyNames) \
  V(Object_GetRealNamedProperty) \
  V(Object_GetRealNamedPropertyAttributes) \
  V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
  V(Object_GetRealNamedPropertyInPrototypeChain) \
  V(Object_Has) \
  V(Object_HasOwnProperty) \
  V(Object_HasRealIndexedProperty) \
  V(Object_HasRealNamedCallbackProperty) \
  V(Object_HasRealNamedProperty) \
  V(Object_New) \
  V(Object_ObjectProtoToString) \
  V(Object_Set) \
  V(Object_SetAccessor) \
  V(Object_SetIntegrityLevel) \
  V(Object_SetPrivate) \
  V(Object_SetPrototype) \
  V(ObjectTemplate_New) \
  V(ObjectTemplate_NewInstance) \
  V(Object_ToArrayIndex) \
  V(Object_ToBigInt) \
  V(Object_ToDetailString) \
  V(Object_ToInt32) \
  V(Object_ToInteger) \
  V(Object_ToNumber) \
  V(Object_ToObject) \
  V(Object_ToString) \
  V(Object_ToUint32) \
  V(Persistent_New) \
  V(Private_New) \
  V(Promise_Catch) \
  V(Promise_Chain) \
  V(Promise_HasRejectHandler) \
  V(Promise_Resolver_New) \
  V(Promise_Resolver_Resolve) \
  V(Promise_Resolver_Reject) \
  V(Promise_Result) \
  V(Promise_Status) \
  V(Promise_Then) \
  V(Proxy_New) \
  V(RangeError_New) \
  V(ReferenceError_New) \
  V(RegExp_New) \
  V(ScriptCompiler_Compile) \
  V(ScriptCompiler_CompileFunctionInContext) \
  V(ScriptCompiler_CompileUnbound) \
  V(Script_Run) \
  V(Set_Add) \
  V(Set_AsArray) \
  V(Set_Clear) \
  V(Set_Delete) \
  V(Set_Has) \
  V(Set_New) \
  V(SharedArrayBuffer_New) \
  V(String_Concat) \
  V(String_NewExternalOneByte) \
  V(String_NewExternalTwoByte) \
  V(String_NewFromOneByte) \
  V(String_NewFromTwoByte) \
  V(String_NewFromUtf8) \
  V(StringObject_New) \
  V(StringObject_StringValue) \
  V(String_Write) \
  V(String_WriteUtf8) \
  V(Symbol_New) \
  V(SymbolObject_New) \
  V(SymbolObject_SymbolValue) \
  V(SyntaxError_New) \
  V(TryCatch_StackTrace) \
  V(TypeError_New) \
  V(Uint16Array_New) \
  V(Uint32Array_New) \
  V(Uint8Array_New) \
  V(Uint8ClampedArray_New) \
  V(UnboundScript_GetId) \
  V(UnboundScript_GetLineNumber) \
  V(UnboundScript_GetName) \
  V(UnboundScript_GetSourceMappingURL) \
  V(UnboundScript_GetSourceURL) \
  V(Value_InstanceOf) \
  V(Value_IntegerValue) \
  V(Value_Int32Value) \
  V(Value_NumberValue) \
  V(Value_TypeOf) \
  V(Value_Uint32Value) \
  V(ValueDeserializer_ReadHeader) \
  V(ValueDeserializer_ReadValue) \
  V(ValueSerializer_WriteValue)
861
// Applies V to every manually instrumented runtime call counter (parser,
// compiler, GC, IC callbacks, and test counters).
#define FOR_EACH_MANUAL_COUNTER(V) \
  V(AccessorGetterCallback) \
  V(AccessorSetterCallback) \
  V(ArrayLengthGetter) \
  V(ArrayLengthSetter) \
  V(BoundFunctionNameGetter) \
  V(BoundFunctionLengthGetter) \
  V(CompileBackgroundAnalyse) \
  V(CompileBackgroundEval) \
  V(CompileBackgroundIgnition) \
  V(CompileBackgroundScript) \
  V(CompileBackgroundRewriteReturnResult) \
  V(CompileBackgroundScopeAnalysis) \
  V(CompileDeserialize) \
  V(CompileEval) \
  V(CompileAnalyse) \
  V(CompileFunction) \
  V(CompileGetFromOptimizedCodeMap) \
  V(CompileIgnition) \
  V(CompileIgnitionFinalization) \
  V(CompileRewriteReturnResult) \
  V(CompileScopeAnalysis) \
  V(CompileScript) \
  V(CompileSerialize) \
  V(CompileWaitForDispatcher) \
  V(DeoptimizeCode) \
  V(FunctionCallback) \
  V(FunctionPrototypeGetter) \
  V(FunctionPrototypeSetter) \
  V(FunctionLengthGetter) \
  V(GC_Custom_AllAvailableGarbage) \
  V(GC_Custom_IncrementalMarkingObserver) \
  V(GC_Custom_SlowAllocateRaw) \
  V(GCEpilogueCallback) \
  V(GCPrologueCallback) \
  V(GetMoreDataCallback) \
  V(NamedDefinerCallback) \
  V(NamedDeleterCallback) \
  V(NamedDescriptorCallback) \
  V(NamedQueryCallback) \
  V(NamedSetterCallback) \
  V(NamedGetterCallback) \
  V(NamedEnumeratorCallback) \
  V(IndexedDefinerCallback) \
  V(IndexedDeleterCallback) \
  V(IndexedDescriptorCallback) \
  V(IndexedGetterCallback) \
  V(IndexedQueryCallback) \
  V(IndexedSetterCallback) \
  V(IndexedEnumeratorCallback) \
  V(InvokeApiInterruptCallbacks) \
  V(InvokeFunctionCallback) \
  V(JS_Execution) \
  V(Map_SetPrototype) \
  V(Map_TransitionToAccessorProperty) \
  V(Map_TransitionToDataProperty) \
  V(Object_DeleteProperty) \
  V(OptimizeCode) \
  V(ParseArrowFunctionLiteral) \
  V(ParseBackgroundArrowFunctionLiteral) \
  V(ParseBackgroundFunctionLiteral) \
  V(ParseBackgroundProgram) \
  V(ParseEval) \
  V(ParseFunction) \
  V(ParseFunctionLiteral) \
  V(ParseProgram) \
  V(PreParseArrowFunctionLiteral) \
  V(PreParseBackgroundArrowFunctionLiteral) \
  V(PreParseBackgroundNoVariableResolution) \
  V(PreParseBackgroundWithVariableResolution) \
  V(PreParseNoVariableResolution) \
  V(PreParseWithVariableResolution) \
  V(PropertyCallback) \
  V(PrototypeMap_TransitionToAccessorProperty) \
  V(PrototypeMap_TransitionToDataProperty) \
  V(PrototypeObject_DeleteProperty) \
  V(RecompileConcurrent) \
  V(RecompileSynchronous) \
  V(ReconfigureToDataProperty) \
  V(StringLengthGetter) \
  V(TestCounter1) \
  V(TestCounter2) \
  V(TestCounter3)
945
// IC-handler counters (expanded to kHandler_* ids in RuntimeCallCounterId
// and reported via TRACE_HANDLER_STATS). Entry order determines the
// sequential enum values below; do not reorder casually.
946 #define FOR_EACH_HANDLER_COUNTER(V)                          \
947   V(KeyedLoadIC_LoadIndexedInterceptorStub)                  \
948   V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub)                \
949   V(KeyedLoadIC_LoadElementDH)                               \
950   V(KeyedLoadIC_LoadIndexedStringDH)                         \
951   V(KeyedLoadIC_SlowStub)                                    \
952   V(KeyedStoreIC_ElementsTransitionAndStoreStub)             \
953   V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub)              \
954   V(KeyedStoreIC_SlowStub)                                   \
955   V(KeyedStoreIC_StoreFastElementStub)                       \
956   V(KeyedStoreIC_StoreElementStub)                           \
957   V(StoreInArrayLiteralIC_SlowStub)                          \
958   V(LoadGlobalIC_LoadScriptContextField)                     \
959   V(LoadGlobalIC_SlowStub)                                   \
960   V(LoadIC_FunctionPrototypeStub)                            \
961   V(LoadIC_HandlerCacheHit_Accessor)                         \
962   V(LoadIC_LoadAccessorDH)                                   \
963   V(LoadIC_LoadAccessorFromPrototypeDH)                      \
964   V(LoadIC_LoadApiGetterFromPrototypeDH)                     \
965   V(LoadIC_LoadCallback)                                     \
966   V(LoadIC_LoadConstantDH)                                   \
967   V(LoadIC_LoadConstantFromPrototypeDH)                      \
968   V(LoadIC_LoadFieldDH)                                      \
969   V(LoadIC_LoadFieldFromPrototypeDH)                         \
970   V(LoadIC_LoadGlobalDH)                                     \
971   V(LoadIC_LoadGlobalFromPrototypeDH)                        \
972   V(LoadIC_LoadIntegerIndexedExoticDH)                       \
973   V(LoadIC_LoadInterceptorDH)                                \
974   V(LoadIC_LoadNonMaskingInterceptorDH)                      \
975   V(LoadIC_LoadInterceptorFromPrototypeDH)                   \
976   V(LoadIC_LoadNativeDataPropertyDH)                         \
977   V(LoadIC_LoadNativeDataPropertyFromPrototypeDH)            \
978   V(LoadIC_LoadNonexistentDH)                                \
979   V(LoadIC_LoadNormalDH)                                     \
980   V(LoadIC_LoadNormalFromPrototypeDH)                        \
981   V(LoadIC_NonReceiver)                                      \
982   V(LoadIC_Premonomorphic)                                   \
983   V(LoadIC_SlowStub)                                         \
984   V(LoadIC_StringLength)                                     \
985   V(LoadIC_StringWrapperLength)                              \
986   V(StoreGlobalIC_StoreScriptContextField)                   \
987   V(StoreGlobalIC_SlowStub)                                  \
988   V(StoreIC_HandlerCacheHit_Accessor)                        \
989   V(StoreIC_NonReceiver)                                     \
990   V(StoreIC_Premonomorphic)                                  \
991   V(StoreIC_SlowStub)                                        \
992   V(StoreIC_StoreAccessorDH)                                 \
993   V(StoreIC_StoreAccessorOnPrototypeDH)                      \
994   V(StoreIC_StoreApiSetterOnPrototypeDH)                     \
995   V(StoreIC_StoreFieldDH)                                    \
996   V(StoreIC_StoreGlobalDH)                                   \
997   V(StoreIC_StoreGlobalTransitionDH)                         \
998   V(StoreIC_StoreInterceptorStub)                            \
999   V(StoreIC_StoreNativeDataPropertyDH)                       \
1000   V(StoreIC_StoreNativeDataPropertyOnPrototypeDH)           \
1001   V(StoreIC_StoreNormalDH)                                  \
1002   V(StoreIC_StoreTransitionDH)
1003
// Dense id space for every runtime-call counter. Values are assigned
// sequentially by expanding the counter lists in order (GC, manual,
// runtime intrinsics, C++ builtins, API, IC handlers), each with its own
// prefix (kGC_/k/kRuntime_/kBuiltin_/kAPI_/kHandler_). Because ids are
// positional, the lists above must not be reordered without updating any
// consumer that persists these values. kNumberOfCounters is the total.
1004 enum RuntimeCallCounterId {
1005 #define CALL_RUNTIME_COUNTER(name) kGC_##name,
1006   FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER)
1007 #undef CALL_RUNTIME_COUNTER
1008 #define CALL_RUNTIME_COUNTER(name) k##name,
1009   FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER)
1010 #undef CALL_RUNTIME_COUNTER
1011 #define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
1012   FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER)
1013 #undef CALL_RUNTIME_COUNTER
1014 #define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
1015   BUILTIN_LIST_C(CALL_BUILTIN_COUNTER)
1016 #undef CALL_BUILTIN_COUNTER
1017 #define CALL_BUILTIN_COUNTER(name) kAPI_##name,
1018   FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER)
1019 #undef CALL_BUILTIN_COUNTER
1020 #define CALL_BUILTIN_COUNTER(name) kHandler_##name,
1021   FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER)
1022 #undef CALL_BUILTIN_COUNTER
1023   kNumberOfCounters
1024 };
1025
// Holds one RuntimeCallCounter per RuntimeCallCounterId plus a stack of
// currently-running timers. Enter/Leave maintain the timer stack so that
// each counter accumulates "own time" (time minus nested scopes).
1026 class RuntimeCallStats final : public ZoneObject {
1027  public:
1028   V8_EXPORT_PRIVATE RuntimeCallStats();
1029
1030   // Starting measuring the time for a function. This will establish the
1031   // connection to the parent counter for properly calculating the own times.
1032   V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
1033                                RuntimeCallCounterId counter_id);
1034
1035   // Leave a scope for a measured runtime function. This will properly add
1036   // the time delta to the current_counter and subtract the delta from its
1037   // parent.
1038   V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);
1039
1040   // Set counter id for the innermost measurement. It can be used to refine
1041   // event kind when a runtime entry counter is too generic.
1042   V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
1043       RuntimeCallCounterId counter_id);
1044
1045   V8_EXPORT_PRIVATE void Reset();
1046   // Add all entries from another stats object.
1047   void Add(RuntimeCallStats* other);
1048   V8_EXPORT_PRIVATE void Print(std::ostream& os);
1049   V8_EXPORT_PRIVATE void Print();
1050   V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
1051
1052   // Accessors for the owning thread and the innermost active timer/counter.
thread_id()1052   ThreadId thread_id() const { return thread_id_; }
current_timer()1053   RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
current_counter()1054   RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
InUse()1055   bool InUse() { return in_use_; }
1056   bool IsCalledOnTheSameThread();
1057
1058   // counters_ is a dense array indexed directly by RuntimeCallCounterId.
1058   static const int kNumberOfCounters =
1059       static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
GetCounter(RuntimeCallCounterId counter_id)1060   RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
1061     return &counters_[static_cast<int>(counter_id)];
1062   }
GetCounter(int counter_id)1063   RuntimeCallCounter* GetCounter(int counter_id) {
1064     return &counters_[counter_id];
1065   }
1066
1067  private:
1068   // Top of a stack of active timers.
1069   base::AtomicValue<RuntimeCallTimer*> current_timer_;
1070   // Active counter object associated with current timer.
1071   base::AtomicValue<RuntimeCallCounter*> current_counter_;
1072   // Used to track nested tracing scopes.
1073   bool in_use_;
1074   ThreadId thread_id_;
1075   RuntimeCallCounter counters_[kNumberOfCounters];
1076 };
1077
// Retags the innermost active measurement with |counter_id|. A no-op unless
// --runtime-stats is enabled and |runtime_call_stats| is non-null.
1078 #define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
1079   do {                                                                 \
1080     if (V8_UNLIKELY(FLAG_runtime_stats) && runtime_call_stats) {       \
1081       runtime_call_stats->CorrectCurrentCounterId(counter_id);         \
1082     }                                                                  \
1083   } while (false)
1084
// Convenience wrapper: retag the current measurement with one of the
// kHandler_* ids from FOR_EACH_HANDLER_COUNTER, via the isolate's counters.
1085 #define TRACE_HANDLER_STATS(isolate, counter_name) \
1086   CHANGE_CURRENT_RUNTIME_COUNTER(                  \
1087       isolate->counters()->runtime_call_stats(),   \
1088       RuntimeCallCounterId::kHandler_##counter_name)
1089
1090 // A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
1091 // time of a C++ scope.
1092 class RuntimeCallTimerScope {
1093  public:
1094   inline RuntimeCallTimerScope(Isolate* isolate,
1095                                RuntimeCallCounterId counter_id);
1096   // This constructor is here just to avoid calling GetIsolate() when the
1097   // stats are disabled and the isolate is not directly available.
1098   inline RuntimeCallTimerScope(Isolate* isolate, HeapObject* heap_object,
1099                                RuntimeCallCounterId counter_id);
1100   // Cheapest form: the caller already has the stats object. Does nothing
1100   // (stats_ stays null) when --runtime-stats is off or |stats| is null.
RuntimeCallTimerScope(RuntimeCallStats * stats,RuntimeCallCounterId counter_id)1100   inline RuntimeCallTimerScope(RuntimeCallStats* stats,
1101                                RuntimeCallCounterId counter_id) {
1102     if (V8_LIKELY(!FLAG_runtime_stats || stats == nullptr)) return;
1103     stats_ = stats;
1104     stats_->Enter(&timer_, counter_id);
1105   }
1106
1107   // Leaves the measured scope only if a constructor actually entered one
1107   // (i.e. stats_ was set).
~RuntimeCallTimerScope()1107   inline ~RuntimeCallTimerScope() {
1108     if (V8_UNLIKELY(stats_ != nullptr)) {
1109       stats_->Leave(&timer_);
1110     }
1111   }
1112
1113  private:
1114   RuntimeCallStats* stats_ = nullptr;
1115   RuntimeCallTimer timer_;
1116
1117   DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
1118 };
1119
// NOTE(review): the V8.* captions appear to be externally-reported histogram
// names — treat both names and list order as stable; confirm with metrics
// consumers before renaming or reordering.
1120 #define HISTOGRAM_RANGE_LIST(HR)                                           \
1121   /* Generic range histograms: HR(name, caption, min, max, num_buckets) */ \
1122   HR(background_marking, V8.GCBackgroundMarking, 0, 10000, 101)            \
1123   HR(background_scavenger, V8.GCBackgroundScavenger, 0, 10000, 101)        \
1124   HR(background_sweeping, V8.GCBackgroundSweeping, 0, 10000, 101)          \
1125   HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21)     \
1126   HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6)          \
1127   HR(errors_thrown_per_context, V8.ErrorsThrownPerContext, 0, 200, 20)     \
1128   HR(debug_feature_usage, V8.DebugFeatureUsage, 1, 7, 7)                   \
1129   HR(incremental_marking_reason, V8.GCIncrementalMarkingReason, 0, 21, 22) \
1130   HR(incremental_marking_sum, V8.GCIncrementalMarkingSum, 0, 10000, 101)   \
1131   HR(mark_compact_reason, V8.GCMarkCompactReason, 0, 21, 22)               \
1132   HR(gc_finalize_clear, V8.GCFinalizeMC.Clear, 0, 10000, 101)              \
1133   HR(gc_finalize_epilogue, V8.GCFinalizeMC.Epilogue, 0, 10000, 101)        \
1134   HR(gc_finalize_evacuate, V8.GCFinalizeMC.Evacuate, 0, 10000, 101)        \
1135   HR(gc_finalize_finish, V8.GCFinalizeMC.Finish, 0, 10000, 101)            \
1136   HR(gc_finalize_mark, V8.GCFinalizeMC.Mark, 0, 10000, 101)                \
1137   HR(gc_finalize_prologue, V8.GCFinalizeMC.Prologue, 0, 10000, 101)        \
1138   HR(gc_finalize_sweep, V8.GCFinalizeMC.Sweep, 0, 10000, 101)              \
1139   HR(scavenge_reason, V8.GCScavengeReason, 0, 21, 22)                      \
1140   HR(young_generation_handling, V8.GCYoungGenerationHandling, 0, 2, 3)     \
1141   /* Asm/Wasm. */                                                          \
1142   HR(wasm_functions_per_asm_module, V8.WasmFunctionsPerModule.asm, 1, 100000, \
1143      51)                                                                   \
1144   HR(wasm_functions_per_wasm_module, V8.WasmFunctionsPerModule.wasm, 1,    \
1145      100000, 51)                                                           \
1146   HR(array_buffer_big_allocations, V8.ArrayBufferLargeAllocations, 0, 4096, \
1147      13)                                                                   \
1148   HR(array_buffer_new_size_failures, V8.ArrayBufferNewSizeFailures, 0, 4096, \
1149      13)                                                                   \
1150   HR(shared_array_allocations, V8.SharedArrayAllocationSizes, 0, 4096, 13) \
1151   HR(wasm_asm_function_size_bytes, V8.WasmFunctionSizeBytes.asm, 1, GB, 51) \
1152   HR(wasm_wasm_function_size_bytes, V8.WasmFunctionSizeBytes.wasm, 1, GB, 51) \
1153   HR(wasm_asm_module_size_bytes, V8.WasmModuleSizeBytes.asm, 1, GB, 51)    \
1154   HR(wasm_wasm_module_size_bytes, V8.WasmModuleSizeBytes.wasm, 1, GB, 51)  \
1155   HR(wasm_asm_min_mem_pages_count, V8.WasmMinMemPagesCount.asm, 1, 2 << 16, \
1156      51)                                                                   \
1157   HR(wasm_wasm_min_mem_pages_count, V8.WasmMinMemPagesCount.wasm, 1, 2 << 16, \
1158      51)                                                                   \
1159   HR(wasm_wasm_max_mem_pages_count, V8.WasmMaxMemPagesCount.wasm, 1, 2 << 16, \
1160      51)                                                                   \
1161   HR(wasm_decode_asm_module_peak_memory_bytes,                             \
1162      V8.WasmDecodeModulePeakMemoryBytes.asm, 1, GB, 51)                    \
1163   HR(wasm_decode_wasm_module_peak_memory_bytes,                            \
1164      V8.WasmDecodeModulePeakMemoryBytes.wasm, 1, GB, 51)                   \
1165   HR(asm_wasm_translation_peak_memory_bytes,                               \
1166      V8.AsmWasmTranslationPeakMemoryBytes, 1, GB, 51)                      \
1167   HR(wasm_compile_function_peak_memory_bytes,                              \
1168      V8.WasmCompileFunctionPeakMemoryBytes, 1, GB, 51)                     \
1169   HR(asm_module_size_bytes, V8.AsmModuleSizeBytes, 1, GB, 51)              \
1170   HR(asm_wasm_translation_throughput, V8.AsmWasmTranslationThroughput, 1, 100, \
1171      20)                                                                   \
1172   HR(wasm_lazy_compilation_throughput, V8.WasmLazyCompilationThroughput, 1, \
1173      10000, 50)                                                            \
1174   HR(compile_script_cache_behaviour, V8.CompileScript.CacheBehaviour, 0, 20, \
1175      21)                                                                   \
1176   HR(wasm_memory_allocation_result, V8.WasmMemoryAllocationResult, 0, 3, 4) \
1177   HR(wasm_address_space_usage_mb, V8.WasmAddressSpaceUsageMiB, 0, 1 << 20, \
1178      128)                                                                  \
1179   HR(wasm_module_code_size_mb, V8.WasmModuleCodeSizeMiB, 0, 256, 64)
1180
// Timer histograms bound to HistogramTimer (see Counters below):
// HT(name, caption, max, resolution).
1181 #define HISTOGRAM_TIMER_LIST(HT)                                           \
1182   /* Garbage collection timers. */                                         \
1183   HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND)                     \
1184   HT(gc_compactor_background, V8.GCCompactorBackground, 10000, MILLISECOND) \
1185   HT(gc_compactor_foreground, V8.GCCompactorForeground, 10000, MILLISECOND) \
1186   HT(gc_finalize, V8.GCFinalizeMC, 10000, MILLISECOND)                     \
1187   HT(gc_finalize_background, V8.GCFinalizeMCBackground, 10000, MILLISECOND) \
1188   HT(gc_finalize_foreground, V8.GCFinalizeMCForeground, 10000, MILLISECOND) \
1189   HT(gc_finalize_reduce_memory, V8.GCFinalizeMCReduceMemory, 10000,        \
1190      MILLISECOND)                                                          \
1191   HT(gc_finalize_reduce_memory_background,                                 \
1192      V8.GCFinalizeMCReduceMemoryBackground, 10000, MILLISECOND)            \
1193   HT(gc_finalize_reduce_memory_foreground,                                 \
1194      V8.GCFinalizeMCReduceMemoryForeground, 10000, MILLISECOND)            \
1195   HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND)                     \
1196   HT(gc_scavenger_background, V8.GCScavengerBackground, 10000, MILLISECOND) \
1197   HT(gc_scavenger_foreground, V8.GCScavengerForeground, 10000, MILLISECOND) \
1198   HT(gc_context, V8.GCContext, 10000,                                      \
1199      MILLISECOND) /* GC context cleanup time */                            \
1200   HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND)      \
1201   HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND)  \
1202   HT(gc_incremental_marking_start, V8.GCIncrementalMarkingStart, 10000,    \
1203      MILLISECOND)                                                          \
1204   HT(gc_incremental_marking_finalize, V8.GCIncrementalMarkingFinalize, 10000, \
1205      MILLISECOND)                                                          \
1206   HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000,        \
1207      MILLISECOND)                                                          \
1208   /* Compilation times. */                                                 \
1209   HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND)                \
1210   HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND)       \
1211   /* Serialization as part of compilation (code caching) */                \
1212   HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
1213   HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000,      \
1214      MICROSECOND)                                                          \
1215   /* Total compilation time incl. caching/parsing */                       \
1216   HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND)   \
1217   /* Total JavaScript execution time (including callbacks and runtime calls) */ \
1218   HT(execute, V8.Execute, 1000000, MICROSECOND)                            \
1219   /* Asm/Wasm */                                                           \
1220   HT(asm_wasm_translation_time, V8.AsmWasmTranslationMicroSeconds, 1000000, \
1221      MICROSECOND)                                                          \
1222   HT(wasm_lazy_compilation_time, V8.WasmLazyCompilationMicroSeconds, 1000000, \
1223      MICROSECOND)                                                          \
1224   HT(wasm_execution_time, V8.WasmExecutionTimeMicroSeconds, 10000000,      \
1225      MICROSECOND)
1226
// Timer histograms bound to TimedHistogram (same HT signature as above).
1227 #define TIMED_HISTOGRAM_LIST(HT)                                           \
1228   HT(wasm_decode_asm_module_time, V8.WasmDecodeModuleMicroSeconds.asm,     \
1229      1000000, MICROSECOND)                                                 \
1230   HT(wasm_decode_wasm_module_time, V8.WasmDecodeModuleMicroSeconds.wasm,   \
1231      1000000, MICROSECOND)                                                 \
1232   HT(wasm_decode_asm_function_time, V8.WasmDecodeFunctionMicroSeconds.asm, \
1233      1000000, MICROSECOND)                                                 \
1234   HT(wasm_decode_wasm_function_time, V8.WasmDecodeFunctionMicroSeconds.wasm, \
1235      1000000, MICROSECOND)                                                 \
1236   HT(wasm_compile_asm_module_time, V8.WasmCompileModuleMicroSeconds.asm,   \
1237      10000000, MICROSECOND)                                                \
1238   HT(wasm_compile_wasm_module_time, V8.WasmCompileModuleMicroSeconds.wasm, \
1239      10000000, MICROSECOND)                                                \
1240   HT(wasm_compile_asm_function_time, V8.WasmCompileFunctionMicroSeconds.asm, \
1241      1000000, MICROSECOND)                                                 \
1242   HT(wasm_compile_wasm_function_time, V8.WasmCompileFunctionMicroSeconds.wasm, \
1243      1000000, MICROSECOND)                                                 \
1244   HT(liftoff_compile_time, V8.LiftoffCompileMicroSeconds, 10000000,        \
1245      MICROSECOND)                                                          \
1246   HT(wasm_instantiate_wasm_module_time,                                    \
1247      V8.WasmInstantiateModuleMicroSeconds.wasm, 10000000, MICROSECOND)     \
1248   HT(wasm_instantiate_asm_module_time,                                     \
1249      V8.WasmInstantiateModuleMicroSeconds.asm, 10000000, MICROSECOND)      \
1250   /* Total compilation time incl. caching/parsing for various cache states. */ \
1251   HT(compile_script_with_produce_cache,                                    \
1252      V8.CompileScriptMicroSeconds.ProduceCache, 1000000, MICROSECOND)      \
1253   HT(compile_script_with_isolate_cache_hit,                                \
1254      V8.CompileScriptMicroSeconds.IsolateCacheHit, 1000000, MICROSECOND)   \
1255   HT(compile_script_with_consume_cache,                                    \
1256      V8.CompileScriptMicroSeconds.ConsumeCache, 1000000, MICROSECOND)      \
1257   HT(compile_script_consume_failed,                                        \
1258      V8.CompileScriptMicroSeconds.ConsumeCache.Failed, 1000000, MICROSECOND) \
1259   HT(compile_script_no_cache_other,                                        \
1260      V8.CompileScriptMicroSeconds.NoCache.Other, 1000000, MICROSECOND)     \
1261   HT(compile_script_no_cache_because_inline_script,                        \
1262      V8.CompileScriptMicroSeconds.NoCache.InlineScript, 1000000, MICROSECOND) \
1263   HT(compile_script_no_cache_because_script_too_small,                     \
1264      V8.CompileScriptMicroSeconds.NoCache.ScriptTooSmall, 1000000,         \
1265      MICROSECOND)                                                          \
1266   HT(compile_script_no_cache_because_cache_too_cold,                       \
1267      V8.CompileScriptMicroSeconds.NoCache.CacheTooCold, 1000000, MICROSECOND) \
1268   HT(compile_script_on_background,                                         \
1269      V8.CompileScriptMicroSeconds.BackgroundThread, 1000000, MICROSECOND)  \
1270   HT(gc_parallel_task_latency, V8.GC.ParallelTaskLatencyMicroSeconds, 1000000, \
1271      MICROSECOND)
1272
// Histograms bound to AggregatableHistogramTimer: AHT(name, caption).
1273 #define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
1274   AHT(compile_lazy, V8.CompileLazyMicroSeconds)
1275
// Percentage histograms: HP(name, caption).
1276 #define HISTOGRAM_PERCENTAGE_LIST(HP)                                      \
1277   /* Heap fragmentation. */                                                \
1278   HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal)    \
1279   HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
1280   HP(external_fragmentation_code_space,                                    \
1281      V8.MemoryExternalFragmentationCodeSpace)                              \
1282   HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
1283   HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)
1284
1285 // Note: These use Histogram with options (min=1000, max=500000, buckets=50).
1286 #define HISTOGRAM_LEGACY_MEMORY_LIST(HM)                                   \
1287   HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted)       \
1288   HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed)                 \
1289   HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted) \
1290   HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
1291   HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)
1292
1293 // WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
1294 // Intellisense to crash. It was broken into two macros (each of length 40
1295 // lines) rather than one macro (of length about 80 lines) to work around
1296 // this problem. Please avoid using recursive macros of this length when
1297 // possible.
// Plain (non-thread-safe) counters, part 1: SC(name, caption).
1298 #define STATS_COUNTER_LIST_1(SC)                                   \
1299   /* Global Handle Count*/                                         \
1300   SC(global_handles, V8.GlobalHandles)                             \
1301   /* OS Memory allocated */                                        \
1302   SC(memory_allocated, V8.OsMemoryAllocated)                       \
1303   SC(maps_normalized, V8.MapsNormalized)                           \
1304   SC(maps_created, V8.MapsCreated)                                 \
1305   SC(elements_transitions, V8.ObjectElementsTransitions)           \
1306   SC(props_to_dictionary, V8.ObjectPropertiesToDictionary)         \
1307   SC(elements_to_dictionary, V8.ObjectElementsToDictionary)        \
1308   SC(alive_after_last_gc, V8.AliveAfterLastGC)                     \
1309   SC(objs_since_last_young, V8.ObjsSinceLastYoung)                 \
1310   SC(objs_since_last_full, V8.ObjsSinceLastFull)                   \
1311   SC(string_table_capacity, V8.StringTableCapacity)                \
1312   SC(number_of_symbols, V8.NumberOfSymbols)                        \
1313   SC(inlined_copied_elements, V8.InlinedCopiedElements)            \
1314   SC(arguments_adaptors, V8.ArgumentsAdaptors)                     \
1315   SC(compilation_cache_hits, V8.CompilationCacheHits)              \
1316   SC(compilation_cache_misses, V8.CompilationCacheMisses)          \
1317   /* Amount of evaled source code. */                              \
1318   SC(total_eval_size, V8.TotalEvalSize)                            \
1319   /* Amount of loaded source code. */                              \
1320   SC(total_load_size, V8.TotalLoadSize)                            \
1321   /* Amount of parsed source code. */                              \
1322   SC(total_parse_size, V8.TotalParseSize)                          \
1323   /* Amount of source code skipped over using preparsing. */       \
1324   SC(total_preparse_skipped, V8.TotalPreparseSkipped)              \
1325   /* Amount of compiled source code. */                            \
1326   SC(total_compile_size, V8.TotalCompileSize)                      \
1327   /* Amount of source code compiled with the full codegen. */      \
1328   SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
1329   /* Number of contexts created from scratch. */                   \
1330   SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
1331   /* Number of contexts created by partial snapshot. */            \
1332   SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot)   \
1333   /* Number of code objects found from pc. */                      \
1334   SC(pc_to_code, V8.PcToCode)                                      \
1335   SC(pc_to_code_cached, V8.PcToCodeCached)                         \
1336   /* The store-buffer implementation of the write barrier. */      \
1337   SC(store_buffer_overflows, V8.StoreBufferOverflows)
1338
// Plain (non-thread-safe) counters, part 2 (split to appease MSVC, above).
1339 #define STATS_COUNTER_LIST_2(SC)                                           \
1340   /* Number of code stubs. */                                              \
1341   SC(code_stubs, V8.CodeStubs)                                             \
1342   /* Amount of stub code. */                                               \
1343   SC(total_stubs_code_size, V8.TotalStubsCodeSize)                         \
1344   /* Amount of (JS) compiled code. */                                      \
1345   SC(total_compiled_code_size, V8.TotalCompiledCodeSize)                   \
1346   SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest)        \
1347   SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
1348   SC(gc_compactor_caused_by_oldspace_exhaustion,                           \
1349      V8.GCCompactorCausedByOldspaceExhaustion)                             \
1350   SC(gc_last_resort_from_js, V8.GCLastResortFromJS)                        \
1351   SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles)              \
1352   SC(ic_keyed_load_generic_smi, V8.ICKeyedLoadGenericSmi)                  \
1353   SC(ic_keyed_load_generic_symbol, V8.ICKeyedLoadGenericSymbol)            \
1354   SC(ic_keyed_load_generic_slow, V8.ICKeyedLoadGenericSlow)                \
1355   SC(ic_named_load_global_stub, V8.ICNamedLoadGlobalStub)                  \
1356   SC(ic_store_normal_miss, V8.ICStoreNormalMiss)                           \
1357   SC(ic_store_normal_hit, V8.ICStoreNormalHit)                             \
1358   SC(ic_binary_op_miss, V8.ICBinaryOpMiss)                                 \
1359   SC(ic_compare_miss, V8.ICCompareMiss)                                    \
1360   SC(ic_call_miss, V8.ICCallMiss)                                          \
1361   SC(ic_keyed_call_miss, V8.ICKeyedCallMiss)                               \
1362   SC(ic_store_miss, V8.ICStoreMiss)                                        \
1363   SC(ic_keyed_store_miss, V8.ICKeyedStoreMiss)                             \
1364   SC(cow_arrays_converted, V8.COWArraysConverted)                          \
1365   SC(constructed_objects, V8.ConstructedObjects)                           \
1366   SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime)            \
1367   SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes)         \
1368   SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses)         \
1369   SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates)       \
1370   SC(enum_cache_hits, V8.EnumCacheHits)                                    \
1371   SC(enum_cache_misses, V8.EnumCacheMisses)                                \
1372   SC(fast_new_closure_total, V8.FastNewClosureTotal)                       \
1373   SC(string_add_runtime, V8.StringAddRuntime)                              \
1374   SC(string_add_native, V8.StringAddNative)                                \
1375   SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte)  \
1376   SC(sub_string_runtime, V8.SubStringRuntime)                              \
1377   SC(sub_string_native, V8.SubStringNative)                                \
1378   SC(regexp_entry_runtime, V8.RegExpEntryRuntime)                          \
1379   SC(regexp_entry_native, V8.RegExpEntryNative)                            \
1380   SC(math_exp_runtime, V8.MathExpRuntime)                                  \
1381   SC(math_log_runtime, V8.MathLogRuntime)                                  \
1382   SC(math_pow_runtime, V8.MathPowRuntime)                                  \
1383   SC(stack_interrupts, V8.StackInterrupts)                                 \
1384   SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks)                      \
1385   SC(runtime_calls, V8.RuntimeCalls)                                       \
1386   SC(bounds_checks_eliminated, V8.BoundsChecksEliminated)                  \
1387   SC(bounds_checks_hoisted, V8.BoundsChecksHoisted)                        \
1388   SC(soft_deopts_requested, V8.SoftDeoptsRequested)                        \
1389   SC(soft_deopts_inserted, V8.SoftDeoptsInserted)                          \
1390   SC(soft_deopts_executed, V8.SoftDeoptsExecuted)                          \
1391   /* Number of write barriers in generated code. */                        \
1392   SC(write_barriers_dynamic, V8.WriteBarriersDynamic)                      \
1393   SC(write_barriers_static, V8.WriteBarriersStatic)                        \
1394   SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable)           \
1395   SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted)           \
1396   SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed)                     \
1397   SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable)           \
1398   SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted)           \
1399   SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed)                     \
1400   SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable)         \
1401   SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted)         \
1402   SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed)                   \
1403   SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable)           \
1404   SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted)           \
1405   SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed)                     \
1406   SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable)             \
1407   SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted)             \
1408   SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)                       \
1409   /* Total code size (including metadata) of baseline code or bytecode. */ \
1410   SC(total_baseline_code_size, V8.TotalBaselineCodeSize)                   \
1411   /* Total count of functions compiled using the baseline compiler. */     \
1412   SC(total_baseline_compile_count, V8.TotalBaselineCompileCount)
1413
// Counters backed by StatsCounterThreadSafe in Counters (safe to bump from
// background threads, e.g. wasm compilation).
1414 #define STATS_COUNTER_TS_LIST(SC)                            \
1415   SC(wasm_generated_code_size, V8.WasmGeneratedCodeBytes)    \
1416   SC(wasm_reloc_size, V8.WasmRelocBytes)                     \
1417   SC(wasm_lazily_compiled_functions, V8.WasmLazilyCompiledFunctions) \
1418   SC(liftoff_compiled_functions, V8.LiftoffCompiledFunctions) \
1419   SC(liftoff_unsupported_functions, V8.LiftoffUnsupportedFunctions)
1420
1421 // This file contains all the v8 counters that are in use.
// One Counters instance per isolate. Accessor methods and backing members
// for every histogram/counter are generated by expanding the lists above;
// forwarding to the embedder happens through stats_table_.
1422 class Counters : public std::enable_shared_from_this<Counters> {
1423  public:
1424   explicit Counters(Isolate* isolate);
1425
1426   // Register an application-defined function for recording
1427   // subsequent counter statistics. Note: Must be called on the main
1428   // thread.
1429   void ResetCounterFunction(CounterLookupCallback f);
1430
1431   // Register an application-defined function to create histograms for
1432   // recording subsequent histogram samples. Note: Must be called on
1433   // the main thread.
1434   void ResetCreateHistogramFunction(CreateHistogramCallback f);
1435
1436   // Register an application-defined function to add a sample
1437   // to a histogram. Will be used in all subsequent sample additions.
1438   // Note: Must be called on the main thread.
SetAddHistogramSampleFunction(AddHistogramSampleCallback f)1439   void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
1440     stats_table_.SetAddHistogramSampleFunction(f);
1441   }
1442
1443   // Generated accessors: one getter per entry of each list above, each
1443   // returning a pointer to the corresponding member below.
1443 #define HR(name, caption, min, max, num_buckets) \
1444   Histogram* name() { return &name##_; }
1445   HISTOGRAM_RANGE_LIST(HR)
1446 #undef HR
1447
1448 #define HT(name, caption, max, res) \
1449   HistogramTimer* name() { return &name##_; }
1450   HISTOGRAM_TIMER_LIST(HT)
1451 #undef HT
1452
1453 #define HT(name, caption, max, res) \
1454   TimedHistogram* name() { return &name##_; }
1455   TIMED_HISTOGRAM_LIST(HT)
1456 #undef HT
1457
1458 #define AHT(name, caption) \
1459   AggregatableHistogramTimer* name() { return &name##_; }
1460   AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
1461 #undef AHT
1462
1463 #define HP(name, caption) \
1464   Histogram* name() { return &name##_; }
1465   HISTOGRAM_PERCENTAGE_LIST(HP)
1466 #undef HP
1467
1468 #define HM(name, caption) \
1469   Histogram* name() { return &name##_; }
1470   HISTOGRAM_LEGACY_MEMORY_LIST(HM)
1471 #undef HM
1472
1473 #define SC(name, caption) \
1474   StatsCounter* name() { return &name##_; }
1475   STATS_COUNTER_LIST_1(SC)
1476   STATS_COUNTER_LIST_2(SC)
1477 #undef SC
1478
1479 #define SC(name, caption) \
1480   StatsCounterThreadSafe* name() { return &name##_; }
1481   STATS_COUNTER_TS_LIST(SC)
1482 #undef SC
1483
1484   // clang-format off
1484   // Sequential ids covering every histogram/counter above plus per
1484   // instance-type count/size counters; stats_counter_count is the total.
1485   enum Id {
1486 #define RATE_ID(name, caption, max, res) k_##name,
1487     HISTOGRAM_TIMER_LIST(RATE_ID)
1488     TIMED_HISTOGRAM_LIST(RATE_ID)
1489 #undef RATE_ID
1490 #define AGGREGATABLE_ID(name, caption) k_##name,
1491     AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
1492 #undef AGGREGATABLE_ID
1493 #define PERCENTAGE_ID(name, caption) k_##name,
1494     HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
1495 #undef PERCENTAGE_ID
1496 #define MEMORY_ID(name, caption) k_##name,
1497     HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
1498 #undef MEMORY_ID
1499 #define COUNTER_ID(name, caption) k_##name,
1500     STATS_COUNTER_LIST_1(COUNTER_ID)
1501     STATS_COUNTER_LIST_2(COUNTER_ID)
1502     STATS_COUNTER_TS_LIST(COUNTER_ID)
1503 #undef COUNTER_ID
1504 #define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
1505     INSTANCE_TYPE_LIST(COUNTER_ID)
1506 #undef COUNTER_ID
1507 #define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
1508     kSizeOfCODE_TYPE_##name,
1509     CODE_KIND_LIST(COUNTER_ID)
1510 #undef COUNTER_ID
1511 #define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
1512     kSizeOfFIXED_ARRAY__##name,
1513     FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
1514 #undef COUNTER_ID
1515     stats_counter_count
1516   };
1517   // clang-format on
1518
runtime_call_stats()1519   RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }
1520
1521  private:
1522   friend class StatsTable;
1523   friend class StatsCounterBase;
1524   friend class Histogram;
1525   friend class HistogramTimer;
1526
1527   Isolate* isolate_;
1528   StatsTable stats_table_;
1529
1530   // Thin forwarders used by the friend classes above to reach the
1530   // embedder-provided callbacks via stats_table_.
FindLocation(const char * name)1530   int* FindLocation(const char* name) {
1531     return stats_table_.FindLocation(name);
1532   }
1533
CreateHistogram(const char * name,int min,int max,size_t buckets)1534   void* CreateHistogram(const char* name, int min, int max, size_t buckets) {
1535     return stats_table_.CreateHistogram(name, min, max, buckets);
1536   }
1537
AddHistogramSample(void * histogram,int sample)1538   void AddHistogramSample(void* histogram, int sample) {
1539     stats_table_.AddHistogramSample(histogram, sample);
1540   }
1541
isolate()1542   Isolate* isolate() { return isolate_; }
1543
1544   // Backing storage: one member (name##_) per list entry, matching the
1544   // generated accessors above.
1544 #define HR(name, caption, min, max, num_buckets) Histogram name##_;
1545   HISTOGRAM_RANGE_LIST(HR)
1546 #undef HR
1547
1548 #define HT(name, caption, max, res) HistogramTimer name##_;
1549   HISTOGRAM_TIMER_LIST(HT)
1550 #undef HT
1551
1552 #define HT(name, caption, max, res) TimedHistogram name##_;
1553   TIMED_HISTOGRAM_LIST(HT)
1554 #undef HT
1555
1556 #define AHT(name, caption) \
1557   AggregatableHistogramTimer name##_;
1558   AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
1559 #undef AHT
1560
1561 #define HP(name, caption) \
1562   Histogram name##_;
1563   HISTOGRAM_PERCENTAGE_LIST(HP)
1564 #undef HP
1565
1566 #define HM(name, caption) \
1567   Histogram name##_;
1568   HISTOGRAM_LEGACY_MEMORY_LIST(HM)
1569 #undef HM
1570
1571 #define SC(name, caption) \
1572   StatsCounter name##_;
1573   STATS_COUNTER_LIST_1(SC)
1574   STATS_COUNTER_LIST_2(SC)
1575 #undef SC
1576
1577 #define SC(name, caption) StatsCounterThreadSafe name##_;
1578   STATS_COUNTER_TS_LIST(SC)
1579 #undef SC
1580
1581 #define SC(name) \
1582   StatsCounter size_of_##name##_; \
1583   StatsCounter count_of_##name##_;
1584   INSTANCE_TYPE_LIST(SC)
1585 #undef SC
1586
1587 #define SC(name) \
1588   StatsCounter size_of_CODE_TYPE_##name##_; \
1589   StatsCounter count_of_CODE_TYPE_##name##_;
1590   CODE_KIND_LIST(SC)
1591 #undef SC
1592
1593 #define SC(name) \
1594   StatsCounter size_of_FIXED_ARRAY_##name##_; \
1595   StatsCounter count_of_FIXED_ARRAY_##name##_;
1596   FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
1597 #undef SC
1598
1599   RuntimeCallStats runtime_call_stats_;
1600
1601   DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
1602 };
1603
// Starts the timer against this histogram's owning isolate.
Start()1604 void HistogramTimer::Start() {
1605   TimedHistogram::Start(&timer_, counters()->isolate());
1606 }
1607
// Stops the timer started by Start() and records against the same isolate.
Stop()1608 void HistogramTimer::Stop() {
1609   TimedHistogram::Stop(&timer_, counters()->isolate());
1610 }
1611
// Isolate-based scope: does nothing (stats_ stays null, so the destructor
// is a no-op) unless --runtime-stats is enabled; otherwise fetches the
// isolate's RuntimeCallStats and enters the measured scope.
RuntimeCallTimerScope(Isolate * isolate,RuntimeCallCounterId counter_id)1612 RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate,
1613                                              RuntimeCallCounterId counter_id) {
1614   if (V8_LIKELY(!FLAG_runtime_stats)) return;
1615   stats_ = isolate->counters()->runtime_call_stats();
1616   stats_->Enter(&timer_, counter_id);
1617 }
1618
1619 } // namespace internal
1620 } // namespace v8
1621
1622 #endif // V8_COUNTERS_H_
1623