1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_LOGGING_COUNTERS_H_
6 #define V8_LOGGING_COUNTERS_H_
7
8 #include <memory>
9
10 #include "include/v8.h"
11 #include "src/base/atomic-utils.h"
12 #include "src/base/optional.h"
13 #include "src/base/platform/elapsed-timer.h"
14 #include "src/base/platform/time.h"
15 #include "src/common/globals.h"
16 #include "src/debug/debug-interface.h"
17 #include "src/execution/isolate.h"
18 #include "src/init/heap-symbols.h"
19 #include "src/logging/counters-definitions.h"
20 #include "src/logging/tracing-flags.h"
21 #include "src/objects/objects.h"
22 #include "src/runtime/runtime.h"
23 #include "src/tracing/trace-event.h"
24 #include "src/tracing/traced-value.h"
25 #include "src/tracing/tracing-category-observer.h"
26 #include "src/utils/allocation.h"
27
28 namespace v8 {
29 namespace internal {
30
31 // StatsCounters is an interface for plugging into external
32 // counters for monitoring. Counters can be looked up and
33 // manipulated by name.
34
35 class Counters;
36
37 class StatsTable {
38 public:
39 // Register an application-defined function for recording
40 // subsequent counter statistics.
41 void SetCounterFunction(CounterLookupCallback f);
42
43 // Register an application-defined function to create histograms for
44 // recording subsequent histogram samples.
  void SetCreateHistogramFunction(CreateHistogramCallback f) {
    create_histogram_function_ = f;
  }
48
49 // Register an application-defined function to add a sample
50 // to a histogram created with CreateHistogram function.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    add_histogram_sample_function_ = f;
  }
54
  bool HasCounterFunction() const { return lookup_function_ != nullptr; }
56
  // Lookup the location of a counter by name. If the lookup
  // is successful, returns a non-nullptr pointer for writing the
  // value of the counter. Each thread calling this function
  // may receive a different location to store its counter.
  // The return value must not be cached and re-used across
  // threads, although a single thread is free to cache it.
  int* FindLocation(const char* name) {
    if (!lookup_function_) return nullptr;
    return lookup_function_(name);
  }
67
68 // Create a histogram by name. If the create is successful,
69 // returns a non-nullptr pointer for use with AddHistogramSample
70 // function. min and max define the expected minimum and maximum
71 // sample values. buckets is the maximum number of buckets
72 // that the samples will be grouped into.
  void* CreateHistogram(const char* name, int min, int max, size_t buckets) {
    if (!create_histogram_function_) return nullptr;
    return create_histogram_function_(name, min, max, buckets);
  }
77
78 // Add a sample to a histogram created with the CreateHistogram
79 // function.
  void AddHistogramSample(void* histogram, int sample) {
    if (!add_histogram_sample_function_) return;
    return add_histogram_sample_function_(histogram, sample);
  }
84
85 private:
86 friend class Counters;
87
88 explicit StatsTable(Counters* counters);
89
90 CounterLookupCallback lookup_function_;
91 CreateHistogramCallback create_histogram_function_;
92 AddHistogramSampleCallback add_histogram_sample_function_;
93
94 DISALLOW_COPY_AND_ASSIGN(StatsTable);
95 };
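
// A minimal sketch (not part of this header) of how an embedder might supply
// the callbacks that the StatsTable forwards to. The map-backed storage and
// the LookupCounter helper below are hypothetical; only the requirement that
// the returned address stays usable comes from the contract documented above:
//
//   static std::unordered_map<std::string, int> g_counter_storage;
//   static int* LookupCounter(const char* name) {
//     // Addresses of unordered_map values are stable across insertions.
//     return &g_counter_storage[name];
//   }
//   ...
//   isolate->SetCounterFunction(LookupCounter);  // public v8::Isolate API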
96
97 // Base class for stats counters.
98 class StatsCounterBase {
99 protected:
100 Counters* counters_;
101 const char* name_;
102 int* ptr_;
103
104 StatsCounterBase() = default;
  StatsCounterBase(Counters* counters, const char* name)
      : counters_(counters), name_(name), ptr_(nullptr) {}
107
  void SetLoc(int* loc, int value) { *loc = value; }
  void IncrementLoc(int* loc) { (*loc)++; }
  void IncrementLoc(int* loc, int value) { (*loc) += value; }
  void DecrementLoc(int* loc) { (*loc)--; }
  void DecrementLoc(int* loc, int value) { (*loc) -= value; }
113
114 V8_EXPORT_PRIVATE int* FindLocationInStatsTable() const;
115 };
116
117 // StatsCounters are dynamically created values which can be tracked in
118 // the StatsTable. They are designed to be lightweight to create and
119 // easy to use.
120 //
121 // Internally, a counter represents a value in a row of a StatsTable.
122 // The row has a 32bit value for each process/thread in the table and also
123 // a name (stored in the table metadata). Since the storage location can be
124 // thread-specific, this class cannot be shared across threads. Note: This
125 // class is not thread safe.
126 class StatsCounter : public StatsCounterBase {
127 public:
128 // Sets the counter to a specific value.
  void Set(int value) {
    if (int* loc = GetPtr()) SetLoc(loc, value);
  }
132
133 // Increments the counter.
  void Increment() {
    if (int* loc = GetPtr()) IncrementLoc(loc);
  }
137
  void Increment(int value) {
    if (int* loc = GetPtr()) IncrementLoc(loc, value);
  }
141
142 // Decrements the counter.
  void Decrement() {
    if (int* loc = GetPtr()) DecrementLoc(loc);
  }
146
  void Decrement(int value) {
    if (int* loc = GetPtr()) DecrementLoc(loc, value);
  }
150
151 // Is this counter enabled?
152 // Returns false if table is full.
  bool Enabled() { return GetPtr() != nullptr; }
154
155 // Get the internal pointer to the counter. This is used
156 // by the code generator to emit code that manipulates a
157 // given counter without calling the runtime system.
  int* GetInternalPointer() {
    int* loc = GetPtr();
    DCHECK_NOT_NULL(loc);
    return loc;
  }
163
164 private:
165 friend class Counters;
166
167 StatsCounter() = default;
  StatsCounter(Counters* counters, const char* name)
      : StatsCounterBase(counters, name), lookup_done_(false) {}
170
171 // Reset the cached internal pointer.
  void Reset() { lookup_done_ = false; }
173
174 // Returns the cached address of this counter location.
  int* GetPtr() {
    if (lookup_done_) return ptr_;
    lookup_done_ = true;
    ptr_ = FindLocationInStatsTable();
    return ptr_;
  }
181
182 bool lookup_done_;
183 };
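
// Usage sketch (the accessor name is illustrative; the real accessors are
// generated from counters-definitions.h via the Counters class below):
//
//   StatsCounter* counter = isolate->counters()->objs_since_last_young();
//   if (counter->Enabled()) counter->Increment();
//
// Because the storage location may be thread-specific, a StatsCounter must
// only be used from the thread that looked it up.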
184
185 // Thread safe version of StatsCounter.
186 class V8_EXPORT_PRIVATE StatsCounterThreadSafe : public StatsCounterBase {
187 public:
188 void Set(int Value);
189 void Increment();
190 void Increment(int value);
191 void Decrement();
192 void Decrement(int value);
  bool Enabled() { return ptr_ != nullptr; }
  int* GetInternalPointer() {
    DCHECK_NOT_NULL(ptr_);
    return ptr_;
  }
198
199 private:
200 friend class Counters;
201
202 StatsCounterThreadSafe(Counters* counters, const char* name);
  void Reset() { ptr_ = FindLocationInStatsTable(); }
204
205 base::Mutex mutex_;
206
207 DISALLOW_IMPLICIT_CONSTRUCTORS(StatsCounterThreadSafe);
208 };
209
210 // A Histogram represents a dynamically created histogram in the
211 // StatsTable. Note: This class is thread safe.
212 class Histogram {
213 public:
214 // Add a single sample to this histogram.
215 void AddSample(int sample);
216
217 // Returns true if this histogram is enabled.
  bool Enabled() { return histogram_ != nullptr; }
219
  const char* name() { return name_; }
221
  int min() const { return min_; }
  int max() const { return max_; }
  int num_buckets() const { return num_buckets_; }
225
226 // Asserts that |expected_counters| are the same as the Counters this
227 // Histogram reports to.
  void AssertReportsToCounters(Counters* expected_counters) {
    DCHECK_EQ(counters_, expected_counters);
  }
231
232 protected:
233 Histogram() = default;
  Histogram(const char* name, int min, int max, int num_buckets,
            Counters* counters)
      : name_(name),
        min_(min),
        max_(max),
        num_buckets_(num_buckets),
        histogram_(nullptr),
        counters_(counters) {
    DCHECK(counters_);
  }
244
  Counters* counters() const { return counters_; }
246
247 // Reset the cached internal pointer.
  void Reset() { histogram_ = CreateHistogram(); }
249
250 private:
251 friend class Counters;
252
253 void* CreateHistogram() const;
254
255 const char* name_;
256 int min_;
257 int max_;
258 int num_buckets_;
259 void* histogram_;
260 Counters* counters_;
261 };
262
263 enum class HistogramTimerResolution { MILLISECOND, MICROSECOND };
264
265 // A thread safe histogram timer. It also allows distributions of
266 // nested timed results.
267 class TimedHistogram : public Histogram {
268 public:
269 // Start the timer. Log if isolate non-null.
270 V8_EXPORT_PRIVATE void Start(base::ElapsedTimer* timer, Isolate* isolate);
271
272 // Stop the timer and record the results. Log if isolate non-null.
273 V8_EXPORT_PRIVATE void Stop(base::ElapsedTimer* timer, Isolate* isolate);
274
275 // Records a TimeDelta::Max() result. Useful to record percentage of tasks
276 // that never got to run in a given scenario. Log if isolate non-null.
277 void RecordAbandon(base::ElapsedTimer* timer, Isolate* isolate);
278
279 // Add a single sample to this histogram.
280 void AddTimedSample(base::TimeDelta sample);
281
282 protected:
283 friend class Counters;
284 HistogramTimerResolution resolution_;
285
286 TimedHistogram() = default;
  TimedHistogram(const char* name, int min, int max,
                 HistogramTimerResolution resolution, int num_buckets,
                 Counters* counters)
      : Histogram(name, min, max, num_buckets, counters),
        resolution_(resolution) {}
292 void AddTimeSample();
293 };
294
295 // Helper class for scoping a TimedHistogram.
296 class TimedHistogramScope {
297 public:
298 explicit TimedHistogramScope(TimedHistogram* histogram,
299 Isolate* isolate = nullptr)
      : histogram_(histogram), isolate_(isolate) {
    histogram_->Start(&timer_, isolate);
  }
303
  ~TimedHistogramScope() { histogram_->Stop(&timer_, isolate_); }
305
306 private:
307 base::ElapsedTimer timer_;
308 TimedHistogram* histogram_;
309 Isolate* isolate_;
310
311 DISALLOW_IMPLICIT_CONSTRUCTORS(TimedHistogramScope);
312 };
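
// Usage sketch: attribute the wall-clock time of a block to a TimedHistogram.
// The histogram accessor and the work being timed are illustrative only:
//
//   {
//     TimedHistogramScope scope(
//         isolate->counters()->compile_script_on_background(), isolate);
//     DoTimedWork();  // hypothetical work being measured
//   }  // sample recorded (and logged, since isolate is non-null) here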
313
314 enum class OptionalTimedHistogramScopeMode { TAKE_TIME, DONT_TAKE_TIME };
315
316 // Helper class for scoping a TimedHistogram.
317 // It will not take time for mode = DONT_TAKE_TIME.
318 class OptionalTimedHistogramScope {
319 public:
  OptionalTimedHistogramScope(TimedHistogram* histogram, Isolate* isolate,
                              OptionalTimedHistogramScopeMode mode)
      : histogram_(histogram), isolate_(isolate), mode_(mode) {
    if (mode == OptionalTimedHistogramScopeMode::TAKE_TIME) {
      histogram_->Start(&timer_, isolate);
    }
  }
327
  ~OptionalTimedHistogramScope() {
    if (mode_ == OptionalTimedHistogramScopeMode::TAKE_TIME) {
      histogram_->Stop(&timer_, isolate_);
    }
  }
333
334 private:
335 base::ElapsedTimer timer_;
336 TimedHistogram* const histogram_;
337 Isolate* const isolate_;
338 const OptionalTimedHistogramScopeMode mode_;
339 DISALLOW_IMPLICIT_CONSTRUCTORS(OptionalTimedHistogramScope);
340 };
341
342 // Helper class for recording a TimedHistogram asynchronously with manual
343 // controls (it will not generate a report if destroyed without explicitly
344 // triggering a report). |async_counters| should be a shared_ptr to
// |histogram->counters()|, making it safe to report to an
346 // AsyncTimedHistogram after the associated isolate has been destroyed.
347 // AsyncTimedHistogram can be moved/copied to avoid computing Now() multiple
348 // times when the times of multiple tasks are identical; each copy will generate
349 // its own report.
350 class AsyncTimedHistogram {
351 public:
  explicit AsyncTimedHistogram(TimedHistogram* histogram,
                               std::shared_ptr<Counters> async_counters)
      : histogram_(histogram), async_counters_(std::move(async_counters)) {
    histogram_->AssertReportsToCounters(async_counters_.get());
    histogram_->Start(&timer_, nullptr);
  }
358
359 // Records the time elapsed to |histogram_| and stops |timer_|.
  void RecordDone() { histogram_->Stop(&timer_, nullptr); }
361
362 // Records TimeDelta::Max() to |histogram_| and stops |timer_|.
  void RecordAbandon() { histogram_->RecordAbandon(&timer_, nullptr); }
364
365 private:
366 base::ElapsedTimer timer_;
367 TimedHistogram* histogram_;
368 std::shared_ptr<Counters> async_counters_;
369 };
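
// Usage sketch: measure a task that may complete after the isolate is gone.
// The shared_ptr keeps the Counters object (and thus the histogram) alive
// until the report is made. Accessor and task names are illustrative:
//
//   AsyncTimedHistogram async_timer(counters->some_timed_histogram(),
//                                   shared_counters /* std::shared_ptr */);
//   PostTask([async_timer]() mutable {
//     DoBackgroundWork();        // hypothetical task body
//     async_timer.RecordDone();  // or RecordAbandon() if it never ran
//   });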
370
371 // Helper class for scoping a TimedHistogram, where the histogram is selected at
372 // stop time rather than start time.
373 // TODO(leszeks): This is heavily reliant on TimedHistogram::Start() doing
374 // nothing but starting the timer, and TimedHistogram::Stop() logging the sample
375 // correctly even if Start() was not called. This happens to be true iff Stop()
376 // is passed a null isolate, but that's an implementation detail of
377 // TimedHistogram, and we shouldn't rely on it.
378 class LazyTimedHistogramScope {
379 public:
  LazyTimedHistogramScope() : histogram_(nullptr) { timer_.Start(); }
  ~LazyTimedHistogramScope() {
    // We should set the histogram before this scope exits.
    DCHECK_NOT_NULL(histogram_);
    histogram_->Stop(&timer_, nullptr);
  }
386
  void set_histogram(TimedHistogram* histogram) { histogram_ = histogram; }
388
389 private:
390 base::ElapsedTimer timer_;
391 TimedHistogram* histogram_;
392 };
393
394 // A HistogramTimer allows distributions of non-nested timed results
395 // to be created. WARNING: This class is not thread safe and can only
396 // be run on the foreground thread.
397 class HistogramTimer : public TimedHistogram {
398 public:
399 // Note: public for testing purposes only.
  HistogramTimer(const char* name, int min, int max,
                 HistogramTimerResolution resolution, int num_buckets,
                 Counters* counters)
      : TimedHistogram(name, min, max, resolution, num_buckets, counters) {}
404
405 inline void Start();
406 inline void Stop();
407
408 // Returns true if the timer is running.
  bool Running() { return Enabled() && timer_.IsStarted(); }
410
411 // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
412 #ifdef DEBUG
  base::ElapsedTimer* timer() { return &timer_; }
414 #endif
415
416 private:
417 friend class Counters;
418
419 base::ElapsedTimer timer_;
420
421 HistogramTimer() = default;
422 };
423
424 // Helper class for scoping a HistogramTimer.
425 // TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
426 // Parser is currently reentrant (when it throws an error, we call back
427 // into JavaScript and all bets are off), but ElapsedTimer is not
428 // reentry-safe. Fix this properly and remove |allow_nesting|.
429 class HistogramTimerScope {
430 public:
431 explicit HistogramTimerScope(HistogramTimer* timer,
432 bool allow_nesting = false)
433 #ifdef DEBUG
      : timer_(timer), skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
441 #else
442 : timer_(timer) {
443 timer_->Start();
444 }
445 #endif
  ~HistogramTimerScope() {
#ifdef DEBUG
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }
455
456 private:
457 HistogramTimer* timer_;
458 #ifdef DEBUG
459 bool skipped_timer_start_;
460 #endif
461 };
462
463 // A histogram timer that can aggregate events within a larger scope.
464 //
465 // Intended use of this timer is to have an outer (aggregating) and an inner
// (to be aggregated) scope, where the inner scope measures the time of events,
467 // and all those inner scope measurements will be summed up by the outer scope.
468 // An example use might be to aggregate the time spent in lazy compilation
469 // while running a script.
470 //
471 // Helpers:
472 // - AggregatingHistogramTimerScope, the "outer" scope within which
473 // times will be summed up.
474 // - AggregatedHistogramTimerScope, the "inner" scope which defines the
475 // events to be timed.
476 class AggregatableHistogramTimer : public Histogram {
477 public:
478 // Start/stop the "outer" scope.
  void Start() { time_ = base::TimeDelta(); }
  void Stop() {
    if (time_ != base::TimeDelta()) {
      // Only add non-zero samples, since zero samples represent situations
      // where there were no aggregated samples added.
      AddSample(static_cast<int>(time_.InMicroseconds()));
    }
  }
487
488 // Add a time value ("inner" scope).
  void Add(base::TimeDelta other) { time_ += other; }
490
491 private:
492 friend class Counters;
493
494 AggregatableHistogramTimer() = default;
  AggregatableHistogramTimer(const char* name, int min, int max,
                             int num_buckets, Counters* counters)
      : Histogram(name, min, max, num_buckets, counters) {}
498
499 base::TimeDelta time_;
500 };
501
// A helper class for use with AggregatableHistogramTimer. This is the
// outer-most timer scope used with an AggregatableHistogramTimer. It will
// aggregate the information from the inner AggregatedHistogramTimerScope.
505 class AggregatingHistogramTimerScope {
506 public:
  explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    histogram_->Start();
  }
  ~AggregatingHistogramTimerScope() { histogram_->Stop(); }
512
513 private:
514 AggregatableHistogramTimer* histogram_;
515 };
516
// A helper class for use with AggregatableHistogramTimer, the "inner" scope
// which defines the events to be timed.
519 class AggregatedHistogramTimerScope {
520 public:
  explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
      : histogram_(histogram) {
    timer_.Start();
  }
  ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }
526
527 private:
528 base::ElapsedTimer timer_;
529 AggregatableHistogramTimer* histogram_;
530 };
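
// Usage sketch for the outer/inner pair: the outer scope brackets a whole
// script run, the inner scope brackets each lazily compiled function, and a
// single aggregate sample is recorded when the outer scope ends. The accessor
// and helper names are illustrative:
//
//   {
//     AggregatingHistogramTimerScope outer(counters->compile_lazy());
//     for (auto& fn : functions_to_compile) {
//       AggregatedHistogramTimerScope inner(counters->compile_lazy());
//       CompileLazily(fn);  // hypothetical per-function work
//     }
//   }  // one sample (the sum of the inner times) added here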
531
// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where T is controlled
// by FLAG_histogram_interval.
536 //
537 // More formally: let F be a real-valued function that maps time to sample
538 // values. We define F as a linear interpolation between adjacent samples. For
539 // each time interval [x; x + T) the backing histogram gets one sample value
540 // that is the average of F(t) in the interval.
541 template <typename Histogram>
542 class AggregatedMemoryHistogram {
543 public:
544 // Note: public for testing purposes only.
  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
      : AggregatedMemoryHistogram() {
    backing_histogram_ = backing_histogram;
  }
549
550 // Invariants that hold before and after AddSample if
551 // is_initialized_ is true:
552 //
  // 1) All samples that arrived before start_ms_ have been processed and the
  //    corresponding aggregated samples were sent to the backing histogram.
555 // 2) (last_ms_, last_value_) is the last received sample.
556 // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
557 // 4) aggregate_value_ is the average of the function that is constructed by
558 // linearly interpolating samples received between start_ms_ and last_ms_.
559 void AddSample(double current_ms, double current_value);
560
561 private:
562 friend class Counters;
563
  AggregatedMemoryHistogram()
      : is_initialized_(false),
        start_ms_(0.0),
        last_ms_(0.0),
        aggregate_value_(0.0),
        last_value_(0.0),
        backing_histogram_(nullptr) {}
571 double Aggregate(double current_ms, double current_value);
572
573 bool is_initialized_;
574 double start_ms_;
575 double last_ms_;
576 double aggregate_value_;
577 double last_value_;
578 Histogram* backing_histogram_;
579 };
580
581 template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
584 if (!is_initialized_) {
585 aggregate_value_ = current_value;
586 start_ms_ = current_ms;
587 last_value_ = current_value;
588 last_ms_ = current_ms;
589 is_initialized_ = true;
590 } else {
591 const double kEpsilon = 1e-6;
592 const int kMaxSamples = 1000;
593 if (current_ms < last_ms_ + kEpsilon) {
594 // Two samples have the same time, remember the last one.
595 last_value_ = current_value;
596 } else {
597 double sample_interval_ms = FLAG_histogram_interval;
598 double end_ms = start_ms_ + sample_interval_ms;
599 if (end_ms <= current_ms + kEpsilon) {
600 // Linearly interpolate between the last_ms_ and the current_ms.
601 double slope = (current_value - last_value_) / (current_ms - last_ms_);
602 int i;
603 // Send aggregated samples to the backing histogram from the start_ms
604 // to the current_ms.
605 for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
606 double end_value = last_value_ + (end_ms - last_ms_) * slope;
607 double sample_value;
608 if (i == 0) {
609 // Take aggregate_value_ into account.
610 sample_value = Aggregate(end_ms, end_value);
611 } else {
612 // There is no aggregate_value_ for i > 0.
613 sample_value = (last_value_ + end_value) / 2;
614 }
615 backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
616 last_value_ = end_value;
617 last_ms_ = end_ms;
618 end_ms += sample_interval_ms;
619 }
620 if (i == kMaxSamples) {
621 // We hit the sample limit, ignore the remaining samples.
622 aggregate_value_ = current_value;
623 start_ms_ = current_ms;
624 } else {
625 aggregate_value_ = last_value_;
626 start_ms_ = last_ms_;
627 }
628 }
629 aggregate_value_ = current_ms > start_ms_ + kEpsilon
630 ? Aggregate(current_ms, current_value)
631 : aggregate_value_;
632 last_value_ = current_value;
633 last_ms_ = current_ms;
634 }
635 }
636 }
637
638 template <typename Histogram>
double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
                                                       double current_value) {
641 double interval_ms = current_ms - start_ms_;
642 double value = (current_value + last_value_) / 2;
643 // The aggregate_value_ is the average for [start_ms_; last_ms_].
644 // The value is the average for [last_ms_; current_ms].
645 // Return the weighted average of the aggregate_value_ and the value.
646 return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
647 value * ((current_ms - last_ms_) / interval_ms);
648 }
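
// Worked example (assuming FLAG_histogram_interval == 10 for illustration):
// after samples (t=0ms, v=100) and (t=15ms, v=200), AddSample linearly
// interpolates v(t) = 100 + (100/15) * t. The first full interval [0ms, 10ms)
// is complete, so the backing histogram receives its average,
// (v(0) + v(10)) / 2 = (100 + 166.7) / 2 ~= 133. The partial interval starting
// at 10ms stays pending in aggregate_value_ (~183, the average of v over
// [10ms, 15ms]) until later samples close it.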
649
650 class RuntimeCallCounter final {
651 public:
  RuntimeCallCounter() : RuntimeCallCounter(nullptr) {}
  explicit RuntimeCallCounter(const char* name)
      : name_(name), count_(0), time_(0) {}
655 V8_NOINLINE void Reset();
656 V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
657 void Add(RuntimeCallCounter* other);
658
  const char* name() const { return name_; }
  int64_t count() const { return count_; }
  base::TimeDelta time() const {
    return base::TimeDelta::FromMicroseconds(time_);
  }
  void Increment() { count_++; }
  void Add(base::TimeDelta delta) { time_ += delta.InMicroseconds(); }
666
667 private:
668 friend class RuntimeCallStats;
669
670 const char* name_;
671 int64_t count_;
672 // Stored as int64_t so that its initialization can be deferred.
673 int64_t time_;
674 };
675
676 // RuntimeCallTimer is used to keep track of the stack of currently active
677 // timers used for properly measuring the own time of a RuntimeCallCounter.
678 class RuntimeCallTimer final {
679 public:
  RuntimeCallCounter* counter() { return counter_; }
  void set_counter(RuntimeCallCounter* counter) { counter_ = counter; }
  RuntimeCallTimer* parent() const { return parent_.Value(); }
  void set_parent(RuntimeCallTimer* timer) { parent_.SetValue(timer); }
  const char* name() const { return counter_->name(); }
685
686 inline bool IsStarted();
687
688 inline void Start(RuntimeCallCounter* counter, RuntimeCallTimer* parent);
689 void Snapshot();
690 inline RuntimeCallTimer* Stop();
691
692 // Make the time source configurable for testing purposes.
693 V8_EXPORT_PRIVATE static base::TimeTicks (*Now)();
694
695 // Helper to switch over to CPU time.
696 static base::TimeTicks NowCPUTime();
697
698 private:
699 inline void Pause(base::TimeTicks now);
700 inline void Resume(base::TimeTicks now);
701 inline void CommitTimeToCounter();
702
703 RuntimeCallCounter* counter_ = nullptr;
704 base::AtomicValue<RuntimeCallTimer*> parent_;
705 base::TimeTicks start_ticks_;
706 base::TimeDelta elapsed_;
707 };
708
709 #define FOR_EACH_GC_COUNTER(V) \
710 TRACER_SCOPES(V) \
711 TRACER_BACKGROUND_SCOPES(V)
712
713 #define FOR_EACH_API_COUNTER(V) \
714 V(AccessorPair_New) \
715 V(ArrayBuffer_Cast) \
716 V(ArrayBuffer_Detach) \
717 V(ArrayBuffer_New) \
718 V(ArrayBuffer_NewBackingStore) \
719 V(ArrayBuffer_BackingStore_Reallocate) \
720 V(Array_CloneElementAt) \
721 V(Array_New) \
722 V(BigInt64Array_New) \
723 V(BigInt_NewFromWords) \
724 V(BigIntObject_BigIntValue) \
725 V(BigIntObject_New) \
726 V(BigUint64Array_New) \
727 V(BooleanObject_BooleanValue) \
728 V(BooleanObject_New) \
729 V(Context_New) \
730 V(Context_NewRemoteContext) \
731 V(DataView_New) \
732 V(Date_New) \
733 V(Date_NumberValue) \
734 V(Debug_Call) \
735 V(debug_GetPrivateMembers) \
736 V(Error_New) \
737 V(External_New) \
738 V(Float32Array_New) \
739 V(Float64Array_New) \
740 V(Function_Call) \
741 V(Function_New) \
742 V(Function_FunctionProtoToString) \
743 V(Function_NewInstance) \
744 V(FunctionTemplate_GetFunction) \
745 V(FunctionTemplate_New) \
746 V(FunctionTemplate_NewRemoteInstance) \
747 V(FunctionTemplate_NewWithCache) \
748 V(FunctionTemplate_NewWithFastHandler) \
749 V(Int16Array_New) \
750 V(Int32Array_New) \
751 V(Int8Array_New) \
752 V(Isolate_DateTimeConfigurationChangeNotification) \
753 V(Isolate_LocaleConfigurationChangeNotification) \
754 V(JSON_Parse) \
755 V(JSON_Stringify) \
756 V(Map_AsArray) \
757 V(Map_Clear) \
758 V(Map_Delete) \
759 V(Map_Get) \
760 V(Map_Has) \
761 V(Map_New) \
762 V(Map_Set) \
763 V(Message_GetEndColumn) \
764 V(Message_GetLineNumber) \
765 V(Message_GetSourceLine) \
766 V(Message_GetStartColumn) \
767 V(Module_Evaluate) \
768 V(Module_InstantiateModule) \
769 V(Module_SetSyntheticModuleExport) \
770 V(NumberObject_New) \
771 V(NumberObject_NumberValue) \
772 V(Object_CallAsConstructor) \
773 V(Object_CallAsFunction) \
774 V(Object_CreateDataProperty) \
775 V(Object_DefineOwnProperty) \
776 V(Object_DefineProperty) \
777 V(Object_Delete) \
778 V(Object_DeleteProperty) \
779 V(Object_ForceSet) \
780 V(Object_Get) \
781 V(Object_GetOwnPropertyDescriptor) \
782 V(Object_GetOwnPropertyNames) \
783 V(Object_GetPropertyAttributes) \
784 V(Object_GetPropertyNames) \
785 V(Object_GetRealNamedProperty) \
786 V(Object_GetRealNamedPropertyAttributes) \
787 V(Object_GetRealNamedPropertyAttributesInPrototypeChain) \
788 V(Object_GetRealNamedPropertyInPrototypeChain) \
789 V(Object_Has) \
790 V(Object_HasOwnProperty) \
791 V(Object_HasRealIndexedProperty) \
792 V(Object_HasRealNamedCallbackProperty) \
793 V(Object_HasRealNamedProperty) \
794 V(Object_IsCodeLike) \
795 V(Object_New) \
796 V(Object_ObjectProtoToString) \
797 V(Object_Set) \
798 V(Object_SetAccessor) \
799 V(Object_SetIntegrityLevel) \
800 V(Object_SetPrivate) \
801 V(Object_SetPrototype) \
802 V(ObjectTemplate_New) \
803 V(ObjectTemplate_NewInstance) \
804 V(Object_ToArrayIndex) \
805 V(Object_ToBigInt) \
806 V(Object_ToDetailString) \
807 V(Object_ToInt32) \
808 V(Object_ToInteger) \
809 V(Object_ToNumber) \
810 V(Object_ToObject) \
811 V(Object_ToString) \
812 V(Object_ToUint32) \
813 V(Persistent_New) \
814 V(Private_New) \
815 V(Promise_Catch) \
816 V(Promise_Chain) \
817 V(Promise_HasRejectHandler) \
818 V(Promise_Resolver_New) \
819 V(Promise_Resolver_Reject) \
820 V(Promise_Resolver_Resolve) \
821 V(Promise_Result) \
822 V(Promise_Status) \
823 V(Promise_Then) \
824 V(Proxy_New) \
825 V(RangeError_New) \
826 V(ReferenceError_New) \
827 V(RegExp_Exec) \
828 V(RegExp_New) \
829 V(ScriptCompiler_Compile) \
830 V(ScriptCompiler_CompileFunctionInContext) \
831 V(ScriptCompiler_CompileUnbound) \
832 V(Script_Run) \
833 V(Set_Add) \
834 V(Set_AsArray) \
835 V(Set_Clear) \
836 V(Set_Delete) \
837 V(Set_Has) \
838 V(Set_New) \
839 V(SharedArrayBuffer_New) \
840 V(SharedArrayBuffer_NewBackingStore) \
841 V(String_Concat) \
842 V(String_NewExternalOneByte) \
843 V(String_NewExternalTwoByte) \
844 V(String_NewFromOneByte) \
845 V(String_NewFromTwoByte) \
846 V(String_NewFromUtf8) \
847 V(String_NewFromUtf8Literal) \
848 V(StringObject_New) \
849 V(StringObject_StringValue) \
850 V(String_Write) \
851 V(String_WriteUtf8) \
852 V(Symbol_New) \
853 V(SymbolObject_New) \
854 V(SymbolObject_SymbolValue) \
855 V(SyntaxError_New) \
856 V(TracedGlobal_New) \
857 V(TryCatch_StackTrace) \
858 V(TypeError_New) \
859 V(Uint16Array_New) \
860 V(Uint32Array_New) \
861 V(Uint8Array_New) \
862 V(Uint8ClampedArray_New) \
863 V(UnboundScript_GetId) \
864 V(UnboundScript_GetLineNumber) \
865 V(UnboundScript_GetName) \
866 V(UnboundScript_GetSourceMappingURL) \
867 V(UnboundScript_GetSourceURL) \
868 V(ValueDeserializer_ReadHeader) \
869 V(ValueDeserializer_ReadValue) \
870 V(ValueSerializer_WriteValue) \
871 V(Value_InstanceOf) \
872 V(Value_Int32Value) \
873 V(Value_IntegerValue) \
874 V(Value_NumberValue) \
875 V(Value_TypeOf) \
876 V(Value_Uint32Value) \
877 V(WasmCompileError_New) \
878 V(WasmLinkError_New) \
879 V(WasmRuntimeError_New) \
880 V(WeakMap_Get) \
881 V(WeakMap_New) \
882 V(WeakMap_Set)
883
884 #define ADD_THREAD_SPECIFIC_COUNTER(V, Prefix, Suffix) \
885 V(Prefix##Suffix) \
886 V(Prefix##Background##Suffix)
887
888 #define FOR_EACH_THREAD_SPECIFIC_COUNTER(V) \
889 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Analyse) \
890 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Eval) \
891 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Function) \
892 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Ignition) \
893 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, IgnitionFinalization) \
894 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, RewriteReturnResult) \
895 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, ScopeAnalysis) \
896 ADD_THREAD_SPECIFIC_COUNTER(V, Compile, Script) \
897 \
898 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateFPRegisters) \
899 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AllocateGeneralRegisters) \
900 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssembleCode) \
901 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, AssignSpillSlots) \
902 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRangeBundles) \
903 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BuildLiveRanges) \
904 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CommitAssignment) \
905 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ConnectRanges) \
906 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ControlFlowOptimization) \
907 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAEarlyOptimization) \
908 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, CSAOptimization) \
909 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecideSpillingMode) \
910 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, DecompressionOptimization) \
911 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyOptimization) \
912 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EarlyTrimming) \
913 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EffectLinearization) \
914 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, EscapeAnalysis) \
915 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterOutputDefinition) \
916 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierPopulateReferenceMaps) \
917 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierRegisterAllocator) \
918 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MidTierSpillSlotAllocator) \
919 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FinalizeCode) \
920 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, FrameElision) \
921 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, GenericLowering) \
922 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, BytecodeGraphBuilder) \
923 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Inlining) \
924 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, JumpThreading) \
925 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateGraphTrimming) \
926 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LateOptimization) \
927 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoadElimination) \
928 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LocateSpillSlots) \
929 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopExitElimination) \
930 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, LoopPeeling) \
931 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MachineOperatorOptimization) \
932 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MeetRegisterConstraints) \
933 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, MemoryOptimization) \
934 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, OptimizeMoves) \
935 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PopulatePointerMaps) \
936 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, PrintGraph) \
937 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolveControlFlow) \
938 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ResolvePhis) \
939 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, \
940 ScheduledEffectControlLinearization) \
941 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, ScheduledMachineLowering) \
942 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Scheduling) \
943 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SelectInstructions) \
944 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, SimplifiedLowering) \
945 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, StoreStoreElimination) \
946 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypeAssertions) \
947 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, TypedLowering) \
948 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Typer) \
949 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, Untyper) \
950 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, VerifyGraph) \
951 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmBaseOptimization) \
952 ADD_THREAD_SPECIFIC_COUNTER(V, Optimize, WasmFullOptimization) \
953 \
954 ADD_THREAD_SPECIFIC_COUNTER(V, Parse, ArrowFunctionLiteral) \
955 ADD_THREAD_SPECIFIC_COUNTER(V, Parse, FunctionLiteral) \
956 ADD_THREAD_SPECIFIC_COUNTER(V, Parse, Program) \
957 ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, ArrowFunctionLiteral) \
958 ADD_THREAD_SPECIFIC_COUNTER(V, PreParse, WithVariableResolution)
959
960 #define FOR_EACH_MANUAL_COUNTER(V) \
961 V(AccessorGetterCallback) \
962 V(AccessorSetterCallback) \
963 V(ArrayLengthGetter) \
964 V(ArrayLengthSetter) \
965 V(BoundFunctionLengthGetter) \
966 V(BoundFunctionNameGetter) \
967 V(CodeGenerationFromStringsCallbacks) \
968 V(CompileBackgroundCompileTask) \
969 V(CompileCollectSourcePositions) \
970 V(CompileDeserialize) \
971 V(CompileEnqueueOnDispatcher) \
972 V(CompileFinalizeBackgroundCompileTask) \
973 V(CompileFinishNowOnDispatcher) \
974 V(CompileGetFromOptimizedCodeMap) \
975 V(CompilePublishBackgroundFinalization) \
976 V(CompileSerialize) \
977 V(CompileWaitForDispatcher) \
978 V(DeoptimizeCode) \
979 V(DeserializeContext) \
980 V(DeserializeIsolate) \
981 V(FinalizationRegistryCleanupFromTask) \
982 V(FunctionCallback) \
983 V(FunctionLengthGetter) \
984 V(FunctionPrototypeGetter) \
985 V(FunctionPrototypeSetter) \
986 V(GC_Custom_AllAvailableGarbage) \
987 V(GC_Custom_IncrementalMarkingObserver) \
988 V(GC_Custom_SlowAllocateRaw) \
989 V(GCEpilogueCallback) \
990 V(GCPrologueCallback) \
991 V(Genesis) \
992 V(GetMoreDataCallback) \
993 V(IndexedDefinerCallback) \
994 V(IndexedDeleterCallback) \
995 V(IndexedDescriptorCallback) \
996 V(IndexedEnumeratorCallback) \
997 V(IndexedGetterCallback) \
998 V(IndexedQueryCallback) \
999 V(IndexedSetterCallback) \
1000 V(Invoke) \
1001 V(InvokeApiFunction) \
1002 V(InvokeApiInterruptCallbacks) \
1003 V(JS_Execution) \
1004 V(Map_SetPrototype) \
1005 V(Map_TransitionToAccessorProperty) \
1006 V(Map_TransitionToDataProperty) \
1007 V(MessageListenerCallback) \
1008 V(NamedDefinerCallback) \
1009 V(NamedDeleterCallback) \
1010 V(NamedDescriptorCallback) \
1011 V(NamedEnumeratorCallback) \
1012 V(NamedGetterCallback) \
1013 V(NamedQueryCallback) \
1014 V(NamedSetterCallback) \
1015 V(Object_DeleteProperty) \
1016 V(ObjectVerify) \
1017 V(OptimizeBackgroundDispatcherJob) \
1018 V(OptimizeCode) \
1019 V(OptimizeConcurrentPrepare) \
1020 V(OptimizeConcurrentFinalize) \
1021 V(OptimizeFinalizePipelineJob) \
1022 V(OptimizeHeapBrokerInitialization) \
1023 V(OptimizeNonConcurrent) \
1024 V(OptimizeSerialization) \
1025 V(OptimizeSerializeMetadata) \
1026 V(ParseEval) \
1027 V(ParseFunction) \
1028 V(PropertyCallback) \
1029 V(PrototypeMap_TransitionToAccessorProperty) \
1030 V(PrototypeMap_TransitionToDataProperty) \
1031 V(PrototypeObject_DeleteProperty) \
1032 V(ReconfigureToDataProperty) \
1033 V(StringLengthGetter) \
1034 V(TestCounter1) \
1035 V(TestCounter2) \
1036 V(TestCounter3)
1037
1038 #define FOR_EACH_HANDLER_COUNTER(V) \
1039 V(KeyedLoadIC_KeyedLoadSloppyArgumentsStub) \
1040 V(KeyedLoadIC_LoadElementDH) \
1041 V(KeyedLoadIC_LoadIndexedInterceptorStub) \
1042 V(KeyedLoadIC_LoadIndexedStringDH) \
1043 V(KeyedLoadIC_SlowStub) \
1044 V(KeyedStoreIC_ElementsTransitionAndStoreStub) \
1045 V(KeyedStoreIC_KeyedStoreSloppyArgumentsStub) \
1046 V(KeyedStoreIC_SlowStub) \
1047 V(KeyedStoreIC_StoreElementStub) \
1048 V(KeyedStoreIC_StoreFastElementStub) \
1049 V(LoadGlobalIC_LoadScriptContextField) \
1050 V(LoadGlobalIC_SlowStub) \
1051 V(LoadIC_FunctionPrototypeStub) \
1052 V(LoadIC_HandlerCacheHit_Accessor) \
1053 V(LoadIC_LoadAccessorDH) \
1054 V(LoadIC_LoadAccessorFromPrototypeDH) \
1055 V(LoadIC_LoadApiGetterFromPrototypeDH) \
1056 V(LoadIC_LoadCallback) \
1057 V(LoadIC_LoadConstantDH) \
1058 V(LoadIC_LoadConstantFromPrototypeDH) \
1059 V(LoadIC_LoadFieldDH) \
1060 V(LoadIC_LoadFieldFromPrototypeDH) \
1061 V(LoadIC_LoadGlobalDH) \
1062 V(LoadIC_LoadGlobalFromPrototypeDH) \
1063 V(LoadIC_LoadIntegerIndexedExoticDH) \
1064 V(LoadIC_LoadInterceptorDH) \
1065 V(LoadIC_LoadInterceptorFromPrototypeDH) \
1066 V(LoadIC_LoadNativeDataPropertyDH) \
1067 V(LoadIC_LoadNativeDataPropertyFromPrototypeDH) \
1068 V(LoadIC_LoadNonexistentDH) \
1069 V(LoadIC_LoadNonMaskingInterceptorDH) \
1070 V(LoadIC_LoadNormalDH) \
1071 V(LoadIC_LoadNormalFromPrototypeDH) \
1072 V(LoadIC_NonReceiver) \
1073 V(LoadIC_SlowStub) \
1074 V(LoadIC_StringLength) \
1075 V(LoadIC_StringWrapperLength) \
1076 V(StoreGlobalIC_SlowStub) \
1077 V(StoreGlobalIC_StoreScriptContextField) \
1078 V(StoreIC_HandlerCacheHit_Accessor) \
1079 V(StoreIC_NonReceiver) \
1080 V(StoreIC_SlowStub) \
1081 V(StoreIC_StoreAccessorDH) \
1082 V(StoreIC_StoreAccessorOnPrototypeDH) \
1083 V(StoreIC_StoreApiSetterOnPrototypeDH) \
1084 V(StoreIC_StoreFieldDH) \
1085 V(StoreIC_StoreGlobalDH) \
1086 V(StoreIC_StoreGlobalTransitionDH) \
1087 V(StoreIC_StoreInterceptorStub) \
1088 V(StoreIC_StoreNativeDataPropertyDH) \
1089 V(StoreIC_StoreNativeDataPropertyOnPrototypeDH) \
1090 V(StoreIC_StoreNormalDH) \
1091 V(StoreIC_StoreTransitionDH) \
1092 V(StoreInArrayLiteralIC_SlowStub)
1093
1094 enum RuntimeCallCounterId {
1095 #define CALL_RUNTIME_COUNTER(name) kGC_##name,
1096 FOR_EACH_GC_COUNTER(CALL_RUNTIME_COUNTER) //
1097 #undef CALL_RUNTIME_COUNTER
1098 #define CALL_RUNTIME_COUNTER(name) k##name,
1099 FOR_EACH_MANUAL_COUNTER(CALL_RUNTIME_COUNTER) //
1100 #undef CALL_RUNTIME_COUNTER
1101 #define CALL_RUNTIME_COUNTER(name, nargs, ressize) kRuntime_##name,
1102 FOR_EACH_INTRINSIC(CALL_RUNTIME_COUNTER) //
1103 #undef CALL_RUNTIME_COUNTER
1104 #define CALL_BUILTIN_COUNTER(name) kBuiltin_##name,
1105 BUILTIN_LIST_C(CALL_BUILTIN_COUNTER) //
1106 #undef CALL_BUILTIN_COUNTER
1107 #define CALL_BUILTIN_COUNTER(name) kAPI_##name,
1108 FOR_EACH_API_COUNTER(CALL_BUILTIN_COUNTER) //
1109 #undef CALL_BUILTIN_COUNTER
1110 #define CALL_BUILTIN_COUNTER(name) kHandler_##name,
1111 FOR_EACH_HANDLER_COUNTER(CALL_BUILTIN_COUNTER) //
1112 #undef CALL_BUILTIN_COUNTER
1113 #define THREAD_SPECIFIC_COUNTER(name) k##name,
1114 FOR_EACH_THREAD_SPECIFIC_COUNTER(THREAD_SPECIFIC_COUNTER) //
1115 #undef THREAD_SPECIFIC_COUNTER
1116 kNumberOfCounters,
1117 };
1118
1119 class RuntimeCallStats final {
1120 public:
1121 enum ThreadType { kMainIsolateThread, kWorkerThread };
1122
  // If kExact is chosen, the counter is used as given. With kThreadSpecific,
  // if the RuntimeCallStats was created for a worker thread, then the
  // background-specific variant of the counter is used instead.
1126 enum CounterMode { kExact, kThreadSpecific };
1127
1128 explicit V8_EXPORT_PRIVATE RuntimeCallStats(ThreadType thread_type);
1129
  // Start measuring the time for a function. This will establish the
  // connection to the parent counter for properly calculating the own times.
1132 V8_EXPORT_PRIVATE void Enter(RuntimeCallTimer* timer,
1133 RuntimeCallCounterId counter_id);
1134
1135 // Leave a scope for a measured runtime function. This will properly add
1136 // the time delta to the current_counter and subtract the delta from its
1137 // parent.
1138 V8_EXPORT_PRIVATE void Leave(RuntimeCallTimer* timer);
1139
1140 // Set counter id for the innermost measurement. It can be used to refine
1141 // event kind when a runtime entry counter is too generic.
1142 V8_EXPORT_PRIVATE void CorrectCurrentCounterId(
1143 RuntimeCallCounterId counter_id, CounterMode mode = kExact);
1144
1145 V8_EXPORT_PRIVATE void Reset();
1146 // Add all entries from another stats object.
1147 void Add(RuntimeCallStats* other);
1148 V8_EXPORT_PRIVATE void Print(std::ostream& os);
1149 V8_EXPORT_PRIVATE void Print();
1150 V8_NOINLINE void Dump(v8::tracing::TracedValue* value);
1151
1152 V8_EXPORT_PRIVATE void EnumerateCounters(
1153 debug::RuntimeCallCounterCallback callback);
1154
  ThreadId thread_id() const { return thread_id_; }
  RuntimeCallTimer* current_timer() { return current_timer_.Value(); }
  RuntimeCallCounter* current_counter() { return current_counter_.Value(); }
  bool InUse() { return in_use_; }
1159 bool IsCalledOnTheSameThread();
1160
1161 V8_EXPORT_PRIVATE bool IsBackgroundThreadSpecificVariant(
1162 RuntimeCallCounterId id);
1163 V8_EXPORT_PRIVATE bool HasThreadSpecificCounterVariants(
1164 RuntimeCallCounterId id);
1165
  // This should only be called for counters with a dual Background variant.
  // If on the main thread, this just returns the counter. If on a worker
  // thread, it returns the Background variant of the counter.
  RuntimeCallCounterId CounterIdForThread(RuntimeCallCounterId id) {
    DCHECK(HasThreadSpecificCounterVariants(id));
    // All thread specific counters are laid out with the main thread variant
    // first followed by the background variant.
    return thread_type_ == kWorkerThread
               ? static_cast<RuntimeCallCounterId>(id + 1)
               : id;
  }
1177
  bool IsCounterAppropriateForThread(RuntimeCallCounterId id) {
    // TODO(delphick): We should add background-only counters and ensure that
    // all counters (not just the thread-specific variants) are only invoked
    // on the correct thread.
    if (!HasThreadSpecificCounterVariants(id)) return true;
    return IsBackgroundThreadSpecificVariant(id) ==
           (thread_type_ == kWorkerThread);
  }
1186
1187 static const int kNumberOfCounters =
1188 static_cast<int>(RuntimeCallCounterId::kNumberOfCounters);
  RuntimeCallCounter* GetCounter(RuntimeCallCounterId counter_id) {
    return &counters_[static_cast<int>(counter_id)];
  }
  RuntimeCallCounter* GetCounter(int counter_id) {
    return &counters_[counter_id];
  }
1195
1196 private:
1197 // Top of a stack of active timers.
1198 base::AtomicValue<RuntimeCallTimer*> current_timer_;
1199 // Active counter object associated with current timer.
1200 base::AtomicValue<RuntimeCallCounter*> current_counter_;
1201 // Used to track nested tracing scopes.
1202 bool in_use_;
1203 ThreadType thread_type_;
1204 ThreadId thread_id_;
1205 RuntimeCallCounter counters_[kNumberOfCounters];
1206 };
1207
1208 class WorkerThreadRuntimeCallStats final {
1209 public:
1210 WorkerThreadRuntimeCallStats();
1211 ~WorkerThreadRuntimeCallStats();
1212
1213 // Returns the TLS key associated with this WorkerThreadRuntimeCallStats.
1214 base::Thread::LocalStorageKey GetKey();
1215
1216 // Returns a new worker thread runtime call stats table managed by this
1217 // WorkerThreadRuntimeCallStats.
1218 RuntimeCallStats* NewTable();
1219
1220 // Adds the counters from the worker thread tables to |main_call_stats|.
1221 void AddToMainTable(RuntimeCallStats* main_call_stats);
1222
1223 private:
1224 base::Mutex mutex_;
1225 std::vector<std::unique_ptr<RuntimeCallStats>> tables_;
1226 base::Optional<base::Thread::LocalStorageKey> tls_key_;
1227 // Since this is for creating worker thread runtime-call stats, record the
1228 // main thread ID to ensure we never create a worker RCS table for the main
1229 // thread.
1230 ThreadId isolate_thread_id_;
1231 };
1232
1233 // Creating a WorkerThreadRuntimeCallStatsScope will provide a thread-local
1234 // runtime call stats table, and will dump the table to an immediate trace event
1235 // when it is destroyed.
1236 class WorkerThreadRuntimeCallStatsScope final {
1237 public:
1238 explicit WorkerThreadRuntimeCallStatsScope(
1239 WorkerThreadRuntimeCallStats* off_thread_stats);
1240 ~WorkerThreadRuntimeCallStatsScope();
1241
  RuntimeCallStats* Get() const { return table_; }
1243
1244 private:
1245 RuntimeCallStats* table_;
1246 };
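
// Usage sketch: a background task creates a scope to get its thread-local
// table; the table is dumped to a trace event when the scope is destroyed.
// The surrounding task and member names are illustrative:
//
//   void BackgroundCompileTask::Run() {
//     WorkerThreadRuntimeCallStatsScope rcs_scope(
//         isolate_->counters()->worker_thread_runtime_call_stats());
//     RuntimeCallTimerScope timer(
//         rcs_scope.Get(), RuntimeCallCounterId::kCompileBackgroundScript);
//     // ... background work attributed to kCompileBackgroundScript ...
//   }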
1247
1248 #define CHANGE_CURRENT_RUNTIME_COUNTER(runtime_call_stats, counter_id) \
1249 do { \
1250 if (V8_UNLIKELY(TracingFlags::is_runtime_stats_enabled()) && \
1251 runtime_call_stats) { \
1252 runtime_call_stats->CorrectCurrentCounterId(counter_id); \
1253 } \
1254 } while (false)
1255
1256 #define TRACE_HANDLER_STATS(isolate, counter_name) \
1257 CHANGE_CURRENT_RUNTIME_COUNTER( \
1258 isolate->counters()->runtime_call_stats(), \
1259 RuntimeCallCounterId::kHandler_##counter_name)
1260
// A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the
// time of a C++ scope.
1263 class RuntimeCallTimerScope {
1264 public:
1265 inline RuntimeCallTimerScope(Isolate* isolate,
1266 RuntimeCallCounterId counter_id);
1267 inline RuntimeCallTimerScope(RuntimeCallStats* stats,
1268 RuntimeCallCounterId counter_id,
1269 RuntimeCallStats::CounterMode mode =
1270 RuntimeCallStats::CounterMode::kExact) {
1271 if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled() ||
1272 stats == nullptr)) {
1273 return;
1274 }
1275 stats_ = stats;
1276 if (mode == RuntimeCallStats::CounterMode::kThreadSpecific) {
1277 counter_id = stats->CounterIdForThread(counter_id);
1278 }
1279
1280 DCHECK(stats->IsCounterAppropriateForThread(counter_id));
1281 stats_->Enter(&timer_, counter_id);
1282 }
1283
  inline ~RuntimeCallTimerScope() {
    if (V8_UNLIKELY(stats_ != nullptr)) {
      stats_->Leave(&timer_);
    }
  }
1289
1290 private:
1291 RuntimeCallStats* stats_ = nullptr;
1292 RuntimeCallTimer timer_;
1293
1294 DISALLOW_COPY_AND_ASSIGN(RuntimeCallTimerScope);
1295 };
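
// Usage sketch: charge the time spent in a C++ scope to a RuntimeCallCounter.
// The enclosing function is hypothetical; the counter id is one generated by
// the lists above:
//
//   void AnalyzeProgram(Isolate* isolate) {
//     RuntimeCallTimerScope rcs_scope(isolate,
//                                     RuntimeCallCounterId::kCompileAnalyse);
//     // Time spent here is recorded as the "own time" of kCompileAnalyse and
//     // automatically excluded from the parent scope's own time.
//   }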
1296
1297 // This file contains all the v8 counters that are in use.
1298 class Counters : public std::enable_shared_from_this<Counters> {
1299 public:
1300 explicit Counters(Isolate* isolate);
1301
1302 // Register an application-defined function for recording
1303 // subsequent counter statistics. Note: Must be called on the main
1304 // thread.
1305 void ResetCounterFunction(CounterLookupCallback f);
1306
1307 // Register an application-defined function to create histograms for
1308 // recording subsequent histogram samples. Note: Must be called on
1309 // the main thread.
1310 void ResetCreateHistogramFunction(CreateHistogramCallback f);
1311
1312 // Register an application-defined function to add a sample
1313 // to a histogram. Will be used in all subsequent sample additions.
1314 // Note: Must be called on the main thread.
  void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
    stats_table_.SetAddHistogramSampleFunction(f);
  }
1318
1319 #define HR(name, caption, min, max, num_buckets) \
1320 Histogram* name() { return &name##_; }
1321 HISTOGRAM_RANGE_LIST(HR)
1322 #undef HR
1323
1324 #define HT(name, caption, max, res) \
1325 HistogramTimer* name() { return &name##_; }
1326 HISTOGRAM_TIMER_LIST(HT)
1327 #undef HT
1328
1329 #define HT(name, caption, max, res) \
1330 TimedHistogram* name() { return &name##_; }
1331 TIMED_HISTOGRAM_LIST(HT)
1332 #undef HT
1333
1334 #define AHT(name, caption) \
1335 AggregatableHistogramTimer* name() { return &name##_; }
1336 AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
1337 #undef AHT
1338
1339 #define HP(name, caption) \
1340 Histogram* name() { return &name##_; }
1341 HISTOGRAM_PERCENTAGE_LIST(HP)
1342 #undef HP
1343
1344 #define HM(name, caption) \
1345 Histogram* name() { return &name##_; }
1346 HISTOGRAM_LEGACY_MEMORY_LIST(HM)
1347 #undef HM
1348
1349 #define SC(name, caption) \
1350 StatsCounter* name() { return &name##_; }
1351 STATS_COUNTER_LIST_1(SC)
1352 STATS_COUNTER_LIST_2(SC)
1353 STATS_COUNTER_NATIVE_CODE_LIST(SC)
1354 #undef SC
1355
1356 #define SC(name, caption) \
1357 StatsCounterThreadSafe* name() { return &name##_; }
1358 STATS_COUNTER_TS_LIST(SC)
1359 #undef SC
1360
1361 // clang-format off
1362 enum Id {
1363 #define RATE_ID(name, caption, max, res) k_##name,
1364 HISTOGRAM_TIMER_LIST(RATE_ID)
1365 TIMED_HISTOGRAM_LIST(RATE_ID)
1366 #undef RATE_ID
1367 #define AGGREGATABLE_ID(name, caption) k_##name,
1368 AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
1369 #undef AGGREGATABLE_ID
1370 #define PERCENTAGE_ID(name, caption) k_##name,
1371 HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
1372 #undef PERCENTAGE_ID
1373 #define MEMORY_ID(name, caption) k_##name,
1374 HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
1375 #undef MEMORY_ID
1376 #define COUNTER_ID(name, caption) k_##name,
1377 STATS_COUNTER_LIST_1(COUNTER_ID)
1378 STATS_COUNTER_LIST_2(COUNTER_ID)
1379 STATS_COUNTER_TS_LIST(COUNTER_ID)
1380 STATS_COUNTER_NATIVE_CODE_LIST(COUNTER_ID)
1381 #undef COUNTER_ID
1382 #define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
1383 INSTANCE_TYPE_LIST(COUNTER_ID)
1384 #undef COUNTER_ID
1385 #define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
1386 kSizeOfCODE_TYPE_##name,
1387 CODE_KIND_LIST(COUNTER_ID)
1388 #undef COUNTER_ID
1389 #define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
1390 kSizeOfFIXED_ARRAY__##name,
1391 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
1392 #undef COUNTER_ID
1393 stats_counter_count
1394 };
1395 // clang-format on
1396
  RuntimeCallStats* runtime_call_stats() { return &runtime_call_stats_; }
1398
  WorkerThreadRuntimeCallStats* worker_thread_runtime_call_stats() {
    return &worker_thread_runtime_call_stats_;
  }
1402
1403 private:
1404 friend class StatsTable;
1405 friend class StatsCounterBase;
1406 friend class Histogram;
1407 friend class HistogramTimer;
1408
1409 Isolate* isolate_;
1410 StatsTable stats_table_;
1411
  int* FindLocation(const char* name) {
    return stats_table_.FindLocation(name);
  }
1415
  void* CreateHistogram(const char* name, int min, int max, size_t buckets) {
    return stats_table_.CreateHistogram(name, min, max, buckets);
  }
1419
  void AddHistogramSample(void* histogram, int sample) {
    stats_table_.AddHistogramSample(histogram, sample);
  }
1423
  Isolate* isolate() { return isolate_; }
1425
1426 #define HR(name, caption, min, max, num_buckets) Histogram name##_;
1427 HISTOGRAM_RANGE_LIST(HR)
1428 #undef HR
1429
1430 #define HT(name, caption, max, res) HistogramTimer name##_;
1431 HISTOGRAM_TIMER_LIST(HT)
1432 #undef HT
1433
1434 #define HT(name, caption, max, res) TimedHistogram name##_;
1435 TIMED_HISTOGRAM_LIST(HT)
1436 #undef HT
1437
1438 #define AHT(name, caption) AggregatableHistogramTimer name##_;
1439 AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
1440 #undef AHT
1441
1442 #define HP(name, caption) Histogram name##_;
1443 HISTOGRAM_PERCENTAGE_LIST(HP)
1444 #undef HP
1445
1446 #define HM(name, caption) Histogram name##_;
1447 HISTOGRAM_LEGACY_MEMORY_LIST(HM)
1448 #undef HM
1449
1450 #define SC(name, caption) StatsCounter name##_;
1451 STATS_COUNTER_LIST_1(SC)
1452 STATS_COUNTER_LIST_2(SC)
1453 STATS_COUNTER_NATIVE_CODE_LIST(SC)
1454 #undef SC
1455
1456 #define SC(name, caption) StatsCounterThreadSafe name##_;
1457 STATS_COUNTER_TS_LIST(SC)
1458 #undef SC
1459
1460 #define SC(name) \
1461 StatsCounter size_of_##name##_; \
1462 StatsCounter count_of_##name##_;
1463 INSTANCE_TYPE_LIST(SC)
1464 #undef SC
1465
1466 #define SC(name) \
1467 StatsCounter size_of_CODE_TYPE_##name##_; \
1468 StatsCounter count_of_CODE_TYPE_##name##_;
1469 CODE_KIND_LIST(SC)
1470 #undef SC
1471
1472 #define SC(name) \
1473 StatsCounter size_of_FIXED_ARRAY_##name##_; \
1474 StatsCounter count_of_FIXED_ARRAY_##name##_;
1475 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
1476 #undef SC
1477
1478 RuntimeCallStats runtime_call_stats_;
1479 WorkerThreadRuntimeCallStats worker_thread_runtime_call_stats_;
1480
1481 DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
1482 };
1483
void HistogramTimer::Start() {
  TimedHistogram::Start(&timer_, counters()->isolate());
}

void HistogramTimer::Stop() {
  TimedHistogram::Stop(&timer_, counters()->isolate());
}
1491
RuntimeCallTimerScope::RuntimeCallTimerScope(Isolate* isolate,
                                             RuntimeCallCounterId counter_id) {
1494 if (V8_LIKELY(!TracingFlags::is_runtime_stats_enabled())) return;
1495 stats_ = isolate->counters()->runtime_call_stats();
1496 stats_->Enter(&timer_, counter_id);
1497 }
1498
1499 } // namespace internal
1500 } // namespace v8
1501
1502 #endif // V8_LOGGING_COUNTERS_H_
1503