// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_LIBSAMPLER_SAMPLER_H_
#define V8_LIBSAMPLER_SAMPLER_H_

#include <atomic>
#include <memory>
#include <unordered_map>
#include <vector>

#include "include/v8.h"
#include "src/base/lazy-instance.h"
#include "src/base/macros.h"

#if V8_OS_POSIX && !V8_OS_CYGWIN && !V8_OS_FUCHSIA
#define USE_SIGNALS
#endif

namespace v8 {
namespace sampler {

// ----------------------------------------------------------------------------
// Sampler
//
// A sampler periodically samples the state of the VM and optionally
// (if used for profiling) the program counter and stack pointer for
// the thread that created it.

class V8_EXPORT_PRIVATE Sampler {
 public:
  static const int kMaxFramesCountLog2 = 8;
  static const unsigned kMaxFramesCount = (1u << kMaxFramesCountLog2) - 1;

  // Initialize sampler.
  explicit Sampler(Isolate* isolate);
  virtual ~Sampler();

  Isolate* isolate() const { return isolate_; }

  // Performs stack sampling.
  // Clients should override this method in order to do something on samples,
  // for example buffer samples in a queue.
  virtual void SampleStack(const v8::RegisterState& regs) = 0;

  // Start and stop sampler.
  void Start();
  void Stop();

  // Whether the sampler is running (start has been called).
  bool IsActive() const { return active_.load(std::memory_order_relaxed); }

  // Returns true and consumes the pending sample bit if a sample should be
  // dispatched to this sampler.
  bool ShouldRecordSample() {
    return record_sample_.exchange(false, std::memory_order_relaxed);
  }

  void DoSample();

  // Used in tests to make sure that stack sampling is performed.
  unsigned js_sample_count() const { return js_sample_count_; }
  unsigned external_sample_count() const { return external_sample_count_; }
  void StartCountingSamples() {
    js_sample_count_ = 0;
    external_sample_count_ = 0;
    is_counting_samples_ = true;
  }

  class PlatformData;
  PlatformData* platform_data() const { return data_.get(); }

 protected:
  // Counts stack samples taken in various VM states.
  bool is_counting_samples_ = false;
  unsigned js_sample_count_ = 0;
  unsigned external_sample_count_ = 0;

  void SetActive(bool value) {
    active_.store(value, std::memory_order_relaxed);
  }

  void SetShouldRecordSample() {
    record_sample_.store(true, std::memory_order_relaxed);
  }

  Isolate* isolate_;
  std::atomic_bool active_{false};
  std::atomic_bool record_sample_{false};
  std::unique_ptr<PlatformData> data_;  // Platform specific data.
  DISALLOW_IMPLICIT_CONSTRUCTORS(Sampler);
};
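
// Illustrative usage sketch (not part of this header): clients are expected to
// subclass Sampler and override SampleStack(). The BufferingSampler name and
// its fixed-size buffer are hypothetical; they only demonstrate the
// override-then-Start()/Stop() pattern described in the comments above.
//
//   class BufferingSampler : public v8::sampler::Sampler {
//    public:
//     explicit BufferingSampler(v8::Isolate* isolate) : Sampler(isolate) {}
//
//     void SampleStack(const v8::RegisterState& regs) override {
//       // May run while the sampled thread is interrupted, so avoid
//       // allocation: record the program counter into a pre-allocated buffer.
//       if (count_ < kCapacity) pcs_[count_++] = regs.pc;
//     }
//
//    private:
//     static const size_t kCapacity = 1024;
//     void* pcs_[kCapacity] = {};
//     size_t count_ = 0;
//   };
//
//   BufferingSampler sampler(isolate);
//   sampler.Start();
//   // ... profile ...
//   sampler.Stop();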

#ifdef USE_SIGNALS

using AtomicMutex = std::atomic_bool;

// A helper that uses an std::atomic_bool to create a lock that is obtained on
// construction and released on destruction.
class V8_EXPORT_PRIVATE AtomicGuard {
 public:
  // Attempt to obtain the lock represented by |atomic|. |is_blocking|
  // determines whether we will block to obtain the lock, or only make one
  // attempt to gain the lock and then stop. If we fail to gain the lock,
  // is_success will be false.
  explicit AtomicGuard(AtomicMutex* atomic, bool is_blocking = true);

  // Releases the lock represented by atomic, if it is held by this guard.
  ~AtomicGuard();

  // Whether the lock was successfully obtained in the constructor. This will
  // always be true if is_blocking was true.
  bool is_success() const;

 private:
  AtomicMutex* const atomic_;
  bool is_success_;
};
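
// Illustrative usage sketch (not from this header): AtomicGuard behaves like a
// scoped try-lock over an AtomicMutex. With is_blocking = false the caller must
// check is_success() before touching the guarded state; the lock is released
// when the guard goes out of scope.
//
//   AtomicMutex flag{false};
//   {
//     AtomicGuard guard(&flag, /*is_blocking=*/false);
//     if (guard.is_success()) {
//       // Safe to touch the state protected by |flag| here.
//     }
//   }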

// SamplerManager keeps a list of Samplers per thread, and allows the caller to
// take a sample for every Sampler on the current thread.
class V8_EXPORT_PRIVATE SamplerManager {
 public:
  using SamplerList = std::vector<Sampler*>;

  // Add |sampler| to the map if it is not already present.
  void AddSampler(Sampler* sampler);

  // If |sampler| exists in the map, remove it and delete the SamplerList if
  // |sampler| was the last sampler in the list.
  void RemoveSampler(Sampler* sampler);

  // Take a sample for every sampler on the current thread. This function can
  // return without taking samples if AddSampler or RemoveSampler are being
  // concurrently called on any thread.
  void DoSample(const v8::RegisterState& state);

  // Get the lazily instantiated, global SamplerManager instance.
  static SamplerManager* instance();

 private:
  SamplerManager() = default;
  // Must be a friend so that it can access the private constructor for the
  // global lazy instance.
  friend class base::LeakyObject<SamplerManager>;

  std::unordered_map<pthread_t, SamplerList> sampler_map_;
  AtomicMutex samplers_access_counter_{false};

  DISALLOW_COPY_AND_ASSIGN(SamplerManager);
};
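
// Illustrative usage sketch (not from this header): platform code is expected
// to register a sampler with the manager and route samples through it, roughly:
//
//   SamplerManager* manager = SamplerManager::instance();
//   manager->AddSampler(sampler);     // e.g. when the sampler is started.
//
//   v8::RegisterState state;          // Filled in from the interrupted thread.
//   manager->DoSample(state);         // Samples every sampler on this thread.
//
//   manager->RemoveSampler(sampler);  // e.g. when the sampler is stopped.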

#endif  // USE_SIGNALS

}  // namespace sampler
}  // namespace v8

#endif  // V8_LIBSAMPLER_SAMPLER_H_