// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_LIBSAMPLER_SAMPLER_H_
#define V8_LIBSAMPLER_SAMPLER_H_

#include <atomic>
#include <memory>
#include <unordered_map>
#include <vector>

#include "src/base/lazy-instance.h"
#include "src/base/macros.h"

#if V8_OS_POSIX && !V8_OS_CYGWIN && !V8_OS_FUCHSIA
#define USE_SIGNALS
#endif
namespace v8 {

class Isolate;
struct RegisterState;

namespace sampler {

// ----------------------------------------------------------------------------
// Sampler
//
// A sampler periodically samples the state of the VM and optionally
// (if used for profiling) the program counter and stack pointer for
// the thread that created it.

34 class V8_EXPORT_PRIVATE Sampler {
35  public:
36   static const int kMaxFramesCountLog2 = 8;
37   static const unsigned kMaxFramesCount = (1u << kMaxFramesCountLog2) - 1;
38 
39   // Initialize sampler.
40   explicit Sampler(Isolate* isolate);
41   virtual ~Sampler();
42 
isolate()43   Isolate* isolate() const { return isolate_; }
44 
45   // Performs stack sampling.
46   // Clients should override this method in order to do something on samples,
47   // for example buffer samples in a queue.
48   virtual void SampleStack(const v8::RegisterState& regs) = 0;
49 
50   // Start and stop sampler.
51   void Start();
52   void Stop();
53 
54   // Whether the sampler is running (start has been called).
IsActive()55   bool IsActive() const { return active_.load(std::memory_order_relaxed); }
56 
57   // Returns true and consumes the pending sample bit if a sample should be
58   // dispatched to this sampler.
ShouldRecordSample()59   bool ShouldRecordSample() {
60     return record_sample_.exchange(false, std::memory_order_relaxed);
61   }
62 
63   void DoSample();
64 
65   // Used in tests to make sure that stack sampling is performed.
js_sample_count()66   unsigned js_sample_count() const { return js_sample_count_; }
external_sample_count()67   unsigned external_sample_count() const { return external_sample_count_; }
StartCountingSamples()68   void StartCountingSamples() {
69     js_sample_count_ = 0;
70     external_sample_count_ = 0;
71     is_counting_samples_ = true;
72   }
73 
74   class PlatformData;
platform_data()75   PlatformData* platform_data() const { return data_.get(); }
76 
77  protected:
78   // Counts stack samples taken in various VM states.
79   bool is_counting_samples_ = false;
80   unsigned js_sample_count_ = 0;
81   unsigned external_sample_count_ = 0;
82 
SetActive(bool value)83   void SetActive(bool value) {
84     active_.store(value, std::memory_order_relaxed);
85   }
86 
SetShouldRecordSample()87   void SetShouldRecordSample() {
88     record_sample_.store(true, std::memory_order_relaxed);
89   }
90 
91   Isolate* isolate_;
92   std::atomic_bool active_{false};
93   std::atomic_bool record_sample_{false};
94   std::unique_ptr<PlatformData> data_;  // Platform specific data.
95   DISALLOW_IMPLICIT_CONSTRUCTORS(Sampler);
96 };

#ifdef USE_SIGNALS

100 using AtomicMutex = std::atomic_bool;

// A helper that uses an std::atomic_bool to create a lock that is obtained on
// construction and released on destruction.
104 class V8_EXPORT_PRIVATE V8_NODISCARD AtomicGuard {
105  public:
106   // Attempt to obtain the lock represented by |atomic|. |is_blocking|
107   // determines whether we will block to obtain the lock, or only make one
108   // attempt to gain the lock and then stop. If we fail to gain the lock,
109   // is_success will be false.
110   explicit AtomicGuard(AtomicMutex* atomic, bool is_blocking = true);
111 
112   // Releases the lock represented by atomic, if it is held by this guard.
113   ~AtomicGuard();
114 
115   // Whether the lock was successfully obtained in the constructor. This will
116   // always be true if is_blocking was true.
117   bool is_success() const;
118 
119  private:
120   AtomicMutex* const atomic_;
121   bool is_success_;
122 };

// SamplerManager keeps a list of Samplers per thread, and allows the caller to
// take a sample for every Sampler on the current thread.
126 class V8_EXPORT_PRIVATE SamplerManager {
127  public:
128   using SamplerList = std::vector<Sampler*>;
129 
130   SamplerManager(const SamplerManager&) = delete;
131   SamplerManager& operator=(const SamplerManager&) = delete;
132 
133   // Add |sampler| to the map if it is not already present.
134   void AddSampler(Sampler* sampler);
135 
136   // If |sampler| exists in the map, remove it and delete the SamplerList if
137   // |sampler| was the last sampler in the list.
138   void RemoveSampler(Sampler* sampler);
139 
140   // Take a sample for every sampler on the current thread. This function can
141   // return without taking samples if AddSampler or RemoveSampler are being
142   // concurrently called on any thread.
143   void DoSample(const v8::RegisterState& state);
144 
145   // Get the lazily instantiated, global SamplerManager instance.
146   static SamplerManager* instance();
147 
148  private:
149   SamplerManager() = default;
150   // Must be a friend so that it can access the private constructor for the
151   // global lazy instance.
152   friend class base::LeakyObject<SamplerManager>;
153 
154   std::unordered_map<pthread_t, SamplerList> sampler_map_;
155   AtomicMutex samplers_access_counter_{false};
156 };

#endif  // USE_SIGNALS

}  // namespace sampler
}  // namespace v8

#endif  // V8_LIBSAMPLER_SAMPLER_H_