// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_PROFILER_PROFILE_GENERATOR_H_
#define V8_PROFILER_PROFILE_GENERATOR_H_

#include <atomic>
#include <deque>
#include <limits>
#include <map>
#include <memory>
#include <unordered_map>
#include <unordered_set>  // For std::unordered_set used by CodeEntry below.
#include <utility>
#include <vector>

#include "include/v8-profiler.h"
#include "src/base/platform/time.h"
#include "src/builtins/builtins.h"
#include "src/logging/code-events.h"
#include "src/profiler/strings-storage.h"
#include "src/utils/allocation.h"

namespace v8 {
namespace internal {

struct TickSample;

// Provides a mapping from the offsets within generated code or a bytecode
// array to the source line and inlining id.
class V8_EXPORT_PRIVATE SourcePositionTable : public Malloced {
 public:
  SourcePositionTable() = default;

  void SetPosition(int pc_offset, int line, int inlining_id);
  int GetSourceLineNumber(int pc_offset) const;
  int GetInliningId(int pc_offset) const;

  void print() const;

 private:
  struct SourcePositionTuple {
    bool operator<(const SourcePositionTuple& other) const {
      return pc_offset < other.pc_offset;
    }
    int pc_offset;
    int line_number;
    int inlining_id;
  };
  // This is logically a map, but we store it as a vector of tuples, sorted by
  // the pc offset, so that we can save space and look up items using binary
  // search.
  std::vector<SourcePositionTuple> pc_offsets_to_lines_;
  DISALLOW_COPY_AND_ASSIGN(SourcePositionTable);
};
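
// A sketch of the lookup the sorted-vector layout above enables (illustrative
// only; the real implementation lives in profile-generator.cc). Because the
// tuples are ordered by pc_offset, GetSourceLineNumber() can binary-search
// with std::lower_bound and fall back to the closest preceding entry:
//
//   int SourcePositionTable::GetSourceLineNumber(int pc_offset) const {
//     if (pc_offsets_to_lines_.empty())
//       return v8::CpuProfileNode::kNoLineNumberInfo;
//     auto it = std::lower_bound(
//         pc_offsets_to_lines_.begin(), pc_offsets_to_lines_.end(),
//         SourcePositionTuple{pc_offset, 0, 0});
//     // lower_bound finds the first tuple with pc_offset >= the query; step
//     // back to the preceding tuple unless this one is an exact match.
//     if (it != pc_offsets_to_lines_.begin() &&
//         (it == pc_offsets_to_lines_.end() || it->pc_offset != pc_offset)) {
//       --it;
//     }
//     return it->line_number;
//   }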

struct CodeEntryAndLineNumber;

class CodeEntry {
 public:
  // CodeEntry doesn't own name strings, just references them.
  inline CodeEntry(CodeEventListener::LogEventsAndTags tag, const char* name,
                   const char* resource_name = CodeEntry::kEmptyResourceName,
                   int line_number = v8::CpuProfileNode::kNoLineNumberInfo,
                   int column_number = v8::CpuProfileNode::kNoColumnNumberInfo,
                   std::unique_ptr<SourcePositionTable> line_info = nullptr,
                   bool is_shared_cross_origin = false);

  const char* name() const { return name_; }
  const char* resource_name() const { return resource_name_; }
  int line_number() const { return line_number_; }
  int column_number() const { return column_number_; }
  const SourcePositionTable* line_info() const { return line_info_.get(); }
  int script_id() const { return script_id_; }
  void set_script_id(int script_id) { script_id_ = script_id; }
  int position() const { return position_; }
  void set_position(int position) { position_ = position; }
  void set_bailout_reason(const char* bailout_reason) {
    EnsureRareData()->bailout_reason_ = bailout_reason;
  }
  const char* bailout_reason() const {
    return rare_data_ ? rare_data_->bailout_reason_ : kEmptyBailoutReason;
  }

  void set_deopt_info(const char* deopt_reason, int deopt_id,
                      std::vector<CpuProfileDeoptFrame> inlined_frames);

  CpuProfileDeoptInfo GetDeoptInfo();
  bool has_deopt_info() const {
    return rare_data_ && rare_data_->deopt_id_ != kNoDeoptimizationId;
  }
  void clear_deopt_info() {
    if (!rare_data_) return;
    // TODO(alph): Clear rare_data_ if that was the only field in use.
    rare_data_->deopt_reason_ = kNoDeoptReason;
    rare_data_->deopt_id_ = kNoDeoptimizationId;
  }
  void mark_used() { bit_field_ = UsedField::update(bit_field_, true); }
  bool used() const { return UsedField::decode(bit_field_); }

  void FillFunctionInfo(SharedFunctionInfo shared);

  void SetBuiltinId(Builtins::Name id);
  Builtins::Name builtin_id() const {
    return BuiltinIdField::decode(bit_field_);
  }

  bool is_shared_cross_origin() const {
    return SharedCrossOriginField::decode(bit_field_);
  }

  uint32_t GetHash() const;
  bool IsSameFunctionAs(const CodeEntry* entry) const;

  int GetSourceLine(int pc_offset) const;

  struct Equals {
    bool operator()(const std::unique_ptr<CodeEntry>& lhs,
                    const std::unique_ptr<CodeEntry>& rhs) const {
      return lhs.get()->IsSameFunctionAs(rhs.get());
    }
  };
  struct Hasher {
    std::size_t operator()(const std::unique_ptr<CodeEntry>& e) const {
      return e->GetHash();
    }
  };

  void SetInlineStacks(
      std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
          inline_entries,
      std::unordered_map<int, std::vector<CodeEntryAndLineNumber>>
          inline_stacks);
  const std::vector<CodeEntryAndLineNumber>* GetInlineStack(
      int pc_offset) const;

  CodeEventListener::LogEventsAndTags tag() const {
    return TagField::decode(bit_field_);
  }

  static const char* const kWasmResourceNamePrefix;
  V8_EXPORT_PRIVATE static const char* const kEmptyResourceName;
  static const char* const kEmptyBailoutReason;
  static const char* const kNoDeoptReason;

  V8_EXPORT_PRIVATE static const char* const kProgramEntryName;
  V8_EXPORT_PRIVATE static const char* const kIdleEntryName;
  static const char* const kGarbageCollectorEntryName;
  // Used to represent frames for which we have no reliable way to
  // detect the associated function.
  V8_EXPORT_PRIVATE static const char* const kUnresolvedFunctionName;
  V8_EXPORT_PRIVATE static const char* const kRootEntryName;

  V8_INLINE static CodeEntry* program_entry() {
    return kProgramEntry.Pointer();
  }
  V8_INLINE static CodeEntry* idle_entry() { return kIdleEntry.Pointer(); }
  V8_INLINE static CodeEntry* gc_entry() { return kGCEntry.Pointer(); }
  V8_INLINE static CodeEntry* unresolved_entry() {
    return kUnresolvedEntry.Pointer();
  }
  V8_INLINE static CodeEntry* root_entry() { return kRootEntry.Pointer(); }

  void print() const;

 private:
  struct RareData {
    const char* deopt_reason_ = kNoDeoptReason;
    const char* bailout_reason_ = kEmptyBailoutReason;
    int deopt_id_ = kNoDeoptimizationId;
    std::unordered_map<int, std::vector<CodeEntryAndLineNumber>> inline_stacks_;
    std::unordered_set<std::unique_ptr<CodeEntry>, Hasher, Equals>
        inline_entries_;
    std::vector<CpuProfileDeoptFrame> deopt_inlined_frames_;
  };

  RareData* EnsureRareData();

  struct V8_EXPORT_PRIVATE ProgramEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE IdleEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE GCEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE UnresolvedEntryCreateTrait {
    static CodeEntry* Create();
  };
  struct V8_EXPORT_PRIVATE RootEntryCreateTrait {
    static CodeEntry* Create();
  };

  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, ProgramEntryCreateTrait>::type kProgramEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, IdleEntryCreateTrait>::type kIdleEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, GCEntryCreateTrait>::type kGCEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, UnresolvedEntryCreateTrait>::type kUnresolvedEntry;
  V8_EXPORT_PRIVATE static base::LazyDynamicInstance<
      CodeEntry, RootEntryCreateTrait>::type kRootEntry;

  using TagField = base::BitField<CodeEventListener::LogEventsAndTags, 0, 8>;
  using BuiltinIdField = base::BitField<Builtins::Name, 8, 22>;
  static_assert(Builtins::builtin_count <= BuiltinIdField::kNumValues,
                "builtin_count exceeds size of bitfield");
  using UsedField = base::BitField<bool, 30, 1>;
  using SharedCrossOriginField = base::BitField<bool, 31, 1>;
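
  // For orientation, a sketch of how these fields pack into bit_field_
  // (inferred from the shift/size parameters above, not normative):
  //
  //   bits  0..7   TagField               (LogEventsAndTags)
  //   bits  8..29  BuiltinIdField         (Builtins::Name, 22 bits)
  //   bit   30     UsedField
  //   bit   31     SharedCrossOriginField
  //
  // Reads and writes go through the BitField helpers, as in mark_used() and
  // used() above:
  //
  //   bit_field_ = UsedField::update(bit_field_, true);  // set
  //   bool used = UsedField::decode(bit_field_);         // get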

  uint32_t bit_field_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int column_number_;
  int script_id_;
  int position_;
  std::unique_ptr<SourcePositionTable> line_info_;
  std::unique_ptr<RareData> rare_data_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};

struct CodeEntryAndLineNumber {
  CodeEntry* code_entry;
  int line_number;
};

using ProfileStackTrace = std::vector<CodeEntryAndLineNumber>;

class ProfileTree;

class V8_EXPORT_PRIVATE ProfileNode {
 public:
  inline ProfileNode(ProfileTree* tree, CodeEntry* entry, ProfileNode* parent,
                     int line_number = 0);

  ProfileNode* FindChild(
      CodeEntry* entry,
      int line_number = v8::CpuProfileNode::kNoLineNumberInfo);
  ProfileNode* FindOrAddChild(CodeEntry* entry, int line_number = 0);
  void IncrementSelfTicks() { ++self_ticks_; }
  void IncreaseSelfTicks(unsigned amount) { self_ticks_ += amount; }
  void IncrementLineTicks(int src_line);

  CodeEntry* entry() const { return entry_; }
  unsigned self_ticks() const { return self_ticks_; }
  const std::vector<ProfileNode*>* children() const { return &children_list_; }
  unsigned id() const { return id_; }
  ProfileNode* parent() const { return parent_; }
  int line_number() const {
    return line_number_ != 0 ? line_number_ : entry_->line_number();
  }
  CpuProfileNode::SourceType source_type() const;

  unsigned int GetHitLineCount() const {
    return static_cast<unsigned int>(line_ticks_.size());
  }
  bool GetLineTicks(v8::CpuProfileNode::LineTick* entries,
                    unsigned int length) const;
  void CollectDeoptInfo(CodeEntry* entry);
  const std::vector<CpuProfileDeoptInfo>& deopt_infos() const {
    return deopt_infos_;
  }
  Isolate* isolate() const;

  void Print(int indent) const;

 private:
  struct Equals {
    bool operator()(CodeEntryAndLineNumber lhs,
                    CodeEntryAndLineNumber rhs) const {
      return lhs.code_entry->IsSameFunctionAs(rhs.code_entry) &&
             lhs.line_number == rhs.line_number;
    }
  };
  struct Hasher {
    std::size_t operator()(CodeEntryAndLineNumber pair) const {
      return pair.code_entry->GetHash() ^ ComputeUnseededHash(pair.line_number);
    }
  };

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned self_ticks_;
  std::unordered_map<CodeEntryAndLineNumber, ProfileNode*, Hasher, Equals>
      children_;
  int line_number_;
  std::vector<ProfileNode*> children_list_;
  ProfileNode* parent_;
  unsigned id_;
  // maps line number --> number of ticks
  std::unordered_map<int, int> line_ticks_;

  std::vector<CpuProfileDeoptInfo> deopt_infos_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};

class V8_EXPORT_PRIVATE ProfileTree {
 public:
  explicit ProfileTree(Isolate* isolate);
  ~ProfileTree();

  using ProfilingMode = v8::CpuProfilingMode;

  ProfileNode* AddPathFromEnd(
      const std::vector<CodeEntry*>& path,
      int src_line = v8::CpuProfileNode::kNoLineNumberInfo,
      bool update_stats = true);
  ProfileNode* AddPathFromEnd(
      const ProfileStackTrace& path,
      int src_line = v8::CpuProfileNode::kNoLineNumberInfo,
      bool update_stats = true,
      ProfilingMode mode = ProfilingMode::kLeafNodeLineNumbers);
  ProfileNode* root() const { return root_; }
  unsigned next_node_id() { return next_node_id_++; }

  void Print() const { root_->Print(0); }

  Isolate* isolate() const { return isolate_; }

  void EnqueueNode(const ProfileNode* node) { pending_nodes_.push_back(node); }
  size_t pending_nodes_count() const { return pending_nodes_.size(); }
  std::vector<const ProfileNode*> TakePendingNodes() {
    return std::move(pending_nodes_);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  std::vector<const ProfileNode*> pending_nodes_;

  unsigned next_node_id_;
  ProfileNode* root_;
  Isolate* isolate_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};

class CpuProfiler;

class CpuProfile {
 public:
  struct SampleInfo {
    ProfileNode* node;
    base::TimeTicks timestamp;
    int line;
  };

  V8_EXPORT_PRIVATE CpuProfile(CpuProfiler* profiler, const char* title,
                               CpuProfilingOptions options);

  // Checks whether or not the given TickSample should be (sub)sampled, given
  // the sampling interval of the profiler that recorded it (in microseconds).
  V8_EXPORT_PRIVATE bool CheckSubsample(base::TimeDelta sampling_interval);
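  // For example (illustrative numbers, not taken from the implementation): a
  // profile requesting a 1000us interval whose ticks come from a profiler
  // running at 500us would keep roughly every second tick and reject the
  // rest.
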
  // Add pc -> ... -> main() call path to the profile.
  void AddPath(base::TimeTicks timestamp, const ProfileStackTrace& path,
               int src_line, bool update_stats,
               base::TimeDelta sampling_interval);
  void FinishProfile();

  const char* title() const { return title_; }
  const ProfileTree* top_down() const { return &top_down_; }

  int samples_count() const { return static_cast<int>(samples_.size()); }
  const SampleInfo& sample(int index) const { return samples_[index]; }

  int64_t sampling_interval_us() const {
    return options_.sampling_interval_us();
  }

  base::TimeTicks start_time() const { return start_time_; }
  base::TimeTicks end_time() const { return end_time_; }
  CpuProfiler* cpu_profiler() const { return profiler_; }

  void UpdateTicksScale();

  V8_EXPORT_PRIVATE void Print() const;

 private:
  void StreamPendingTraceEvents();

  const char* title_;
  const CpuProfilingOptions options_;
  base::TimeTicks start_time_;
  base::TimeTicks end_time_;
  std::deque<SampleInfo> samples_;
  ProfileTree top_down_;
  CpuProfiler* const profiler_;
  size_t streaming_next_sample_;
  uint32_t id_;
  // Number of microseconds worth of profiler ticks that should elapse before
  // the next sample is recorded.
  base::TimeDelta next_sample_delta_;

  static std::atomic<uint32_t> last_id_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};

class V8_EXPORT_PRIVATE CodeMap {
 public:
  CodeMap();
  ~CodeMap();

  void AddCode(Address addr, CodeEntry* entry, unsigned size);
  void MoveCode(Address from, Address to);
  CodeEntry* FindEntry(Address addr, Address* out_instruction_start = nullptr);
  void Print();

  void Clear();

 private:
  struct CodeEntryMapInfo {
    unsigned index;
    unsigned size;
  };

  union CodeEntrySlotInfo {
    CodeEntry* entry;
    unsigned next_free_slot;
  };

  static constexpr unsigned kNoFreeSlot = std::numeric_limits<unsigned>::max();

  void ClearCodesInRange(Address start, Address end);
  unsigned AddCodeEntry(Address start, CodeEntry*);
  void DeleteCodeEntry(unsigned index);

  CodeEntry* entry(unsigned index) { return code_entries_[index].entry; }

  // Added state here needs to be dealt with in Clear() as well.
  std::deque<CodeEntrySlotInfo> code_entries_;
  std::map<Address, CodeEntryMapInfo> code_map_;
  unsigned free_list_head_ = kNoFreeSlot;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};
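
// A sketch of the slot free list CodeMap declares above (illustrative, based
// only on this header; ReleaseSlot and AcquireSlot are hypothetical names --
// the real logic lives in AddCodeEntry/DeleteCodeEntry in
// profile-generator.cc). Each CodeEntrySlotInfo is either live, holding a
// CodeEntry*, or free, holding the index of the next free slot, with
// free_list_head_ chaining the free slots:
//
//   void CodeMap::ReleaseSlot(unsigned index) {
//     code_entries_[index].next_free_slot = free_list_head_;
//     free_list_head_ = index;
//   }
//
//   unsigned CodeMap::AcquireSlot(CodeEntry* entry) {
//     if (free_list_head_ == kNoFreeSlot) {
//       code_entries_.push_back(CodeEntrySlotInfo{entry});
//       return static_cast<unsigned>(code_entries_.size() - 1);
//     }
//     unsigned index = free_list_head_;
//     free_list_head_ = code_entries_[index].next_free_slot;
//     code_entries_[index].entry = entry;
//     return index;
//   }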

class V8_EXPORT_PRIVATE CpuProfilesCollection {
 public:
  explicit CpuProfilesCollection(Isolate* isolate);

  void set_cpu_profiler(CpuProfiler* profiler) { profiler_ = profiler; }
  CpuProfilingStatus StartProfiling(const char* title,
                                    CpuProfilingOptions options = {});

  CpuProfile* StopProfiling(const char* title);
  std::vector<std::unique_ptr<CpuProfile>>* profiles() {
    return &finished_profiles_;
  }
  const char* GetName(Name name) { return resource_names_.GetName(name); }
  bool IsLastProfile(const char* title);
  void RemoveProfile(CpuProfile* profile);

  // Finds a common sampling interval dividing each CpuProfile's interval,
  // rounded up to the nearest multiple of the CpuProfiler's sampling interval.
  // Returns 0 if no profiles are attached.
  base::TimeDelta GetCommonSamplingInterval() const;
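  // For example (illustrative numbers): with a 500us profiler interval and
  // profiles requesting 1000us and 1500us, both requests are already
  // multiples of the base, and their greatest common divisor, 500us, divides
  // each of them, so ticking at 500us lets every profile subsample evenly.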

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(base::TimeTicks timestamp,
                                const ProfileStackTrace& path, int src_line,
                                bool update_stats,
                                base::TimeDelta sampling_interval);

  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

 private:
  StringsStorage resource_names_;
  std::vector<std::unique_ptr<CpuProfile>> finished_profiles_;
  CpuProfiler* profiler_;

  // Accessed by VM thread and profile generator thread.
  std::vector<std::unique_ptr<CpuProfile>> current_profiles_;
  base::Semaphore current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};

}  // namespace internal
}  // namespace v8

#endif  // V8_PROFILER_PROFILE_GENERATOR_H_