// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/profiler/sampling-heap-profiler.h"

#include <stdint.h>
#include <memory>
#include "src/api-inl.h"
#include "src/base/ieee754.h"
#include "src/base/utils/random-number-generator.h"
#include "src/frames-inl.h"
#include "src/heap/heap.h"
#include "src/isolate.h"
#include "src/profiler/strings-storage.h"

namespace v8 {
namespace internal {

// We sample with a Poisson process, with a constant average sampling
// interval. This follows the exponential probability distribution with
// parameter λ = 1/rate, where rate is the average number of bytes between
// samples.
//
// Let u be a uniformly distributed random number between 0 and 1, then
// next_sample = (-ln u) / λ.
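//
// For illustration only (the numbers are hypothetical): with rate = 512 KiB
// and u = 0.5, the next sample would be taken after
// -ln(0.5) * 512 KiB ≈ 0.693 * 512 KiB ≈ 355 KiB of allocation.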
intptr_t SamplingAllocationObserver::GetNextSampleInterval(uint64_t rate) {
  if (FLAG_sampling_heap_profiler_suppress_randomness) {
    return static_cast<intptr_t>(rate);
  }
  double u = random_->NextDouble();
  double next = (-base::ieee754::log(u)) * rate;
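  // Clamp the result to [kPointerSize, INT_MAX] before converting, so the
  // interval covers at least one pointer-sized slot.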
  return next < kPointerSize
             ? kPointerSize
             : (next > INT_MAX ? INT_MAX : static_cast<intptr_t>(next));
}

// Samples were collected according to a Poisson process. Since we have not
// recorded all allocations, we must approximate the shape of the underlying
// space of allocations based on the samples we have collected. Given that
// we sample at rate R, the probability that an allocation of size S will be
// sampled is 1 - exp(-S/R). This function uses that probability to
// approximate the true number of allocations of size *size*, given that
// *count* samples were observed.
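//
// For example (illustrative numbers): with R = 512 KiB and S = 1 KiB, the
// sampling probability is 1 - exp(-1/512) ≈ 0.00195, so the scale factor is
// about 512.5 and each observed sample stands in for roughly 512 real
// allocations of that size.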
v8::AllocationProfile::Allocation SamplingHeapProfiler::ScaleSample(
    size_t size, unsigned int count) {
  double scale = 1.0 / (1.0 - std::exp(-static_cast<double>(size) / rate_));
  // Round count instead of truncating.
  return {size, static_cast<unsigned int>(count * scale + 0.5)};
}

SamplingHeapProfiler::SamplingHeapProfiler(
    Heap* heap, StringsStorage* names, uint64_t rate, int stack_depth,
    v8::HeapProfiler::SamplingFlags flags)
    : isolate_(heap->isolate()),
      heap_(heap),
      new_space_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      other_spaces_observer_(new SamplingAllocationObserver(
          heap_, static_cast<intptr_t>(rate), rate, this,
          heap->isolate()->random_number_generator())),
      names_(names),
      profile_root_(nullptr, "(root)", v8::UnboundScript::kNoScriptId, 0),
      samples_(),
      stack_depth_(stack_depth),
      rate_(rate),
      flags_(flags) {
  CHECK_GT(rate_, 0u);

  heap_->AddAllocationObserversToAllSpaces(other_spaces_observer_.get(),
                                           new_space_observer_.get());
}

SamplingHeapProfiler::~SamplingHeapProfiler() {
  heap_->RemoveAllocationObserversFromAllSpaces(other_spaces_observer_.get(),
                                                new_space_observer_.get());

  samples_.clear();
}

void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
  DisallowHeapAllocation no_allocation;

  HandleScope scope(isolate_);
  HeapObject* heap_object = HeapObject::FromAddress(soon_object);
  Handle<Object> obj(heap_object, isolate_);

  // Mark the new block as FreeSpace to make sure the heap is iterable while
  // we are taking the sample.
  heap()->CreateFillerObjectAt(soon_object, static_cast<int>(size),
                               ClearRecordedSlots::kNo);

  Local<v8::Value> loc = v8::Utils::ToLocal(obj);

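  // Record the allocation under the node for the current JS stack and keep a
  // weak handle to the object, so the sample can be retired in OnWeakCallback
  // once the object dies.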
  AllocationNode* node = AddStack();
  node->allocations_[size]++;
  Sample* sample = new Sample(size, node, loc, this);
  samples_.emplace(sample);
  sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
#if __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#endif
  // MarkIndependent is marked deprecated but we still rely on it here
  // temporarily.
  sample->global.MarkIndependent();
#if __clang__
#pragma clang diagnostic pop
#endif
}

void SamplingHeapProfiler::OnWeakCallback(
    const WeakCallbackInfo<Sample>& data) {
  Sample* sample = data.GetParameter();
  AllocationNode* node = sample->owner;
  DCHECK_GT(node->allocations_[sample->size], 0);
  node->allocations_[sample->size]--;
  if (node->allocations_[sample->size] == 0) {
    node->allocations_.erase(sample->size);
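    // Prune the branch: walk towards the root, deleting nodes that hold no
    // samples and no children, stopping at a pinned node (one currently
    // being translated) or at the root.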
    while (node->allocations_.empty() && node->children_.empty() &&
           node->parent_ && !node->parent_->pinned_) {
      AllocationNode* parent = node->parent_;
      AllocationNode::FunctionId id = AllocationNode::function_id(
          node->script_id_, node->script_position_, node->name_);
      parent->children_.erase(id);
      delete node;
      node = parent;
    }
  }
  auto it = std::find_if(sample->profiler->samples_.begin(),
                         sample->profiler->samples_.end(),
                         [&sample](const std::unique_ptr<Sample>& s) {
                           return s.get() == sample;
                         });

  sample->profiler->samples_.erase(it);
  // |sample| is deleted because its unique_ptr was erased from samples_.
}

SamplingHeapProfiler::AllocationNode*
SamplingHeapProfiler::AllocationNode::FindOrAddChildNode(const char* name,
                                                         int script_id,
                                                         int start_position) {
  FunctionId id = function_id(script_id, start_position, name);
  auto it = children_.find(id);
  if (it != children_.end()) {
    DCHECK_EQ(strcmp(it->second->name_, name), 0);
    return it->second;
  }
  auto child = new AllocationNode(this, name, script_id, start_position);
  children_.insert(std::make_pair(id, child));
  return child;
}

SamplingHeapProfiler::AllocationNode* SamplingHeapProfiler::AddStack() {
  AllocationNode* node = &profile_root_;

  std::vector<SharedFunctionInfo*> stack;
  JavaScriptFrameIterator it(isolate_);
  int frames_captured = 0;
  bool found_arguments_marker_frames = false;
  while (!it.done() && frames_captured < stack_depth_) {
    JavaScriptFrame* frame = it.frame();
    // If we are materializing objects during deoptimization, inlined
    // closures may not yet be materialized, and this includes the
    // closure on the stack. Skip over any such frames (they'll be
    // in the top frames of the stack). The allocations made in this
    // sensitive moment belong to the formerly optimized frame anyway.
    if (frame->unchecked_function()->IsJSFunction()) {
      SharedFunctionInfo* shared = frame->function()->shared();
      stack.push_back(shared);
      frames_captured++;
    } else {
      found_arguments_marker_frames = true;
    }
    it.Advance();
  }

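  // No JS frames were captured; attribute the allocation to the current VM
  // state (GC, parser, compiler, ...) instead.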
  if (frames_captured == 0) {
    const char* name = nullptr;
    switch (isolate_->current_vm_state()) {
      case GC:
        name = "(GC)";
        break;
      case PARSER:
        name = "(PARSER)";
        break;
      case COMPILER:
        name = "(COMPILER)";
        break;
      case BYTECODE_COMPILER:
        name = "(BYTECODE_COMPILER)";
        break;
      case OTHER:
        name = "(V8 API)";
        break;
      case EXTERNAL:
        name = "(EXTERNAL)";
        break;
      case IDLE:
        name = "(IDLE)";
        break;
      case JS:
        name = "(JS)";
        break;
    }
    return node->FindOrAddChildNode(name, v8::UnboundScript::kNoScriptId, 0);
  }

  // We need to process the stack in reverse order as the top of the stack is
  // the first element in the list.
  for (auto it = stack.rbegin(); it != stack.rend(); ++it) {
    SharedFunctionInfo* shared = *it;
    const char* name = this->names()->GetName(shared->DebugName());
    int script_id = v8::UnboundScript::kNoScriptId;
    if (shared->script()->IsScript()) {
      Script* script = Script::cast(shared->script());
      script_id = script->id();
    }
    node = node->FindOrAddChildNode(name, script_id, shared->StartPosition());
  }

  if (found_arguments_marker_frames) {
    node =
        node->FindOrAddChildNode("(deopt)", v8::UnboundScript::kNoScriptId, 0);
  }

  return node;
}

v8::AllocationProfile::Node* SamplingHeapProfiler::TranslateAllocationNode(
    AllocationProfile* profile, SamplingHeapProfiler::AllocationNode* node,
    const std::map<int, Handle<Script>>& scripts) {
  // By pinning the node we make sure its children won't get disposed if
  // a GC kicks in during the tree retrieval.
  node->pinned_ = true;
  Local<v8::String> script_name =
      ToApiHandle<v8::String>(isolate_->factory()->InternalizeUtf8String(""));
  int line = v8::AllocationProfile::kNoLineNumberInfo;
  int column = v8::AllocationProfile::kNoColumnNumberInfo;
  std::vector<v8::AllocationProfile::Allocation> allocations;
  allocations.reserve(node->allocations_.size());
  if (node->script_id_ != v8::UnboundScript::kNoScriptId &&
      scripts.find(node->script_id_) != scripts.end()) {
    // Cannot use std::map<T>::at because it is not available on android.
    // Binding a non-const reference (rather than copying the map) is enough;
    // operator[] will not insert because the key was checked above.
    auto& non_const_scripts =
        const_cast<std::map<int, Handle<Script>>&>(scripts);
    Handle<Script> script = non_const_scripts[node->script_id_];
    if (!script.is_null()) {
      if (script->name()->IsName()) {
        Name* name = Name::cast(script->name());
        script_name = ToApiHandle<v8::String>(
            isolate_->factory()->InternalizeUtf8String(names_->GetName(name)));
      }
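      // Script positions are zero-based; the profile reports one-based line
      // and column numbers, hence the "1 +".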
      line = 1 + Script::GetLineNumber(script, node->script_position_);
      column = 1 + Script::GetColumnNumber(script, node->script_position_);
    }
  }
  for (auto alloc : node->allocations_) {
    allocations.push_back(ScaleSample(alloc.first, alloc.second));
  }

  profile->nodes().push_back(v8::AllocationProfile::Node(
      {ToApiHandle<v8::String>(
           isolate_->factory()->InternalizeUtf8String(node->name_)),
       script_name, node->script_id_, node->script_position_, line, column,
       std::vector<v8::AllocationProfile::Node*>(), allocations}));
  v8::AllocationProfile::Node* current = &profile->nodes().back();
  // The children map may have nodes inserted into it during translation
  // because the translation may allocate strings on the JS heap that have
  // the potential to be sampled. That's ok since map iterators are not
  // invalidated upon std::map insertion.
  for (auto it : node->children_) {
    current->children.push_back(
        TranslateAllocationNode(profile, it.second, scripts));
  }
  node->pinned_ = false;
  return current;
}

v8::AllocationProfile* SamplingHeapProfiler::GetAllocationProfile() {
  if (flags_ & v8::HeapProfiler::kSamplingForceGC) {
    isolate_->heap()->CollectAllGarbage(
        Heap::kNoGCFlags, GarbageCollectionReason::kSamplingProfiler);
  }
  // To resolve positions to line/column numbers, we will need to look up
  // scripts. Build a map to allow fast lookup from script id to script.
  std::map<int, Handle<Script>> scripts;
  {
    Script::Iterator iterator(isolate_);
    while (Script* script = iterator.Next()) {
      scripts[script->id()] = handle(script, isolate_);
    }
  }
  auto profile = new v8::internal::AllocationProfile();
  TranslateAllocationNode(profile, &profile_root_, scripts);
  return profile;
}
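
// A minimal sketch (not part of this file's logic) of how an embedder
// reaches this profiler through the public API in include/v8-profiler.h;
// the parameter values below are illustrative:
//
//   v8::HeapProfiler* heap_profiler = isolate->GetHeapProfiler();
//   heap_profiler->StartSamplingHeapProfiler(512 * 1024 /* rate */,
//                                            16 /* stack depth */);
//   // ... run the workload to be profiled ...
//   std::unique_ptr<v8::AllocationProfile> profile(
//       heap_profiler->GetAllocationProfile());
//   heap_profiler->StopSamplingHeapProfiler();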

}  // namespace internal
}  // namespace v8