// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "runtime-profiler.h"

#include "assembler.h"
#include "bootstrapper.h"
#include "code-stubs.h"
#include "compilation-cache.h"
#include "execution.h"
#include "full-codegen.h"
#include "global-handles.h"
#include "isolate-inl.h"
#include "mark-compact.h"
#include "platform.h"
#include "scopeinfo.h"

namespace v8 {
namespace internal {


// Optimization sampler constants.
static const int kSamplerFrameCount = 2;

// Constants for the statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };

static const int kSamplerTicksBetweenThresholdAdjustment = 32;

static const int kSamplerThresholdInit = 3;
static const int kSamplerThresholdMin = 1;
static const int kSamplerThresholdDelta = 1;

static const int kSamplerThresholdSizeFactorInit = 3;

static const int kSizeLimit = 1500;

// Constants for the counter-based profiler.

// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If optimization of a function was disabled due to a high deoptimization
// count, but the function is hot and has been seen on the stack this many
// times, we try to reenable optimization for it.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold) but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte in which to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    3 * FullCodeGenerator::kCodeSizeMultiplier;

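// Note: in OptimizeNow() below, a function that has accumulated t profiler
// ticks remains eligible for OSR only while its code size satisfies
//   CodeSize() <= kOSRCodeSizeAllowanceBase + t * kOSRCodeSizeAllowancePerTick,
// so hotter functions earn a proportionally larger size allowance.
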
// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;


RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      sampler_threshold_(kSamplerThresholdInit),
      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
      sampler_ticks_until_threshold_adjustment_(
          kSamplerTicksBetweenThresholdAdjustment),
      sampler_window_position_(0),
      any_ic_changed_(false),
      code_generated_(false) {
  ClearSampleBuffer();
}


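// Worked example for GetICCounts (illustrative numbers): if a function's
// code contains 40 ICs and 30 of them have recorded type feedback, the
// computed *percentage is 100 * 30 / 40 == 75.  Code with no ICs at all
// reports 100%, so trivial functions always clear FLAG_type_info_threshold.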
static void GetICCounts(Code* shared_code,
                        int* ic_with_type_info_count,
                        int* ic_total_count,
                        int* percentage) {
  *ic_total_count = 0;
  *ic_with_type_info_count = 0;
  Object* raw_info = shared_code->type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
    *ic_with_type_info_count = info->ic_with_type_info_count();
    *ic_total_count = info->ic_total_count();
  }
  *percentage = *ic_total_count > 0
      ? 100 * *ic_with_type_info_count / *ic_total_count
      : 100;
}


void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  ASSERT(function->IsOptimizable());

  if (FLAG_trace_opt && function->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for recompilation, reason: %s", reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, total, percentage;
      GetICCounts(function->shared()->code(), &typeinfo, &total, &percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
    }
    PrintF("]\n");
  }

  if (isolate_->concurrent_recompilation_enabled() &&
      !isolate_->bootstrapper()->IsActive()) {
    if (isolate_->concurrent_osr_enabled() &&
        isolate_->optimizing_compiler_thread()->IsQueuedForOSR(function)) {
      // Do not attempt regular recompilation if we already queued this for OSR.
      // TODO(yangguo): This is necessary so that we don't install optimized
      // code on a function that is already optimized, since OSR and regular
      // recompilation race.  This goes away as soon as OSR becomes one-shot.
      return;
    }
    ASSERT(!function->IsInRecompileQueue());
    function->MarkForConcurrentRecompilation();
  } else {
    // The next call to the function will trigger optimization.
    function->MarkForLazyRecompilation();
  }
}




void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
  // See the AlwaysFullCompiler comment in compiler.cc on why we need
  // Debug::has_break_points().
  if (!FLAG_use_osr ||
      isolate_->DebuggerHasBreakPoints() ||
      function->IsBuiltin()) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  // If the code is not optimizable, don't try OSR.
  if (!shared->code()->optimizable()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object.  The optimized code would bypass it for
  // arguments accesses, which is unsound.  Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: patch the unoptimized code so that
  // any back edge in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[OSR - patching back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

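  // Note that the patch is applied to the shared code object, so every
  // active frame running this function's unoptimized code becomes eligible
  // for OSR, not just the frame the profiler happened to observe.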
  BackEdgeTable::Patch(isolate_, shared->code());
}


void RuntimeProfiler::ClearSampleBuffer() {
  memset(sampler_window_, 0, sizeof(sampler_window_));
  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
}


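// Note on LookupSample below: the returned weight sums every window slot
// that matches.  With FLAG_lookup_sample_by_shared, distinct closures over
// the same SharedFunctionInfo pool their samples; otherwise each closure
// is credited separately.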
int RuntimeProfiler::LookupSample(JSFunction* function) {
  int weight = 0;
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* sample = sampler_window_[i];
    if (sample != NULL) {
      bool fits = FLAG_lookup_sample_by_shared
          ? (function->shared() == JSFunction::cast(sample)->shared())
          : (function == JSFunction::cast(sample));
      if (fits) {
        weight += sampler_window_weight_[i];
      }
    }
  }
  return weight;
}


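// Note on the index update in AddSample: because kSamplerWindowSize is
// asserted to be a power of two, "& (kSamplerWindowSize - 1)" is the same
// as "% kSamplerWindowSize".  If the window size were 16, for example,
// position 15 would wrap back to 0, so the window is a ring buffer that
// always overwrites the oldest sample.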
void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
  ASSERT(IsPowerOf2(kSamplerWindowSize));
  sampler_window_[sampler_window_position_] = function;
  sampler_window_weight_[sampler_window_position_] = weight;
  sampler_window_position_ = (sampler_window_position_ + 1) &
      (kSamplerWindowSize - 1);
}


void RuntimeProfiler::OptimizeNow() {
  HandleScope scope(isolate_);

  if (isolate_->DebuggerHasBreakPoints()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimization
  // (eagerly or lazily).
  JSFunction* samples[kSamplerFrameCount];
  int sample_count = 0;
  int frame_count = 0;
  int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
                                                 : kSamplerFrameCount;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    if (!FLAG_watch_ic_patching) {
      // Adjust the threshold each time we have processed
      // a certain number of ticks.
      if (sampler_ticks_until_threshold_adjustment_ > 0) {
        sampler_ticks_until_threshold_adjustment_--;
        if (sampler_ticks_until_threshold_adjustment_ <= 0) {
          // If the threshold is not already at the minimum,
          // modify it and reset the ticks until the next adjustment.
          if (sampler_threshold_ > kSamplerThresholdMin) {
            sampler_threshold_ -= kSamplerThresholdDelta;
            sampler_ticks_until_threshold_adjustment_ =
                kSamplerTicksBetweenThresholdAdjustment;
          }
        }
      }
    }

    SharedFunctionInfo* shared = function->shared();
    Code* shared_code = shared->code();

    if (shared_code->kind() != Code::FUNCTION) continue;
    if (function->IsInRecompileQueue()) continue;

    if (FLAG_always_osr &&
        shared_code->allow_osr_at_loop_nesting_level() == 0) {
      // Testing mode: always try an OSR compile for every function.
      for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) {
        // TODO(titzer): fix AttemptOnStackReplacement to avoid this dumb loop.
        shared_code->set_allow_osr_at_loop_nesting_level(i);
        AttemptOnStackReplacement(function);
      }
      // Fall through and do a normal optimized compile as well.
    } else if (!frame->is_optimized() &&
        (function->IsMarkedForLazyRecompilation() ||
         function->IsMarkedForConcurrentRecompilation() ||
         function->IsOptimized())) {
      // Attempt OSR if we are still running unoptimized code even though
      // the function has long been marked or even already been optimized.
      int ticks = shared_code->profiler_ticks();
      int allowance = kOSRCodeSizeAllowanceBase +
                      ticks * kOSRCodeSizeAllowancePerTick;
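      // Illustrative arithmetic: after 10 ticks the allowance is
      // (100 + 10 * 3) * kCodeSizeMultiplier bytes, so each tick spent
      // waiting relaxes the size limit.  Oversized code simply accumulates
      // more ticks (capped at 255, the most the one-byte field can hold)
      // until it qualifies.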
      if (shared_code->CodeSize() > allowance) {
        if (ticks < 255) shared_code->set_profiler_ticks(ticks + 1);
      } else {
        int nesting = shared_code->allow_osr_at_loop_nesting_level();
        if (nesting < Code::kMaxLoopNestingMarker) {
          int new_nesting = nesting + 1;
          shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
          AttemptOnStackReplacement(function);
        }
      }
      continue;
    }

    // Only record top-level code on top of the execution stack and
    // avoid optimizing excessively large scripts, since top-level code
    // will be executed only once.
    const int kMaxToplevelSourceSize = 10 * 1024;
    if (shared->is_toplevel() &&
        (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
      continue;
    }

    // Do not record non-optimizable functions.
    if (shared->optimization_disabled()) {
      if (shared->deopt_count() >= FLAG_max_opt_count) {
        // If optimization was disabled due to many deoptimizations,
        // check whether the function is hot and try to reenable optimization.
        int ticks = shared_code->profiler_ticks();
        if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
          shared_code->set_profiler_ticks(0);
          shared->TryReenableOptimization();
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
        }
      }
      continue;
    }
    if (!function->IsOptimizable()) continue;

    if (FLAG_watch_ic_patching) {
      int ticks = shared_code->profiler_ticks();

      if (ticks >= kProfilerTicksBeforeOptimization) {
        int typeinfo, total, percentage;
        GetICCounts(shared_code, &typeinfo, &total, &percentage);
        if (percentage >= FLAG_type_info_threshold) {
          // The function is hot, and enough of its ICs have type info:
          // optimize it now.
          Optimize(function, "hot and stable");
        } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
          Optimize(function, "not much type info but very hot");
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
          if (FLAG_trace_opt_verbose) {
            PrintF("[not yet optimizing ");
            function->PrintName();
            PrintF(", not enough type info: %d/%d (%d%%)]\n",
                   typeinfo, total, percentage);
          }
        }
      } else if (!any_ic_changed_ &&
                 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
        // If no IC was patched since the last tick and this function is very
        // small, optimistically optimize it now.
        Optimize(function, "small function");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    } else {  // !FLAG_watch_ic_patching
      samples[sample_count++] = function;

      int function_size = function->shared()->SourceSize();
      int threshold_size_factor = (function_size > kSizeLimit)
          ? sampler_threshold_size_factor_
          : 1;

      int threshold = sampler_threshold_ * threshold_size_factor;

      if (LookupSample(function) >= threshold) {
        Optimize(function, "sampler window lookup");
      }
    }
  }
  if (FLAG_watch_ic_patching) {
    any_ic_changed_ = false;
  } else {  // !FLAG_watch_ic_patching
    // Add the collected functions as samples. It's important not to do
    // this as part of collecting them, because it would interfere with
    // the sample lookup in the case of recursive functions.
    for (int i = 0; i < sample_count; i++) {
      AddSample(samples[i], kSamplerFrameWeight[i]);
    }
  }
}


void RuntimeProfiler::SetUp() {
  if (!FLAG_watch_ic_patching) {
    ClearSampleBuffer();
  }
}


void RuntimeProfiler::Reset() {
  if (!FLAG_watch_ic_patching) {
    sampler_threshold_ = kSamplerThresholdInit;
    sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
    sampler_ticks_until_threshold_adjustment_ =
        kSamplerTicksBetweenThresholdAdjustment;
  }
}


void RuntimeProfiler::TearDown() {
  // Nothing to do.
}


// Update the pointers in the sampler window after a GC.
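// A sample that survived the scavenge has had a forwarding pointer
// installed in its map word; entries whose functions did not survive are
// cleared to NULL.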
void RuntimeProfiler::UpdateSamplesAfterScavenge() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
      MapWord map_word = HeapObject::cast(function)->map_word();
      if (map_word.IsForwardingAddress()) {
        sampler_window_[i] = map_word.ToForwardingAddress();
      } else {
        sampler_window_[i] = NULL;
      }
    }
  }
}


void RuntimeProfiler::RemoveDeadSamples() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL &&
        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
      sampler_window_[i] = NULL;
    }
  }
}


void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    visitor->VisitPointer(&sampler_window_[i]);
  }
}


} }  // namespace v8::internal