// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime-profiler.h"

#include "src/assembler.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/compiler.h"
#include "src/execution.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"
#include "src/interpreter/interpreter.h"

namespace v8 {
namespace internal {


// Number of times a function has to be seen on the stack before it is
// compiled for baseline.
static const int kProfilerTicksBeforeBaseline = 0;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If the function optimization was disabled due to high deoptimization count,
// but the function is hot and has been seen on the stack this number of times,
// then we try to reenable optimization for this function.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;
static const int kOSRCodeSizeAllowanceBaseIgnition =
    10 * interpreter::Interpreter::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    4 * FullCodeGenerator::kCodeSizeMultiplier;
static const int kOSRCodeSizeAllowancePerTickIgnition =
    2 * interpreter::Interpreter::kCodeSizeMultiplier;
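
// The OSR size allowance grows linearly with the number of profiler ticks a
// function has accumulated: OSR is attempted only while the unoptimized code
// (or bytecode) size is within base + ticks * per-tick bytes (see
// MaybeOptimizeFullCodegen and MaybeOSRIgnition below).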

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;
static const int kMaxSizeEarlyOptIgnition =
    5 * interpreter::Interpreter::kCodeSizeMultiplier;

// Certain functions are simply too big to be worth optimizing.
// We aren't using the code size multiplier here because there is no
// "kMaxSizeOpt" with which we would need to normalize. This constant is
// only for optimization decisions coming into TurboFan from Ignition.
static const int kMaxSizeOptIgnition = 250 * 1024;

#define OPTIMIZATION_REASON_LIST(V)                            \
  V(DoNotOptimize, "do not optimize")                          \
  V(HotAndStable, "hot and stable")                            \
  V(HotEnoughForBaseline, "hot enough for baseline")           \
  V(HotWithoutMuchTypeInfo, "not much type info but very hot") \
  V(SmallFunction, "small function")

enum class OptimizationReason : uint8_t {
#define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
  OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
#undef OPTIMIZATION_REASON_CONSTANTS
};

char const* OptimizationReasonToString(OptimizationReason reason) {
  static char const* reasons[] = {
#define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
      OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
#undef OPTIMIZATION_REASON_TEXTS
  };
  size_t const index = static_cast<size_t>(reason);
  DCHECK_LT(index, arraysize(reasons));
  return reasons[index];
}

std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
  return os << OptimizationReasonToString(reason);
}

RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      any_ic_changed_(false) {
}

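// Collects IC statistics for |function|: total IC count, how many ICs carry
// type feedback, and how many have gone generic. Counts from full-codegen
// type feedback (if present) and from the feedback vector are aggregated
// before the percentages are computed. With no ICs at all, the percentages
// default to the most permissive values.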
static void GetICCounts(JSFunction* function, int* ic_with_type_info_count,
                        int* ic_generic_count, int* ic_total_count,
                        int* type_info_percentage, int* generic_percentage) {
  *ic_total_count = 0;
  *ic_generic_count = 0;
  *ic_with_type_info_count = 0;
  if (function->code()->kind() == Code::FUNCTION) {
    Code* shared_code = function->shared()->code();
    Object* raw_info = shared_code->type_feedback_info();
    if (raw_info->IsTypeFeedbackInfo()) {
      TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
      *ic_with_type_info_count = info->ic_with_type_info_count();
      *ic_generic_count = info->ic_generic_count();
      *ic_total_count = info->ic_total_count();
    }
  }

  // Harvest vector ICs as well.
  FeedbackVector* vector = function->feedback_vector();
  int with = 0, gen = 0, type_vector_ic_count = 0;
  const bool is_interpreted = function->shared()->IsInterpreted();

  vector->ComputeCounts(&with, &gen, &type_vector_ic_count, is_interpreted);
  *ic_total_count += type_vector_ic_count;
  *ic_with_type_info_count += with;
  *ic_generic_count += gen;

  if (*ic_total_count > 0) {
    *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
    *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
  } else {
    *type_info_percentage = 100;  // Compared against lower bound.
    *generic_percentage = 0;      // Compared against upper bound.
  }
}

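// Prints a single trace line for --trace_opt explaining why |function| is
// being marked for recompilation, including IC statistics when a type info
// threshold is configured.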
static void TraceRecompile(JSFunction* function, const char* reason,
                           const char* type) {
  if (FLAG_trace_opt &&
      function->shared()->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for %s recompilation, reason: %s", type, reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                  &generic_percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
             type_percentage);
      PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
    }
    PrintF("]\n");
  }
}

void RuntimeProfiler::Optimize(JSFunction* function,
                               OptimizationReason reason) {
  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
  TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
  function->AttemptConcurrentOptimization();
}

void RuntimeProfiler::Baseline(JSFunction* function,
                               OptimizationReason reason) {
  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
  TraceRecompile(function, OptimizationReasonToString(reason), "baseline");
  DCHECK(function->shared()->IsInterpreted());
  function->MarkForBaseline();
}

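// Arms the unoptimized code of |frame| so that its hot loops trigger
// on-stack replacement: back edges are patched for full-codegen frames, and
// the OSR loop nesting level is raised for interpreted frames.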
void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
                                                int loop_nesting_levels) {
  JSFunction* function = frame->function();
  SharedFunctionInfo* shared = function->shared();
  if (!FLAG_use_osr || !function->shared()->IsUserJavaScript()) {
    return;
  }

  // If the code is not optimizable, don't try OSR.
  if (shared->optimization_disabled()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object. The optimized code would bypass it for
  // arguments accesses, which is unsound. Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: modify unoptimized code so that
  // certain back edges in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  //  - Ignition: Store new loop nesting level in BytecodeArray header.
  //  - FullCodegen: Patch back edges up to new level using BackEdgeTable.
  if (FLAG_trace_osr) {
    PrintF("[OSR - arming back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

  if (frame->type() == StackFrame::JAVA_SCRIPT) {
    DCHECK(shared->HasBaselineCode());
    DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code()));
    for (int i = 0; i < loop_nesting_levels; i++) {
      BackEdgeTable::Patch(isolate_, shared->code());
    }
  } else if (frame->type() == StackFrame::INTERPRETED) {
    DCHECK(shared->HasBytecodeArray());
    if (!FLAG_ignition_osr) return;  // Only use this when enabled.
    int level = shared->bytecode_array()->osr_loop_nesting_level();
    shared->bytecode_array()->set_osr_loop_nesting_level(
        Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
  } else {
    UNREACHABLE();
  }
}

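// Tier-up decision for a function running on full-codegen code: attempt OSR
// for functions that are already marked (or optimized), possibly reenable
// optimization after frequent deopts, and otherwise mark the function for
// optimization based on its tick count, IC state, and code size.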
void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
                                               JavaScriptFrame* frame,
                                               int frame_count) {
  SharedFunctionInfo* shared = function->shared();
  Code* shared_code = shared->code();
  if (shared_code->kind() != Code::FUNCTION) return;
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (!frame->is_optimized() &&
             (function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
              function->IsOptimized())) {
    // Attempt OSR if we are still running unoptimized code even though the
    // function has long been marked or even already been optimized.
    int ticks = shared_code->profiler_ticks();
    int64_t allowance =
        kOSRCodeSizeAllowanceBase +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
    if (shared_code->CodeSize() > allowance &&
        ticks < Code::ProfilerTicksField::kMax) {
      shared_code->set_profiler_ticks(ticks + 1);
    } else {
      AttemptOnStackReplacement(frame);
    }
    return;
  }

  // Only record top-level code on top of the execution stack and
  // avoid optimizing excessively large scripts since top-level code
  // will be executed only once.
  const int kMaxToplevelSourceSize = 10 * 1024;
  if (shared->is_toplevel() &&
      (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
    return;
  }

  // Do not record non-optimizable functions.
  if (shared->optimization_disabled()) {
    if (shared->deopt_count() >= FLAG_max_opt_count) {
      // If optimization was disabled due to many deoptimizations,
      // then check if the function is hot and try to reenable optimization.
      int ticks = shared_code->profiler_ticks();
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared_code->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    }
    return;
  }
  if (frame->is_optimized()) return;

  int ticks = shared_code->profiler_ticks();

  if (ticks >= kProfilerTicksBeforeOptimization) {
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      // If this particular function hasn't had any ICs patched for enough
      // ticks, optimize it now.
      Optimize(function, OptimizationReason::kHotAndStable);
    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
      Optimize(function, OptimizationReason::kHotWithoutMuchTypeInfo);
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
      if (FLAG_trace_opt_verbose) {
        PrintF("[not yet optimizing ");
        function->PrintName();
        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
               type_percentage);
      }
    }
  } else if (!any_ic_changed_ &&
             shared_code->instruction_size() < kMaxSizeEarlyOpt) {
    // If no IC was patched since the last tick and this function is very
    // small, optimistically optimize it now.
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      Optimize(function, OptimizationReason::kSmallFunction);
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
    }
  } else {
    shared_code->set_profiler_ticks(ticks + 1);
  }
}

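// Tier-up decision for a function running in Ignition that should next be
// compiled for baseline: give OSR a chance first, then mark the function
// once it has been seen often enough on the stack.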
void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
                                            JavaScriptFrame* frame) {
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal baseline compile as well.
  } else if (MaybeOSRIgnition(function, frame)) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  if (shared->optimization_disabled() &&
      shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
    // Don't baseline functions which have been marked by NeverOptimizeFunction
    // in a test.
    return;
  }

  if (ticks >= kProfilerTicksBeforeBaseline) {
    Baseline(function, OptimizationReason::kHotEnoughForBaseline);
  }
}

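// Tier-up decision for a function running in Ignition that should next be
// optimized: give OSR a chance first, then consult ShouldOptimizeIgnition.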
void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
                                            JavaScriptFrame* frame) {
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (MaybeOSRIgnition(function, frame)) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  if (shared->optimization_disabled()) {
    if (shared->deopt_count() >= FLAG_max_opt_count) {
      // If optimization was disabled due to many deoptimizations,
      // then check if the function is hot and try to reenable optimization.
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      }
    }
    return;
  }

  if (frame->is_optimized()) return;

  OptimizationReason reason = ShouldOptimizeIgnition(function, frame);

  if (reason != OptimizationReason::kDoNotOptimize) {
    Optimize(function, reason);
  }
}

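// Attempts OSR for an interpreted frame whose function is already marked
// for (or has already completed) a tier-up. Returns true if this frame was
// handled here, in which case the caller should not mark the function again.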
bool RuntimeProfiler::MaybeOSRIgnition(JSFunction* function,
                                       JavaScriptFrame* frame) {
  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
  // than kMaxToplevelSourceSize.

  bool osr_before_baselined = function->IsMarkedForBaseline() &&
                              ShouldOptimizeIgnition(function, frame) !=
                                  OptimizationReason::kDoNotOptimize;
  if (!frame->is_optimized() &&
      (osr_before_baselined || function->IsMarkedForOptimization() ||
       function->IsMarkedForConcurrentOptimization() ||
       function->IsOptimized())) {
    // Attempt OSR if we are still running interpreted code even though the
    // function has long been marked or even already been optimized.
    int64_t allowance =
        kOSRCodeSizeAllowanceBaseIgnition +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
    if (shared->bytecode_array()->Size() <= allowance) {
      AttemptOnStackReplacement(frame);
    }
    return true;
  }
  return false;
}

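// Decides whether an interpreted function should be optimized, and for what
// reason, based on its bytecode size, accumulated profiler ticks, and IC
// state; returns kDoNotOptimize if the function should not (yet) be
// optimized.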
OptimizationReason RuntimeProfiler::ShouldOptimizeIgnition(
    JSFunction* function, JavaScriptFrame* frame) {
  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  if (shared->bytecode_array()->Size() > kMaxSizeOptIgnition) {
    return OptimizationReason::kDoNotOptimize;
  }

  if (ticks >= kProfilerTicksBeforeOptimization) {
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold) {
      // If this particular function hasn't had any ICs patched for enough
      // ticks, optimize it now.
      return OptimizationReason::kHotAndStable;
    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
      return OptimizationReason::kHotWithoutMuchTypeInfo;
    } else {
      if (FLAG_trace_opt_verbose) {
        PrintF("[not yet optimizing ");
        function->PrintName();
        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
               type_percentage);
      }
      return OptimizationReason::kDoNotOptimize;
    }
  } else if (!any_ic_changed_ &&
             shared->bytecode_array()->Size() < kMaxSizeEarlyOptIgnition) {
    // If no IC was patched since the last tick and this function is very
    // small, optimistically optimize it now.
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold) {
      return OptimizationReason::kSmallFunction;
    }
  }
  return OptimizationReason::kDoNotOptimize;
}

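// Main entry point of the runtime profiler: walks the topmost JavaScript
// frames (up to FLAG_frame_count), applies the tiering heuristics above to
// each function, and afterwards bumps the profiler ticks of all functions
// seen in the visited frames.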
void RuntimeProfiler::MarkCandidatesForOptimization() {
  HandleScope scope(isolate_);

  if (!isolate_->use_crankshaft()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimizations
  // (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    Compiler::CompilationTier next_tier =
        Compiler::NextCompilationTier(function);
    if (function->shared()->IsInterpreted()) {
      if (next_tier == Compiler::BASELINE) {
        MaybeBaselineIgnition(function, frame);
      } else {
        DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
        MaybeOptimizeIgnition(function, frame);
      }
    } else {
      DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
      MaybeOptimizeFullCodegen(function, frame, frame_count);
    }

    // Update shared function info ticks after checking for whether functions
    // should be optimized to keep FCG (which updates ticks on code) and
    // Ignition (which updates ticks on shared function info) in sync.
    List<SharedFunctionInfo*> functions(4);
    frame->GetFunctions(&functions);
    for (int i = functions.length(); --i >= 0;) {
      SharedFunctionInfo* shared_function_info = functions[i];
      int ticks = shared_function_info->profiler_ticks();
      if (ticks < Smi::kMaxValue) {
        shared_function_info->set_profiler_ticks(ticks + 1);
      }
    }
  }
  any_ic_changed_ = false;
}

}  // namespace internal
}  // namespace v8