// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "compiler.h"

#include "bootstrapper.h"
#include "codegen.h"
#include "compilation-cache.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "gdb-jit.h"
#include "typing.h"
#include "hydrogen.h"
#include "isolate-inl.h"
#include "lithium.h"
#include "liveedit.h"
#include "parser.h"
#include "rewriter.h"
#include "runtime-profiler.h"
#include "scanner-character-streams.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "vm-state-inl.h"

namespace v8 {
namespace internal {


CompilationInfo::CompilationInfo(Handle<Script> script,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE)),
      script_(script),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(script->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) | IsLazy::encode(true)),
      shared_info_(shared_info),
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) | IsLazy::encode(true)),
      closure_(closure),
      shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
                                 Isolate* isolate,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) |
             IsLazy::encode(true)),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0),
      parameter_count_(0) {
  Initialize(isolate, STUB, zone);
  code_stub_ = stub;
}


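// Shared initialization for all of the constructors above: reset the
// pointer fields, pick up the opt count from the shared function info
// (if any), and record the compilation mode and language mode.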
void CompilationInfo::Initialize(Isolate* isolate,
                                 Mode mode,
                                 Zone* zone) {
  isolate_ = isolate;
  function_ = NULL;
  scope_ = NULL;
  global_scope_ = NULL;
  extension_ = NULL;
  pre_parse_data_ = NULL;
  zone_ = zone;
  deferred_handles_ = NULL;
  code_stub_ = NULL;
  prologue_offset_ = Code::kPrologueOffsetNotSet;
  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
                   ? new List<OffsetRange>(2) : NULL;
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    dependencies_[i] = NULL;
  }
  if (mode == STUB) {
    mode_ = STUB;
    return;
  }
  mode_ = mode;
  abort_due_to_dependency_ = false;
  if (script_->type()->value() == Script::TYPE_NATIVE) {
    MarkAsNative();
  }
  if (!shared_info_.is_null()) {
    ASSERT(language_mode() == CLASSIC_MODE);
    SetLanguageMode(shared_info_->language_mode());
  }
  set_bailout_reason(kUnknown);
}


CompilationInfo::~CompilationInfo() {
  delete deferred_handles_;
  delete no_frame_ranges_;
#ifdef DEBUG
  // Check that no dependent maps have been added, or that any dependent
  // maps that were added have since been rolled back or committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ASSERT_EQ(NULL, dependencies_[i]);
  }
#endif  // DEBUG
}


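// For each dependency group, replace this CompilationInfo with the finished
// code object in the dependent code lists collected during compilation.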
void CompilationInfo::CommitDependencies(Handle<Code> code) {
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    ASSERT(!object_wrapper_.is_null());
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->UpdateToFinishedCode(group, this, *code);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


void CompilationInfo::RollbackDependencies() {
  // Unregister from all dependent maps if not yet committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->RemoveCompilationInfo(group, this);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


int CompilationInfo::num_parameters() const {
  if (IsStub()) {
    ASSERT(parameter_count_ > 0);
    return parameter_count_;
  } else {
    return scope()->num_parameters();
  }
}


int CompilationInfo::num_heap_slots() const {
  if (IsStub()) {
    return 0;
  } else {
    return scope()->num_heap_slots();
  }
}


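// Code stubs compute their flags from the stub itself; everything else that
// reaches this point is compiled as an optimized function.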
Code::Flags CompilationInfo::flags() const {
  if (IsStub()) {
    return Code::ComputeFlags(code_stub()->GetCodeKind(),
                              code_stub()->GetICState(),
                              code_stub()->GetExtraICState(),
                              code_stub()->GetStubType(),
                              code_stub()->GetStubFlags());
  } else {
    return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
  }
}


// Disable optimization for the rest of the compilation pipeline.
void CompilationInfo::DisableOptimization() {
  bool is_optimizable_closure =
      FLAG_optimize_closures &&
      closure_.is_null() &&
      !scope_->HasTrivialOuterContext() &&
      !scope_->outer_scope_calls_non_strict_eval() &&
      !scope_->inside_with();
  SetMode(is_optimizable_closure ? BASE : NONOPT);
}


// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  return FLAG_self_optimization &&
      FLAG_crankshaft &&
      !function()->flags()->Contains(kDontSelfOptimize) &&
      !function()->dont_optimize() &&
      function()->scope()->AllowsLazyCompilation() &&
      (shared_info().is_null() || !shared_info()->optimization_disabled());
}


// Determine whether to use the full compiler for all code. If the flag
// --always-full-compiler is specified this is the case. For the virtual-frame
// based compiler the full compiler is also used if a debugger is connected,
// as the code from the full compiler supports more precise break points. For
// the crankshaft adaptive compiler, debugging the optimized code is not
// possible at all. However, crankshaft supports recompilation of functions,
// so in this case the full compiler need not be used if a debugger is
// attached, but only if break points have actually been set.
static bool IsDebuggerActive(Isolate* isolate) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  return isolate->use_crankshaft() ?
      isolate->debug()->has_break_points() :
      isolate->debugger()->IsDebuggerActive();
#else
  return false;
#endif
}


static bool AlwaysFullCompiler(Isolate* isolate) {
  return FLAG_always_full_compiler || IsDebuggerActive(isolate);
}


void RecompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race. Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}


// A return value of true indicates that the compilation pipeline is still
// going, not necessarily that we optimized the code.
static bool MakeCrankshaftCode(CompilationInfo* info) {
  RecompileJob job(info);
  RecompileJob::Status status = job.CreateGraph();

  if (status != RecompileJob::SUCCEEDED) {
    return status != RecompileJob::FAILED;
  }
  status = job.OptimizeGraph();
  if (status != RecompileJob::SUCCEEDED) {
    status = job.AbortOptimization();
    return status != RecompileJob::FAILED;
  }
  status = job.GenerateAndInstallCode();
  return status != RecompileJob::FAILED;
}


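// A graph builder that also records the source position of each expression
// and statement node it visits, used when --emit-opt-code-positions is on.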
class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

#define DEF_VISIT(type)                                  \
  virtual void Visit##type(type* node) V8_OVERRIDE {     \
    if (node->position() != RelocInfo::kNoPosition) {    \
      SetSourcePosition(node->position());               \
    }                                                    \
    HOptimizedGraphBuilder::Visit##type(node);           \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                                  \
  virtual void Visit##type(type* node) V8_OVERRIDE {     \
    if (node->position() != RelocInfo::kNoPosition) {    \
      SetSourcePosition(node->position());               \
    }                                                    \
    HOptimizedGraphBuilder::Visit##type(node);           \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                                  \
  virtual void Visit##type(type* node) V8_OVERRIDE {     \
    HOptimizedGraphBuilder::Visit##type(node);           \
  }
  MODULE_NODE_LIST(DEF_VISIT)
  DECLARATION_NODE_LIST(DEF_VISIT)
  AUXILIARY_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};


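// First stage of a recompile job: bail out early where optimization is not
// possible or not worthwhile, make sure the unoptimized code has
// deoptimization support, and then build the Hydrogen graph.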
RecompileJob::Status RecompileJob::CreateGraph() {
  ASSERT(isolate()->use_crankshaft());
  ASSERT(info()->IsOptimizing());
  ASSERT(!info()->IsCompilingForDebugging());

  // We should never arrive here if there is no code object on the
  // shared function object.
  ASSERT(info()->shared_info()->code()->kind() == Code::FUNCTION);

  // We should never arrive here if optimization has been disabled on the
  // shared function info.
  ASSERT(!info()->shared_info()->optimization_disabled());

  // Fall back to using the full code generator if it's not possible
  // to use the Hydrogen-based optimizing compiler. We already have
  // generated code for this from the shared function object.
  if (AlwaysFullCompiler(isolate())) {
    info()->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  // Limit the number of times we re-compile a function with
  // the optimizing compiler.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    info()->set_bailout_reason(kOptimizedTooManyTimes);
    return AbortOptimization();
  }

  // Due to an encoding limit on LUnallocated operands in the Lithium
  // language, we cannot optimize functions with too many formal parameters
  // or perform on-stack replacement for functions with too many
  // stack-allocated local variables.
  //
  // The encoding is as a signed value, with parameters and receiver using
  // the negative indices and locals the non-negative ones.
  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
  Scope* scope = info()->scope();
  if ((scope->num_parameters() + 1) > parameter_limit) {
    info()->set_bailout_reason(kTooManyParameters);
    return AbortOptimization();
  }

  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
  if (info()->is_osr() &&
      scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
    info()->set_bailout_reason(kTooManyParametersLocals);
    return AbortOptimization();
  }

  // Take --hydrogen-filter into account.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    info()->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support. Alternatively, we may decide to
  // run the full code generator to get a baseline for the compile-time
  // performance of the hydrogen-based compiler.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    CompilationInfoWithZone unoptimized(info()->shared_info());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    unoptimized.SetFunction(info()->function());
    unoptimized.SetScope(info()->scope());
    unoptimized.SetContext(info()->context());
    if (should_recompile) unoptimized.EnableDeoptimizationSupport();
    bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
    if (should_recompile) {
      if (!succeeded) return SetLastStatus(FAILED);
      Handle<SharedFunctionInfo> shared = info()->shared_info();
      shared->EnableDeoptimizationSupport(*unoptimized.code());
      // The existing unoptimized code was replaced with the new one.
      Compiler::RecordFunctionCompilation(
          Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  // Check that the unoptimized, shared code is ready for
  // optimizations. When using the always_opt flag we disregard the
  // optimizable marker in the code object and optimize anyway. This
  // is safe as long as the unoptimized code has deoptimization
  // support.
  ASSERT(FLAG_always_opt || info()->shared_info()->code()->optimizable());
  ASSERT(info()->shared_info()->has_deoptimization_support());

  if (FLAG_trace_hydrogen) {
    Handle<String> name = info()->function()->debug_name();
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling method %s using hydrogen\n", *name->ToCString());
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  graph_builder_ = FLAG_emit_opt_code_positions
      ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
      : new(info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    info()->SetCode(Handle<Code>::null());
    return SetLastStatus(FAILED);
  }

  // The function being compiled may have bailed out due to an inline
  // candidate bailing out. In such a case, we don't disable
  // optimization on the shared_info.
  ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
  if (graph_ == NULL) {
    if (graph_builder_->inline_bailout()) {
      info_->AbortOptimization();
      return SetLastStatus(BAILED_OUT);
    } else {
      return AbortOptimization();
    }
  }

  if (info()->HasAbortedDueToDependencyChange()) {
    info_->set_bailout_reason(kBailedOutDueToDependencyChange);
    info_->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  return SetLastStatus(SUCCEEDED);
}


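// Second stage: run the Hydrogen optimization passes and lower the graph to
// a Lithium chunk. No heap allocation or handle dereferencing is allowed
// here, so this stage may run on the concurrent recompilation thread.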
RecompileJob::Status RecompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  ASSERT(last_status() == SUCCEEDED);
  Timer t(this, &time_taken_to_optimize_);
  ASSERT(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph_->Optimize(&bailout_reason)) {
    if (bailout_reason != kNoReason) graph_builder_->Bailout(bailout_reason);
    return SetLastStatus(BAILED_OUT);
  } else {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ == NULL) {
      return SetLastStatus(BAILED_OUT);
    }
  }
  return SetLastStatus(SUCCEEDED);
}


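// Final stage: generate machine code from the Lithium chunk, record the
// optimization statistics, and add the result to the native context's list
// of optimized code objects.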
RecompileJob::Status RecompileJob::GenerateAndInstallCode() {
  ASSERT(last_status() == SUCCEEDED);
  ASSERT(!info()->HasAbortedDueToDependencyChange());
  DisallowCodeDependencyChange no_dependency_change;
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    ASSERT(chunk_ != NULL);
    ASSERT(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation. To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      if (info()->bailout_reason() == kNoReason) {
        info()->set_bailout_reason(kCodeGenerationFailed);
      }
      return AbortOptimization();
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}


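// Choose between the crankshaft pipeline above and the full code generator,
// depending on the isolate and on what the CompilationInfo requests.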
static bool GenerateCode(CompilationInfo* info) {
  bool is_optimizing = info->isolate()->use_crankshaft() &&
                       !info->IsCompilingForDebugging() &&
                       info->IsOptimizing();
  if (is_optimizing) {
    Logger::TimerEventScope timer(
        info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);
    return MakeCrankshaftCode(info);
  } else {
    if (info->IsOptimizing()) {
      // Have the CompilationInfo decide if the compilation should be
      // BASE or NONOPT.
      info->DisableOptimization();
    }
    Logger::TimerEventScope timer(
        info->isolate(), Logger::TimerEventScope::v8_compile_full_code);
    return FullCodeGenerator::MakeCode(info);
  }
}


static bool MakeCode(CompilationInfo* info) {
  // Precondition: code has been parsed. Postcondition: the code field in
  // the compilation info is set if compilation succeeded.
  ASSERT(info->function() != NULL);
  return Rewriter::Rewrite(info) && Scope::Analyze(info) && GenerateCode(info);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
  // Precondition: code has been parsed. Postcondition: the code field in
  // the compilation info is set if compilation succeeded.
  bool succeeded = MakeCode(info);
  if (!info->shared_info().is_null()) {
    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(),
                                                     info->zone());
    info->shared_info()->set_scope_info(*scope_info);
  }
  return succeeded;
}
#endif


static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
                                          bool allow_lazy_without_ctx = false) {
  return LiveEditFunctionTracker::IsActive(info->isolate()) ||
         (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
}


// Sets the expected number of properties based on an estimate from the
// compiler.
void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
                                          int estimate) {
  // See the comment in SetExpectedNofProperties.
  if (shared->live_objects_may_exist()) return;

  // If no properties are added in the constructor, they are more likely
  // to be added later.
  if (estimate == 0) estimate = 2;

  // TODO(yangguo): check whether those heuristics are still up-to-date.
  // We do not shrink objects that go into a snapshot (yet), so we adjust
  // the estimate conservatively.
  if (Serializer::enabled()) {
    estimate += 2;
  } else if (FLAG_clever_optimizations) {
    // Inobject slack tracking will reclaim redundant inobject space later,
    // so we can afford to adjust the estimate generously.
    estimate += 8;
  } else {
    estimate += 3;
  }

  shared->set_expected_nof_properties(estimate);
}


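// Compiles a top-level script or eval source: set up the script object,
// parse, generate code, and wrap the result in a new SharedFunctionInfo.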
static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);

  ASSERT(!isolate->native_context().is_null());
  Handle<Script> script = info->script();
  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(0));

#ifdef ENABLE_DEBUGGER_SUPPORT
  if (info->is_eval()) {
    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
    // For eval scripts add information on the function from which eval was
    // called.
    if (info->is_eval()) {
      StackTraceFrameIterator it(isolate);
      if (!it.done()) {
        script->set_eval_from_shared(it.frame()->function()->shared());
        Code* code = it.frame()->LookupCode();
        int offset = static_cast<int>(
            it.frame()->pc() - code->instruction_start());
        script->set_eval_from_instructions_offset(Smi::FromInt(offset));
      }
    }
  }

  // Notify the debugger.
  isolate->debugger()->OnBeforeCompile(script);
#endif

  // Only allow non-global compiles for eval.
  ASSERT(info->is_eval() || info->is_global());
  {
    Parser parser(info);
    if ((info->pre_parse_data() != NULL ||
         String::cast(script->source())->length() > FLAG_min_preparse_length) &&
        !DebuggerWantsEagerCompilation(info))
      parser.set_allow_lazy(true);
    if (!parser.Parse()) {
      return Handle<SharedFunctionInfo>::null();
    }
  }

  FunctionLiteral* lit = info->function();
  LiveEditFunctionTracker live_edit_tracker(isolate, lit);
  Handle<SharedFunctionInfo> result;
  {
    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
        ? info->isolate()->counters()->compile_eval()
        : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!MakeCode(info)) {
      if (!isolate->has_pending_exception()) isolate->StackOverflow();
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    ASSERT(!info->code().is_null());
    result =
        isolate->factory()->NewSharedFunctionInfo(
            lit->name(),
            lit->materialized_literal_count(),
            lit->is_generator(),
            info->code(),
            ScopeInfo::Create(info->scope(), info->zone()));

    ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    Compiler::SetFunctionInfo(result, lit, true, script);

    if (script->name()->IsString()) {
      PROFILE(isolate, CodeCreateEvent(
          info->is_eval()
              ? Logger::EVAL_TAG
              : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
          *info->code(),
          *result,
          info,
          String::cast(script->name())));
      GDBJIT(AddCode(Handle<String>(String::cast(script->name())),
                     script,
                     info->code(),
                     info));
    } else {
      PROFILE(isolate, CodeCreateEvent(
          info->is_eval()
              ? Logger::EVAL_TAG
              : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
          *info->code(),
          *result,
          info,
          isolate->heap()->empty_string()));
      GDBJIT(AddCode(Handle<String>(), script, info->code(), info));
    }

    // Hint to the runtime system used when allocating space for initial
    // properties by setting the expected number of properties for the
    // instances of the function.
    SetExpectedNofPropertiesFromEstimate(result,
                                         lit->expected_property_count());

    script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Notify the debugger.
  isolate->debugger()->OnAfterCompile(
      script, Debugger::NO_AFTER_COMPILE_FLAGS);
#endif

  live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());

  return result;
}


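// Compiles a full script, going through the compilation cache first (except
// for extensions) so that the same source is not compiled twice for the
// same context.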
Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
                                             Handle<Object> script_name,
                                             int line_offset,
                                             int column_offset,
                                             bool is_shared_cross_origin,
                                             Handle<Context> context,
                                             v8::Extension* extension,
                                             ScriptDataImpl* pre_data,
                                             Handle<Object> script_data,
                                             NativesFlag natives) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    result = compilation_cache->LookupScript(source,
                                             script_name,
                                             line_offset,
                                             column_offset,
                                             is_shared_cross_origin,
                                             context);
  }

  if (result.is_null()) {
    // No cache entry found. Do pre-parsing, if it makes sense, and compile
    // the script.
    // Building preparse data that is only used immediately after is only a
    // saving if we might skip building the AST for lazily compiled functions.
    // I.e., preparse data isn't relevant when the lazy flag is off, and
    // for small sources, odds are that there aren't many functions
    // that would be compiled lazily anyway, so we skip the preparse step
    // in that case too.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(Smi::FromInt(line_offset));
      script->set_column_offset(Smi::FromInt(column_offset));
    }
    script->set_is_shared_cross_origin(is_shared_cross_origin);

    script->set_data(script_data.is_null() ? isolate->heap()->undefined_value()
                                           : *script_data);

    // Compile the function and add it to the cache.
    CompilationInfoWithZone info(script);
    info.MarkAsGlobal();
    info.SetExtension(extension);
    info.SetPreParseData(pre_data);
    info.SetContext(context);
    if (FLAG_use_strict) {
      info.SetLanguageMode(FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE);
    }
    result = MakeFunctionInfo(&info);
    if (extension == NULL && !result.is_null() && !result->dont_cache()) {
      compilation_cache->PutScript(source, context, result);
    }
  } else {
    if (result->ic_age() != isolate->heap()->global_ic_age()) {
      result->ResetForNewContext(isolate->heap()->global_ic_age());
    }
  }

  if (result.is_null()) isolate->ReportPendingMessages();
  return result;
}


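// Compiles an eval source, using the eval compilation cache, which is keyed
// on the calling context and the scope position of the eval call.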
Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
                                                 Handle<Context> context,
                                                 bool is_global,
                                                 LanguageMode language_mode,
                                                 ParseRestriction restriction,
                                                 int scope_position) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  // Do a lookup in the compilation cache; if the entry is not there, invoke
  // the compiler and add the result to the cache.
  Handle<SharedFunctionInfo> result;
  CompilationCache* compilation_cache = isolate->compilation_cache();
  result = compilation_cache->LookupEval(source,
                                         context,
                                         is_global,
                                         language_mode,
                                         scope_position);

  if (result.is_null()) {
    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    CompilationInfoWithZone info(script);
    info.MarkAsEval();
    if (is_global) info.MarkAsGlobal();
    info.SetLanguageMode(language_mode);
    info.SetParseRestriction(restriction);
    info.SetContext(context);
    result = MakeFunctionInfo(&info);
    if (!result.is_null()) {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      result->DisableOptimization(kEval);

      // If the caller is in strict mode, the result must be in strict mode
      // or extended mode as well, but not the other way around. Consider:
      // eval("'use strict'; ...");
      ASSERT(language_mode != STRICT_MODE || !result->is_classic_mode());
      // If the caller is in extended mode, the result must also be in
      // extended mode.
      ASSERT(language_mode != EXTENDED_MODE ||
             result->is_extended_mode());
      if (!result->dont_cache()) {
        compilation_cache->PutEval(
            source, context, is_global, result, scope_position);
      }
    }
  } else {
    if (result->ic_age() != isolate->heap()->global_ic_age()) {
      result->ResetForNewContext(isolate->heap()->global_ic_age());
    }
  }

  return result;
}


static bool InstallFullCode(CompilationInfo* info) {
  // Update the shared function info with the compiled code and the
  // scope info. Please note that the order of the shared function
  // info initialization is important since set_scope_info might
  // trigger a GC, causing the ASSERT below to be invalid if the code
  // was flushed. By setting the code object last we avoid this.
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<Code> code = info->code();
  CHECK(code->kind() == Code::FUNCTION);
  Handle<JSFunction> function = info->closure();
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->scope(), info->zone());
  shared->set_scope_info(*scope_info);
  shared->ReplaceCode(*code);
  if (!function.is_null()) {
    function->ReplaceCode(*code);
    ASSERT(!function->IsOptimized());
  }

  // Set the expected number of properties for instances.
  FunctionLiteral* lit = info->function();
  int expected = lit->expected_property_count();
  SetExpectedNofPropertiesFromEstimate(shared, expected);

  // Check the function has compiled code.
  ASSERT(shared->is_compiled());
  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
  shared->set_ast_node_count(lit->ast_node_count());

  if (info->isolate()->use_crankshaft() &&
      !function.is_null() &&
      !shared->optimization_disabled()) {
    // If we're asked to always optimize, we compile the optimized
    // version of the function right away - unless the debugger is
    // active, as it makes no sense to compile optimized code then.
    if (FLAG_always_opt &&
        !info->isolate()->DebuggerHasBreakPoints()) {
      CompilationInfoWithZone optimized(function);
      optimized.SetOptimizing(BailoutId::None());
      return Compiler::CompileLazy(&optimized);
    }
  }
  return true;
}


static void InstallCodeCommon(CompilationInfo* info) {
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<Code> code = info->code();
  ASSERT(!code.is_null());

  // Set optimizable to false if this is disallowed by the shared
  // function info, e.g., we might have flushed the code and must
  // reset this bit when lazy compiling the code again.
  if (shared->optimization_disabled()) code->set_optimizable(false);

  if (shared->code() == *code) {
    // Do not send compilation event for the same code twice.
    return;
  }
  Compiler::RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
}


static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Cache non-OSR optimized code.
  if (FLAG_cache_optimized_code && !info->is_osr()) {
    Handle<JSFunction> function = info->closure();
    Handle<SharedFunctionInfo> shared(function->shared());
    Handle<FixedArray> literals(function->literals());
    Handle<Context> native_context(function->context()->native_context());
    SharedFunctionInfo::AddToOptimizedCodeMap(
        shared, native_context, code, literals);
  }
}


static bool InstallCodeFromOptimizedCodeMap(CompilationInfo* info) {
  if (!info->IsOptimizing()) return false;  // Nothing to look up.

  // Lookup non-OSR optimized code.
  if (FLAG_cache_optimized_code && !info->is_osr()) {
    Handle<SharedFunctionInfo> shared = info->shared_info();
    Handle<JSFunction> function = info->closure();
    ASSERT(!function.is_null());
    Handle<Context> native_context(function->context()->native_context());
    int index = shared->SearchOptimizedCodeMap(*native_context);
    if (index > 0) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        PrintF("]\n");
      }
      // Caching of optimized code enabled and optimized code found.
      shared->InstallFromOptimizedCodeMap(*function, index);
      return true;
    }
  }
  return false;
}


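// Lazily compiles a function that already has a SharedFunctionInfo: reuse
// code from the optimized code map when possible, otherwise parse and
// compile, installing either optimized code or full code.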
bool Compiler::CompileLazy(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);

  if (InstallCodeFromOptimizedCodeMap(info)) return true;

  // Generate the AST for the lazily compiled function.
  if (Parser::Parse(info)) {
    // Measure how long it takes to do the lazy compilation; only take the
    // rest of the function into account to avoid overlap with the lazy
    // parsing statistics.
    HistogramTimerScope timer(isolate->counters()->compile_lazy());

    // After parsing we know the function's language mode. Remember it.
    LanguageMode language_mode = info->function()->language_mode();
    info->SetLanguageMode(language_mode);
    shared->set_language_mode(language_mode);

    // Compile the code.
    if (!MakeCode(info)) {
      if (!isolate->has_pending_exception()) {
        isolate->StackOverflow();
      }
    } else {
      InstallCodeCommon(info);

      if (info->IsOptimizing()) {
        // Optimized code successfully created.
        Handle<Code> code = info->code();
        ASSERT(shared->scope_info() != ScopeInfo::Empty(isolate));
        // TODO(titzer): Only replace the code if it was not an OSR compile.
        info->closure()->ReplaceCode(*code);
        InsertCodeIntoOptimizedCodeMap(info);
        return true;
      } else if (!info->is_osr()) {
        // Compilation failed. Replace with full code if not OSR compile.
        return InstallFullCode(info);
      }
    }
  }

  ASSERT(info->code().is_null());
  return false;
}


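// Queues a function for concurrent recompilation. Graph creation still
// happens synchronously on this thread; the optimization phase then runs on
// the concurrent recompilation thread and the resulting code is installed
// later by InstallOptimizedCode.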
bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
                                   uint32_t osr_pc_offset) {
  bool compiling_for_osr = (osr_pc_offset != 0);

  Isolate* isolate = closure->GetIsolate();
  // Here we prepare compile data for the concurrent recompilation thread, but
  // this still happens synchronously and interrupts execution.
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);

  if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      closure->PrintName();
      PrintF(" on next run.\n");
    }
    return false;
  }

  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure));
  Handle<SharedFunctionInfo> shared = info->shared_info();

  if (compiling_for_osr) {
    BailoutId osr_ast_id =
        shared->code()->TranslatePcOffsetToAstId(osr_pc_offset);
    ASSERT(!osr_ast_id.IsNone());
    info->SetOptimizing(osr_ast_id);
    info->set_osr_pc_offset(osr_pc_offset);

    if (FLAG_trace_osr) {
      PrintF("[COSR - attempt to queue ");
      closure->PrintName();
      PrintF(" at AST id %d]\n", osr_ast_id.ToInt());
    }
  } else {
    info->SetOptimizing(BailoutId::None());
  }

  VMState<COMPILER> state(isolate);
  PostponeInterruptsScope postpone(isolate);

  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);

  {
    CompilationHandleScope handle_scope(*info);

    if (!compiling_for_osr && InstallCodeFromOptimizedCodeMap(*info)) {
      return true;
    }

    if (Parser::Parse(*info)) {
      LanguageMode language_mode = info->function()->language_mode();
      info->SetLanguageMode(language_mode);
      shared->set_language_mode(language_mode);
      info->SaveHandles();

      if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) {
        RecompileJob* job = new(info->zone()) RecompileJob(*info);
        RecompileJob::Status status = job->CreateGraph();
        if (status == RecompileJob::SUCCEEDED) {
          info.Detach();
          shared->code()->set_profiler_ticks(0);
          isolate->optimizing_compiler_thread()->QueueForOptimization(job);
          ASSERT(!isolate->has_pending_exception());
          return true;
        } else if (status == RecompileJob::BAILED_OUT) {
          isolate->clear_pending_exception();
          InstallFullCode(*info);
        }
      }
    }
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return false;
}


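// Called on the main thread when a concurrent recompile job finishes:
// install the generated optimized code, or fall back to full code if the
// job bailed out or the debugger is now active.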
Handle<Code> Compiler::InstallOptimizedCode(RecompileJob* job) {
  SmartPointer<CompilationInfo> info(job->info());
  // The function may have already been optimized by OSR. Simply continue.
  // Except when OSR already disabled optimization for some reason.
  if (info->shared_info()->optimization_disabled()) {
    info->AbortOptimization();
    InstallFullCode(*info);
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** aborting optimization for ");
      info->closure()->PrintName();
      PrintF(" as it has been disabled.\n");
    }
    ASSERT(!info->closure()->IsInRecompileQueue());
    return Handle<Code>::null();
  }

  Isolate* isolate = info->isolate();
  VMState<COMPILER> state(isolate);
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);
  // If crankshaft succeeded, install the optimized code; otherwise install
  // the unoptimized code.
  RecompileJob::Status status = job->last_status();
  if (info->HasAbortedDueToDependencyChange()) {
    info->set_bailout_reason(kBailedOutDueToDependencyChange);
    status = job->AbortOptimization();
  } else if (status != RecompileJob::SUCCEEDED) {
    info->set_bailout_reason(kFailedBailedOutLastTime);
    status = job->AbortOptimization();
  } else if (isolate->DebuggerHasBreakPoints()) {
    info->set_bailout_reason(kDebuggerIsActive);
    status = job->AbortOptimization();
  } else {
    status = job->GenerateAndInstallCode();
    ASSERT(status == RecompileJob::SUCCEEDED ||
           status == RecompileJob::BAILED_OUT);
  }

  InstallCodeCommon(*info);
  if (status == RecompileJob::SUCCEEDED) {
    Handle<Code> code = info->code();
    ASSERT(info->shared_info()->scope_info() != ScopeInfo::Empty(isolate));
    info->closure()->ReplaceCode(*code);
    if (info->shared_info()->SearchOptimizedCodeMap(
            info->closure()->context()->native_context()) == -1) {
      InsertCodeIntoOptimizedCodeMap(*info);
    }
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Optimized code for ");
      info->closure()->PrintName();
      PrintF(" installed.\n");
    }
  } else {
    info->AbortOptimization();
    InstallFullCode(*info);
  }
  // Optimized code is finally replacing unoptimized code. Reset the latter's
  // profiler ticks so we do not re-optimize too soon after a deopt.
  info->shared_info()->code()->set_profiler_ticks(0);
  ASSERT(!info->closure()->IsInRecompileQueue());
  return (status == RecompileJob::SUCCEEDED) ? info->code()
                                             : Handle<Code>::null();
}


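// Creates a SharedFunctionInfo for a function literal encountered while
// compiling an enclosing script, either generating its code right away or
// marking it for lazy compilation.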
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
                                                       Handle<Script> script) {
  // Precondition: code has been parsed and scopes have been analyzed.
  CompilationInfoWithZone info(script);
  info.SetFunction(literal);
  info.SetScope(literal->scope());
  info.SetLanguageMode(literal->scope()->language_mode());

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy = literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);

  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate));

  // Generate code.
  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
    Handle<Code> code = isolate->builtins()->LazyCompile();
    info.SetCode(code);
  } else if (GenerateCode(&info)) {
    ASSERT(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.scope(), info.zone());
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result =
      factory->NewSharedFunctionInfo(literal->name(),
                                     literal->materialized_literal_count(),
                                     literal->is_generator(),
                                     info.code(),
                                     scope_info);
  SetFunctionInfo(result, literal, false, script);
  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(allow_lazy);
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}


// Sets the function info on a function.
// The start_position points to the first '(' character after the function name
// in the full script source. When counting characters in the script source the
// first character is number 0 (not 1).
void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                               FunctionLiteral* lit,
                               bool is_toplevel,
                               Handle<Script> script) {
  function_info->set_length(lit->parameter_count());
  function_info->set_formal_parameter_count(lit->parameter_count());
  function_info->set_script(*script);
  function_info->set_function_token_position(lit->function_token_position());
  function_info->set_start_position(lit->start_position());
  function_info->set_end_position(lit->end_position());
  function_info->set_is_expression(lit->is_expression());
  function_info->set_is_anonymous(lit->is_anonymous());
  function_info->set_is_toplevel(is_toplevel);
  function_info->set_inferred_name(*lit->inferred_name());
  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
  function_info->set_allows_lazy_compilation_without_context(
      lit->AllowsLazyCompilationWithoutContext());
  function_info->set_language_mode(lit->language_mode());
  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
  function_info->set_ast_node_count(lit->ast_node_count());
  function_info->set_is_function(lit->is_function());
  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
  function_info->set_is_generator(lit->is_generator());
}


void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                         CompilationInfo* info,
                                         Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using a Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->script();
    Handle<Code> code = info->code();
    if (*code == info->isolate()->builtins()->builtin(Builtins::kLazyCompile))
      return;
    int line_num = GetScriptLineNumber(script, shared->start_position()) + 1;
    int column_num =
        GetScriptColumnNumber(script, shared->start_position()) + 1;
    USE(line_num);
    if (script->name()->IsString()) {
      PROFILE(info->isolate(),
              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                              *code,
                              *shared,
                              info,
                              String::cast(script->name()),
                              line_num,
                              column_num));
    } else {
      PROFILE(info->isolate(),
              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                              *code,
                              *shared,
                              info,
                              info->isolate()->heap()->empty_string(),
                              line_num,
                              column_num));
    }
  }

  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
                 Handle<Script>(info->script()),
                 Handle<Code>(info->code()),
                 info));
}


CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info), zone_(info->isolate()) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}


CompilationPhase::~CompilationPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = zone()->allocation_size();
    size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
  }
}


bool CompilationPhase::ShouldProduceTraceOutput() const {
  // Trace if the appropriate trace flag is set and the phase name's first
  // character is in the FLAG_trace_phase command line parameter.
  AllowHandleDereference allow_deref;
  bool tracing_on = info()->IsStub()
      ? FLAG_trace_hydrogen_stubs
      : (FLAG_trace_hydrogen &&
         info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
  return (tracing_on &&
      OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
}

} }  // namespace v8::internal