1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler.h"
6
7 #include <algorithm>
8 #include <memory>
9
10 #include "src/asmjs/asm-js.h"
11 #include "src/asmjs/asm-typer.h"
12 #include "src/assembler-inl.h"
13 #include "src/ast/ast-numbering.h"
14 #include "src/ast/prettyprinter.h"
15 #include "src/ast/scopes.h"
16 #include "src/bootstrapper.h"
17 #include "src/codegen.h"
18 #include "src/compilation-cache.h"
19 #include "src/compiler-dispatcher/compiler-dispatcher.h"
20 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
21 #include "src/compiler/pipeline.h"
22 #include "src/crankshaft/hydrogen.h"
23 #include "src/debug/debug.h"
24 #include "src/debug/liveedit.h"
25 #include "src/frames-inl.h"
26 #include "src/full-codegen/full-codegen.h"
27 #include "src/globals.h"
28 #include "src/heap/heap.h"
29 #include "src/interpreter/interpreter.h"
30 #include "src/isolate-inl.h"
31 #include "src/log-inl.h"
32 #include "src/messages.h"
33 #include "src/parsing/parsing.h"
34 #include "src/parsing/rewriter.h"
35 #include "src/parsing/scanner-character-streams.h"
36 #include "src/runtime-profiler.h"
37 #include "src/snapshot/code-serializer.h"
38 #include "src/vm-state-inl.h"
39
40 namespace v8 {
41 namespace internal {
42
43 // A wrapper around a ParseInfo that detaches the parser handles from the
44 // underlying DeferredHandleScope and stores them in info_ on destruction.
45 class ParseHandleScope final {
46 public:
47 explicit ParseHandleScope(ParseInfo* info)
48 : deferred_(info->isolate()), info_(info) {}
49 ~ParseHandleScope() { info_->set_deferred_handles(deferred_.Detach()); }
50
51 private:
52 DeferredHandleScope deferred_;
53 ParseInfo* info_;
54 };
55
56 // A wrapper around a CompilationInfo that detaches the Handles from
57 // the underlying DeferredHandleScope and stores them in info_ on
58 // destruction.
59 class CompilationHandleScope final {
60 public:
61 explicit CompilationHandleScope(CompilationInfo* info)
62 : deferred_(info->isolate()), info_(info) {}
63 ~CompilationHandleScope() { info_->set_deferred_handles(deferred_.Detach()); }
64
65 private:
66 DeferredHandleScope deferred_;
67 CompilationInfo* info_;
68 };
69
70 // Helper that times a scoped region and records the elapsed time.
71 struct ScopedTimer {
72 explicit ScopedTimer(base::TimeDelta* location) : location_(location) {
73 DCHECK(location_ != NULL);
74 timer_.Start();
75 }
76
77 ~ScopedTimer() { *location_ += timer_.Elapsed(); }
78
79 base::ElapsedTimer timer_;
80 base::TimeDelta* location_;
81 };
82
83 // ----------------------------------------------------------------------------
84 // Implementation of CompilationJob
85
86 CompilationJob::CompilationJob(Isolate* isolate, CompilationInfo* info,
87 const char* compiler_name, State initial_state)
88 : info_(info),
89 isolate_thread_id_(isolate->thread_id()),
90 compiler_name_(compiler_name),
91 state_(initial_state),
92 stack_limit_(isolate->stack_guard()->real_climit()),
93 executed_on_background_thread_(false) {}
94
95 CompilationJob::Status CompilationJob::PrepareJob() {
96 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
97 DisallowJavascriptExecution no_js(isolate());
98
99 if (FLAG_trace_opt && info()->IsOptimizing()) {
100 OFStream os(stdout);
101 os << "[compiling method " << Brief(*info()->closure()) << " using "
102 << compiler_name_;
103 if (info()->is_osr()) os << " OSR";
104 os << "]" << std::endl;
105 }
106
107 // Delegate to the underlying implementation.
108 DCHECK(state() == State::kReadyToPrepare);
109 ScopedTimer t(&time_taken_to_prepare_);
110 return UpdateState(PrepareJobImpl(), State::kReadyToExecute);
111 }
112
113 CompilationJob::Status CompilationJob::ExecuteJob() {
114 std::unique_ptr<DisallowHeapAllocation> no_allocation;
115 std::unique_ptr<DisallowHandleAllocation> no_handles;
116 std::unique_ptr<DisallowHandleDereference> no_deref;
117 std::unique_ptr<DisallowCodeDependencyChange> no_dependency_change;
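  // When the job may run on a background thread, disallow heap allocation,
  // handle creation/dereferencing and code dependency changes for the
  // duration of ExecuteJobImpl(); these operations are only safe on the
  // main thread.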
118 if (can_execute_on_background_thread()) {
119 no_allocation.reset(new DisallowHeapAllocation());
120 no_handles.reset(new DisallowHandleAllocation());
121 no_deref.reset(new DisallowHandleDereference());
122 no_dependency_change.reset(new DisallowCodeDependencyChange());
123 executed_on_background_thread_ =
124 !ThreadId::Current().Equals(isolate_thread_id_);
125 } else {
126 DCHECK(ThreadId::Current().Equals(isolate_thread_id_));
127 }
128
129 // Delegate to the underlying implementation.
130 DCHECK(state() == State::kReadyToExecute);
131 ScopedTimer t(&time_taken_to_execute_);
132 return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize);
133 }
134
135 CompilationJob::Status CompilationJob::FinalizeJob() {
136 DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
137 DisallowCodeDependencyChange no_dependency_change;
138 DisallowJavascriptExecution no_js(isolate());
139 DCHECK(!info()->dependencies()->HasAborted());
140
141 // Delegate to the underlying implementation.
142 DCHECK(state() == State::kReadyToFinalize);
143 ScopedTimer t(&time_taken_to_finalize_);
144 return UpdateState(FinalizeJobImpl(), State::kSucceeded);
145 }
146
147 CompilationJob::Status CompilationJob::RetryOptimization(BailoutReason reason) {
148 DCHECK(info_->IsOptimizing());
149 info_->RetryOptimization(reason);
150 state_ = State::kFailed;
151 return FAILED;
152 }
153
154 CompilationJob::Status CompilationJob::AbortOptimization(BailoutReason reason) {
155 DCHECK(info_->IsOptimizing());
156 info_->AbortOptimization(reason);
157 state_ = State::kFailed;
158 return FAILED;
159 }
160
161 void CompilationJob::RecordUnoptimizedCompilationStats() const {
162 int code_size;
163 if (info()->has_bytecode_array()) {
164 code_size = info()->bytecode_array()->SizeIncludingMetadata();
165 } else {
166 code_size = info()->code()->SizeIncludingMetadata();
167 }
168
169 Counters* counters = isolate()->counters();
170 // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
171 counters->total_baseline_code_size()->Increment(code_size);
172 counters->total_baseline_compile_count()->Increment(1);
173
174 // TODO(5203): Add timers for each phase of compilation.
175 }
176
177 void CompilationJob::RecordOptimizedCompilationStats() const {
178 DCHECK(info()->IsOptimizing());
179 Handle<JSFunction> function = info()->closure();
180 if (!function->IsOptimized()) {
181 // Concurrent recompilation and OSR may race. Increment only once.
182 int opt_count = function->shared()->opt_count();
183 function->shared()->set_opt_count(opt_count + 1);
184 }
185 double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
186 double ms_optimize = time_taken_to_execute_.InMillisecondsF();
187 double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
188 if (FLAG_trace_opt) {
189 PrintF("[optimizing ");
190 function->ShortPrint();
191 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
192 ms_codegen);
193 }
194 if (FLAG_trace_opt_stats) {
195 static double compilation_time = 0.0;
196 static int compiled_functions = 0;
197 static int code_size = 0;
198
199 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
200 compiled_functions++;
201 code_size += function->shared()->SourceSize();
202 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
203 compiled_functions, code_size, compilation_time);
204 }
205 if (FLAG_hydrogen_stats) {
206 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
207 time_taken_to_execute_,
208 time_taken_to_finalize_);
209 }
210 }
211
212 Isolate* CompilationJob::isolate() const { return info()->isolate(); }
213
214 namespace {
215
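// Records that {code} embeds {object} weakly. New-space objects are tracked
// in a heap-wide list; for old-space objects the code is inserted into the
// object's weak-code dependency group.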
216 void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
217 Handle<Code> code) {
218 Handle<WeakCell> cell = Code::WeakCellFor(code);
219 Heap* heap = isolate->heap();
220 if (heap->InNewSpace(*object)) {
221 heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
222 } else {
223 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
224 dep =
225 DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
226 heap->AddWeakObjectToCodeDependency(object, dep);
227 }
228 }
229
230 } // namespace
231
232 void CompilationJob::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
233 // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
234 Isolate* const isolate = code->GetIsolate();
235 DCHECK(code->is_optimized_code());
236 std::vector<Handle<Map>> maps;
237 std::vector<Handle<HeapObject>> objects;
238 {
239 DisallowHeapAllocation no_gc;
240 int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
241 RelocInfo::ModeMask(RelocInfo::CELL);
242 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
243 RelocInfo::Mode mode = it.rinfo()->rmode();
244 if (mode == RelocInfo::CELL &&
245 code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
246 objects.push_back(handle(it.rinfo()->target_cell(), isolate));
247 } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
248 code->IsWeakObjectInOptimizedCode(
249 it.rinfo()->target_object())) {
250 Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
251 isolate);
252 if (object->IsMap()) {
253 maps.push_back(Handle<Map>::cast(object));
254 } else {
255 objects.push_back(object);
256 }
257 }
258 }
259 }
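  // Register this code as a weak dependency of every embedded map; other
  // weakly embedded objects go through the generic weak-object-to-code
  // dependency helper above.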
260 for (Handle<Map> map : maps) {
261 if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
262 isolate->heap()->AddRetainedMap(map);
263 }
264 Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
265 }
266 for (Handle<HeapObject> object : objects) {
267 AddWeakObjectToCodeDependency(isolate, object, code);
268 }
269 code->set_can_have_weak_objects(true);
270 }
271
272 // ----------------------------------------------------------------------------
273 // Local helper methods that make up the compilation pipeline.
274
275 namespace {
276
277 void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
278 CompilationInfo* info) {
279 // Log the code generation. If source information is available, include the
280 // script name and line number. Check explicitly whether logging is
281 // enabled, as finding the line number is not free.
282 if (info->isolate()->logger()->is_logging_code_events() ||
283 info->isolate()->is_profiling()) {
284 Handle<SharedFunctionInfo> shared = info->shared_info();
285 Handle<Script> script = info->parse_info()->script();
286 Handle<AbstractCode> abstract_code =
287 info->has_bytecode_array()
288 ? Handle<AbstractCode>::cast(info->bytecode_array())
289 : Handle<AbstractCode>::cast(info->code());
290 if (abstract_code.is_identical_to(
291 info->isolate()->builtins()->CompileLazy())) {
292 return;
293 }
294 int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
295 int column_num =
296 Script::GetColumnNumber(script, shared->start_position()) + 1;
297 String* script_name = script->name()->IsString()
298 ? String::cast(script->name())
299 : info->isolate()->heap()->empty_string();
300 CodeEventListener::LogEventsAndTags log_tag =
301 Logger::ToNativeByScript(tag, *script);
302 PROFILE(info->isolate(),
303 CodeCreateEvent(log_tag, *abstract_code, *shared, script_name,
304 line_num, column_num));
305 }
306 }
307
308 void EnsureFeedbackMetadata(CompilationInfo* info) {
309 DCHECK(info->has_shared_info());
310
311 // If no type feedback metadata exists, create it. At this point the
312 // AstNumbering pass has already run. Note the snapshot can contain outdated
313 // vectors for a different configuration, hence we also recreate a new vector
314 // when the function is not compiled (i.e. no code was serialized).
315
316 // TODO(mvstanton): reintroduce is_empty() predicate to feedback_metadata().
317 if (info->shared_info()->feedback_metadata()->length() == 0 ||
318 !info->shared_info()->is_compiled()) {
319 Handle<FeedbackMetadata> feedback_metadata = FeedbackMetadata::New(
320 info->isolate(), info->literal()->feedback_vector_spec());
321 info->shared_info()->set_feedback_metadata(*feedback_metadata);
322 }
323
324 // It's very important that recompiles do not alter the structure of the type
325 // feedback vector. Verify that the structure fits the function literal.
326 CHECK(!info->shared_info()->feedback_metadata()->SpecDiffersFrom(
327 info->literal()->feedback_vector_spec()));
328 }
329
330 bool UseTurboFan(Handle<SharedFunctionInfo> shared) {
331 if (shared->optimization_disabled()) {
332 return false;
333 }
334
335 bool must_use_ignition_turbo = shared->must_use_ignition_turbo();
336
337 // Check the enabling conditions for Turbofan.
338 // 1. "use asm" code.
339 bool is_turbofanable_asm = FLAG_turbo_asm && shared->asm_function();
340
341 // 2. Fallback for features unsupported by Crankshaft.
342 bool is_unsupported_by_crankshaft_but_turbofanable =
343 must_use_ignition_turbo && strcmp(FLAG_turbo_filter, "~~") == 0;
344
345 // 3. Explicitly enabled by the command-line filter.
346 bool passes_turbo_filter = shared->PassesFilter(FLAG_turbo_filter);
347
348 return is_turbofanable_asm || is_unsupported_by_crankshaft_but_turbofanable ||
349 passes_turbo_filter;
350 }
351
352 bool ShouldUseIgnition(Handle<SharedFunctionInfo> shared,
353 bool marked_as_debug) {
354 // Code which can't be supported by the old pipeline should use Ignition.
355 if (shared->must_use_ignition_turbo()) return true;
356
357 // Resumable functions are not supported by {FullCodeGenerator}; suspended
358 // activations stored as {JSGeneratorObject} on the heap always assume the
359 // underlying code to be based on the bytecode array.
360 DCHECK(!IsResumableFunction(shared->kind()));
361
362 // Skip Ignition for asm.js functions.
363 if (shared->asm_function()) return false;
364
365 // Skip Ignition for asm wasm code.
366 if (FLAG_validate_asm && shared->HasAsmWasmData()) {
367 return false;
368 }
369
370 // When requesting debug code as a replacement for existing code, we provide
371 // the same kind as the existing code (to prevent implicit tier-change).
372 if (marked_as_debug && shared->is_compiled()) {
373 return !shared->HasBaselineCode();
374 }
375
376 // Code destined for TurboFan should be compiled with Ignition first.
377 if (UseTurboFan(shared)) return true;
378
379 // Only use Ignition for any other function if FLAG_ignition is true.
380 return FLAG_ignition;
381 }
382
383 bool ShouldUseIgnition(CompilationInfo* info) {
384 DCHECK(info->has_shared_info());
385 return ShouldUseIgnition(info->shared_info(), info->is_debug());
386 }
387
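// asm.js modules are translated to WebAssembly only when --validate-asm is
// enabled, the module has not previously failed translation, and we are not
// compiling for debugging.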
388 bool UseAsmWasm(DeclarationScope* scope, Handle<SharedFunctionInfo> shared_info,
389 bool is_debug) {
390 return FLAG_validate_asm && scope->asm_module() &&
391 !shared_info->is_asm_wasm_broken() && !is_debug;
392 }
393
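// Eager inner functions are handed off to the compiler dispatcher only for
// concurrent compiles, and never when debugging, serializing or translating
// asm.js to WebAssembly.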
394 bool UseCompilerDispatcher(Compiler::ConcurrencyMode inner_function_mode,
395 CompilerDispatcher* dispatcher,
396 DeclarationScope* scope,
397 Handle<SharedFunctionInfo> shared_info,
398 bool is_debug, bool will_serialize) {
399 return FLAG_compiler_dispatcher_eager_inner &&
400 inner_function_mode == Compiler::CONCURRENT &&
401 dispatcher->IsEnabled() && !is_debug && !will_serialize &&
402 !UseAsmWasm(scope, shared_info, is_debug);
403 }
404
405 CompilationJob* GetUnoptimizedCompilationJob(CompilationInfo* info) {
406 // Function should have been parsed and analyzed before creating a compilation
407 // job.
408 DCHECK_NOT_NULL(info->literal());
409 DCHECK_NOT_NULL(info->scope());
410
411 if (ShouldUseIgnition(info)) {
412 return interpreter::Interpreter::NewCompilationJob(info);
413 } else {
414 return FullCodeGenerator::NewCompilationJob(info);
415 }
416 }
417
418 void InstallSharedScopeInfo(CompilationInfo* info,
419 Handle<SharedFunctionInfo> shared) {
420 Handle<ScopeInfo> scope_info = info->scope()->scope_info();
421 shared->set_scope_info(*scope_info);
422 Scope* outer_scope = info->scope()->GetOuterScopeWithContext();
423 if (outer_scope) {
424 shared->set_outer_scope_info(*outer_scope->scope_info());
425 }
426 }
427
428 void InstallSharedCompilationResult(CompilationInfo* info,
429 Handle<SharedFunctionInfo> shared) {
430 // TODO(mstarzinger): Compiling for debug code might be used to reveal inner
431 // functions via {FindSharedFunctionInfoInScript}, in which case we end up
432 // regenerating existing bytecode. Fix this!
433 if (info->is_debug() && info->has_bytecode_array()) {
434 shared->ClearBytecodeArray();
435 }
436 DCHECK(!info->code().is_null());
437 shared->ReplaceCode(*info->code());
438 if (info->has_bytecode_array()) {
439 DCHECK(!shared->HasBytecodeArray()); // Only compiled once.
440 shared->set_bytecode_array(*info->bytecode_array());
441 }
442 }
443
444 void InstallUnoptimizedCode(CompilationInfo* info) {
445 Handle<SharedFunctionInfo> shared = info->shared_info();
446
447 // Update the shared function info with the scope info.
448 InstallSharedScopeInfo(info, shared);
449
450 // Install compilation result on the shared function info
451 InstallSharedCompilationResult(info, shared);
452 }
453
454 CompilationJob::Status FinalizeUnoptimizedCompilationJob(CompilationJob* job) {
455 CompilationJob::Status status = job->FinalizeJob();
456 if (status == CompilationJob::SUCCEEDED) {
457 CompilationInfo* info = job->info();
458 EnsureFeedbackMetadata(info);
459 DCHECK(!info->code().is_null());
460 if (info->parse_info()->literal()->should_be_used_once_hint()) {
461 info->code()->MarkToBeExecutedOnce(info->isolate());
462 }
463 InstallUnoptimizedCode(info);
464 RecordFunctionCompilation(CodeEventListener::FUNCTION_TAG, info);
465 job->RecordUnoptimizedCompilationStats();
466 }
467 return status;
468 }
469
470 void SetSharedFunctionFlagsFromLiteral(FunctionLiteral* literal,
471 Handle<SharedFunctionInfo> shared_info) {
472 shared_info->set_ast_node_count(literal->ast_node_count());
473 if (literal->dont_optimize_reason() != kNoReason) {
474 shared_info->DisableOptimization(literal->dont_optimize_reason());
475 }
476 if (literal->flags() & AstProperties::kMustUseIgnitionTurbo) {
477 shared_info->set_must_use_ignition_turbo(true);
478 }
479 }
480
481 bool Renumber(ParseInfo* parse_info,
482 Compiler::EagerInnerFunctionLiterals* eager_literals) {
483 RuntimeCallTimerScope runtimeTimer(parse_info->isolate(),
484 &RuntimeCallStats::CompileRenumber);
485 if (!AstNumbering::Renumber(
486 parse_info->isolate()->stack_guard()->real_climit(),
487 parse_info->zone(), parse_info->literal(), eager_literals)) {
488 return false;
489 }
490 if (!parse_info->shared_info().is_null()) {
491 SetSharedFunctionFlagsFromLiteral(parse_info->literal(),
492 parse_info->shared_info());
493 }
494 return true;
495 }
496
497 bool GenerateUnoptimizedCode(CompilationInfo* info) {
498 if (UseAsmWasm(info->scope(), info->shared_info(), info->is_debug())) {
499 EnsureFeedbackMetadata(info);
500 MaybeHandle<FixedArray> wasm_data;
501 wasm_data = AsmJs::CompileAsmViaWasm(info);
502 if (!wasm_data.is_null()) {
503 info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked());
504 info->SetCode(info->isolate()->builtins()->InstantiateAsmJs());
505 InstallUnoptimizedCode(info);
506 return true;
507 }
508 }
509
510 std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
511 if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
512 if (job->ExecuteJob() != CompilationJob::SUCCEEDED) return false;
513 if (FinalizeUnoptimizedCompilationJob(job.get()) !=
514 CompilationJob::SUCCEEDED) {
515 return false;
516 }
517 return true;
518 }
519
520 bool CompileUnoptimizedInnerFunctions(
521 Compiler::EagerInnerFunctionLiterals* literals,
522 Compiler::ConcurrencyMode inner_function_mode,
523 std::shared_ptr<Zone> parse_zone, CompilationInfo* outer_info) {
524 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
525 "V8.CompileUnoptimizedInnerFunctions");
526 Isolate* isolate = outer_info->isolate();
527 Handle<Script> script = outer_info->script();
528 bool is_debug = outer_info->is_debug();
529 bool will_serialize = outer_info->will_serialize();
530 RuntimeCallTimerScope runtimeTimer(isolate,
531 &RuntimeCallStats::CompileInnerFunction);
532
533 for (auto it : *literals) {
534 FunctionLiteral* literal = it->value();
535 Handle<SharedFunctionInfo> shared =
536 Compiler::GetSharedFunctionInfo(literal, script, outer_info);
537 if (shared->is_compiled()) continue;
538
539 // The {literal} has already been numbered because AstNumbering descends into
540 // eagerly compiled function literals.
541 SetSharedFunctionFlagsFromLiteral(literal, shared);
542
543 // Try to enqueue the eager function on the compiler dispatcher.
544 CompilerDispatcher* dispatcher = isolate->compiler_dispatcher();
545 if (UseCompilerDispatcher(inner_function_mode, dispatcher, literal->scope(),
546 shared, is_debug, will_serialize) &&
547 dispatcher->EnqueueAndStep(outer_info->script(), shared, literal,
548 parse_zone,
549 outer_info->parse_info()->deferred_handles(),
550 outer_info->deferred_handles())) {
551 // If we have successfully queued up the function for compilation on the
552 // compiler dispatcher then we are done.
553 continue;
554 } else {
555 // Otherwise generate unoptimized code now.
556 ParseInfo parse_info(script);
557 CompilationInfo info(parse_info.zone(), &parse_info,
558 Handle<JSFunction>::null());
559
560 parse_info.set_literal(literal);
561 parse_info.set_shared_info(shared);
562 parse_info.set_function_literal_id(shared->function_literal_id());
563 parse_info.set_language_mode(literal->scope()->language_mode());
564 parse_info.set_ast_value_factory(
565 outer_info->parse_info()->ast_value_factory());
566 parse_info.set_ast_value_factory_owned(false);
567
568 if (will_serialize) info.PrepareForSerializing();
569 if (is_debug) info.MarkAsDebug();
570
571 if (!GenerateUnoptimizedCode(&info)) {
572 if (!isolate->has_pending_exception()) isolate->StackOverflow();
573 return false;
574 }
575 }
576 }
577 return true;
578 }
579
580 bool InnerFunctionIsAsmModule(
581 ThreadedList<ThreadedListZoneEntry<FunctionLiteral*>>* literals) {
582 for (auto it : *literals) {
583 FunctionLiteral* literal = it->value();
584 if (literal->scope()->IsAsmModule()) return true;
585 }
586 return false;
587 }
588
589 bool CompileUnoptimizedCode(CompilationInfo* info,
590 Compiler::ConcurrencyMode inner_function_mode) {
591 Isolate* isolate = info->isolate();
592 DCHECK(AllowCompilation::IsAllowed(isolate));
593
594 Compiler::EagerInnerFunctionLiterals inner_literals;
595 {
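    // For concurrent compilation of inner functions, keep the handles created
    // during analysis in a deferred handle scope so they can be passed to the
    // background jobs.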
596 std::unique_ptr<CompilationHandleScope> compilation_handle_scope;
597 if (inner_function_mode == Compiler::CONCURRENT) {
598 compilation_handle_scope.reset(new CompilationHandleScope(info));
599 }
600 if (!Compiler::Analyze(info->parse_info(), &inner_literals)) {
601 if (!isolate->has_pending_exception()) isolate->StackOverflow();
602 return false;
603 }
604 }
605
606 // Disable concurrent inner compilation for asm-wasm code.
607 // TODO(rmcilroy,bradnelson): Remove this AsmWasm check once the asm-wasm
608 // builder doesn't do parsing when visiting function declarations.
609 if (info->scope()->IsAsmModule() ||
610 InnerFunctionIsAsmModule(&inner_literals)) {
611 inner_function_mode = Compiler::NOT_CONCURRENT;
612 }
613
614 std::shared_ptr<Zone> parse_zone;
615 if (inner_function_mode == Compiler::CONCURRENT) {
616 // Seal the parse zone so that it can be shared by parallel inner function
617 // compilation jobs.
618 DCHECK_NE(info->parse_info()->zone(), info->zone());
619 parse_zone = info->parse_info()->zone_shared();
620 parse_zone->Seal();
621 }
622
623 if (!CompileUnoptimizedInnerFunctions(&inner_literals, inner_function_mode,
624 parse_zone, info) ||
625 !GenerateUnoptimizedCode(info)) {
626 if (!isolate->has_pending_exception()) isolate->StackOverflow();
627 return false;
628 }
629
630 return true;
631 }
632
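// Lazily allocates the script's shared-function-info list, sized to hold one
// entry per function literal in the script.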
633 void EnsureSharedFunctionInfosArrayOnScript(ParseInfo* info) {
634 DCHECK(info->is_toplevel());
635 DCHECK(!info->script().is_null());
636 if (info->script()->shared_function_infos()->length() > 0) {
637 DCHECK_EQ(info->script()->shared_function_infos()->length(),
638 info->max_function_literal_id() + 1);
639 return;
640 }
641 Isolate* isolate = info->isolate();
642 Handle<FixedArray> infos(
643 isolate->factory()->NewFixedArray(info->max_function_literal_id() + 1));
644 info->script()->set_shared_function_infos(*infos);
645 }
646
647 MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(
648 CompilationInfo* info, Compiler::ConcurrencyMode inner_function_mode) {
649 RuntimeCallTimerScope runtimeTimer(
650 info->isolate(), &RuntimeCallStats::CompileGetUnoptimizedCode);
651 VMState<COMPILER> state(info->isolate());
652 PostponeInterruptsScope postpone(info->isolate());
653
654 // Parse and update ParseInfo with the results.
655 {
656 if (!parsing::ParseAny(info->parse_info(),
657 inner_function_mode != Compiler::CONCURRENT)) {
658 return MaybeHandle<Code>();
659 }
660
661 if (inner_function_mode == Compiler::CONCURRENT) {
662 ParseHandleScope parse_handles(info->parse_info());
663 info->parse_info()->ReopenHandlesInNewHandleScope();
664 info->parse_info()->ast_value_factory()->Internalize(info->isolate());
665 }
666 }
667
668 if (info->parse_info()->is_toplevel()) {
669 EnsureSharedFunctionInfosArrayOnScript(info->parse_info());
670 }
671 DCHECK_EQ(info->shared_info()->language_mode(),
672 info->literal()->language_mode());
673
674 // Compile either unoptimized code or bytecode for the interpreter.
675 if (!CompileUnoptimizedCode(info, inner_function_mode)) {
676 return MaybeHandle<Code>();
677 }
678
679 // Record the function compilation event.
680 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
681
682 return info->code();
683 }
684
685 MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
686 Handle<JSFunction> function, BailoutId osr_ast_id) {
687 RuntimeCallTimerScope runtimeTimer(
688 function->GetIsolate(),
689 &RuntimeCallStats::CompileGetFromOptimizedCodeMap);
690 Handle<SharedFunctionInfo> shared(function->shared());
691 DisallowHeapAllocation no_gc;
692 Code* code = shared->SearchOptimizedCodeMap(
693 function->context()->native_context(), osr_ast_id);
694 if (code != nullptr) {
695 // Caching of optimized code enabled and optimized code found.
696 DCHECK(!code->marked_for_deoptimization());
697 DCHECK(function->shared()->is_compiled());
698 return Handle<Code>(code);
699 }
700 return MaybeHandle<Code>();
701 }
702
703 void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
704 Handle<Code> code = info->code();
705 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do.
706
707 // Function context specialization folds in the function context,
708 // so no sharing can occur.
709 if (info->is_function_context_specializing()) return;
710 // Frame specialization implies function context specialization.
711 DCHECK(!info->is_frame_specializing());
712
713 // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
714 // from bytecode offset and overlap with actual BailoutId. No caching!
715 if (info->is_osr() && info->is_optimizing_from_bytecode()) return;
716
717 // Cache optimized context-specific code.
718 Handle<JSFunction> function = info->closure();
719 Handle<SharedFunctionInfo> shared(function->shared());
720 Handle<Context> native_context(function->context()->native_context());
721 SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
722 info->osr_ast_id());
723 }
724
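// Runs all phases of the optimizing compilation job synchronously on the
// main thread and, on success, caches the code in the optimized code map.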
725 bool GetOptimizedCodeNow(CompilationJob* job) {
726 CompilationInfo* info = job->info();
727 Isolate* isolate = info->isolate();
728
729 // Parsing is not required when optimizing from existing bytecode.
730 if (!info->is_optimizing_from_bytecode()) {
731 if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
732 EnsureFeedbackMetadata(info);
733 }
734
735 JSFunction::EnsureLiterals(info->closure());
736
737 TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
738 RuntimeCallTimerScope runtimeTimer(isolate,
739 &RuntimeCallStats::RecompileSynchronous);
740 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
741 "V8.RecompileSynchronous");
742
743 if (job->PrepareJob() != CompilationJob::SUCCEEDED ||
744 job->ExecuteJob() != CompilationJob::SUCCEEDED ||
745 job->FinalizeJob() != CompilationJob::SUCCEEDED) {
746 if (FLAG_trace_opt) {
747 PrintF("[aborted optimizing ");
748 info->closure()->ShortPrint();
749 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
750 }
751 return false;
752 }
753
754 // Success!
755 job->RecordOptimizedCompilationStats();
756 DCHECK(!isolate->has_pending_exception());
757 InsertCodeIntoOptimizedCodeMap(info);
758 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
759 return true;
760 }
761
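// Prepares the optimizing compilation job on the main thread and queues it on
// the concurrent recompilation dispatcher; gives up if the queue is full or
// the heap is under high memory pressure.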
762 bool GetOptimizedCodeLater(CompilationJob* job) {
763 CompilationInfo* info = job->info();
764 Isolate* isolate = info->isolate();
765
766 if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
767 if (FLAG_trace_concurrent_recompilation) {
768 PrintF(" ** Compilation queue full, will retry optimizing ");
769 info->closure()->ShortPrint();
770 PrintF(" later.\n");
771 }
772 return false;
773 }
774
775 if (isolate->heap()->HighMemoryPressure()) {
776 if (FLAG_trace_concurrent_recompilation) {
777 PrintF(" ** High memory pressure, will retry optimizing ");
778 info->closure()->ShortPrint();
779 PrintF(" later.\n");
780 }
781 return false;
782 }
783
784 // Parsing is not required when optimizing from existing bytecode.
785 if (!info->is_optimizing_from_bytecode()) {
786 if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
787 EnsureFeedbackMetadata(info);
788 }
789
790 JSFunction::EnsureLiterals(info->closure());
791
792 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
793 RuntimeCallTimerScope runtimeTimer(info->isolate(),
794 &RuntimeCallStats::RecompileSynchronous);
795 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
796 "V8.RecompileSynchronous");
797
798 if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
799 isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);
800
801 if (FLAG_trace_concurrent_recompilation) {
802 PrintF(" ** Queued ");
803 info->closure()->ShortPrint();
804 PrintF(" for concurrent optimization.\n");
805 }
806 return true;
807 }
808
809 MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
810 Compiler::ConcurrencyMode mode,
811 BailoutId osr_ast_id = BailoutId::None(),
812 JavaScriptFrame* osr_frame = nullptr) {
813 Isolate* isolate = function->GetIsolate();
814 Handle<SharedFunctionInfo> shared(function->shared(), isolate);
815
816 bool ignition_osr = osr_frame && osr_frame->is_interpreted();
817 DCHECK_IMPLIES(ignition_osr, !osr_ast_id.IsNone());
818 DCHECK_IMPLIES(ignition_osr, FLAG_ignition_osr);
819
820 // Shared function no longer needs to be tiered up
821 shared->set_marked_for_tier_up(false);
822
823 Handle<Code> cached_code;
824 // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
825 // from bytecode offset and overlap with actual BailoutId. No lookup!
826 if (!ignition_osr &&
827 GetCodeFromOptimizedCodeMap(function, osr_ast_id)
828 .ToHandle(&cached_code)) {
829 if (FLAG_trace_opt) {
830 PrintF("[found optimized code for ");
831 function->ShortPrint();
832 if (!osr_ast_id.IsNone()) {
833 PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
834 }
835 PrintF("]\n");
836 }
837 return cached_code;
838 }
839
840 // Reset profiler ticks, function is no longer considered hot.
841 DCHECK(shared->is_compiled());
842 if (shared->HasBaselineCode()) {
843 shared->code()->set_profiler_ticks(0);
844 } else if (shared->HasBytecodeArray()) {
845 shared->set_profiler_ticks(0);
846 }
847
848 VMState<COMPILER> state(isolate);
849 DCHECK(!isolate->has_pending_exception());
850 PostponeInterruptsScope postpone(isolate);
851 bool use_turbofan = UseTurboFan(shared) || ignition_osr;
852 bool has_script = shared->script()->IsScript();
853 // BUG(5946): This DCHECK is necessary to make certain that we won't tolerate
854 // the lack of a script unless the function uses bytecode (Ignition).
855 DCHECK_IMPLIES(!has_script, ShouldUseIgnition(shared, false));
856 std::unique_ptr<CompilationJob> job(
857 use_turbofan ? compiler::Pipeline::NewCompilationJob(function, has_script)
858 : new HCompilationJob(function));
859 CompilationInfo* info = job->info();
860 ParseInfo* parse_info = info->parse_info();
861
862 info->SetOptimizingForOsr(osr_ast_id, osr_frame);
863
864 // Do not use Crankshaft/TurboFan if we need to be able to set break points.
865 if (info->shared_info()->HasDebugInfo()) {
866 info->AbortOptimization(kFunctionBeingDebugged);
867 return MaybeHandle<Code>();
868 }
869
870 // Limit the number of times we try to optimize functions.
871 const int kMaxOptCount =
872 FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
873 if (info->shared_info()->opt_count() > kMaxOptCount) {
874 info->AbortOptimization(kDeoptimizedTooManyTimes);
875 return MaybeHandle<Code>();
876 }
877
878 TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
879 RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
880 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.OptimizeCode");
881
882 // TurboFan can optimize directly from existing bytecode.
883 if (use_turbofan && ShouldUseIgnition(info)) {
884 if (info->is_osr() && !ignition_osr) return MaybeHandle<Code>();
885 DCHECK(shared->HasBytecodeArray());
886 info->MarkAsOptimizeFromBytecode();
887 }
888
889 // Verify that OSR compilations are delegated to the correct graph builder.
890 // Depending on the underlying frame the semantics of the {BailoutId} differ
891 // and the various graph builders hard-code a certain semantic:
892 // - Interpreter : The BailoutId represents a bytecode offset.
893 // - FullCodegen : The BailoutId represents the id of an AST node.
894 DCHECK_IMPLIES(info->is_osr() && ignition_osr,
895 info->is_optimizing_from_bytecode());
896 DCHECK_IMPLIES(info->is_osr() && !ignition_osr,
897 !info->is_optimizing_from_bytecode());
898
899 // In case of concurrent recompilation, all handles below this point will be
900 // allocated in a deferred handle scope that is detached and handed off to
901 // the background thread when we return.
902 std::unique_ptr<CompilationHandleScope> compilation;
903 if (mode == Compiler::CONCURRENT) {
904 compilation.reset(new CompilationHandleScope(info));
905 }
906
907 // In case of TurboFan, all handles below will be canonicalized.
908 std::unique_ptr<CanonicalHandleScope> canonical;
909 if (use_turbofan) canonical.reset(new CanonicalHandleScope(info->isolate()));
910
911 // Reopen handles in the new CompilationHandleScope.
912 info->ReopenHandlesInNewHandleScope();
913 parse_info->ReopenHandlesInNewHandleScope();
914
915 if (mode == Compiler::CONCURRENT) {
916 if (GetOptimizedCodeLater(job.get())) {
917 job.release(); // The background recompile job owns this now.
918 return isolate->builtins()->InOptimizationQueue();
919 }
920 } else {
921 if (GetOptimizedCodeNow(job.get())) return info->code();
922 }
923
924 if (isolate->has_pending_exception()) isolate->clear_pending_exception();
925 return MaybeHandle<Code>();
926 }
927
928 CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job) {
929 CompilationInfo* info = job->info();
930 Isolate* isolate = info->isolate();
931
932 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
933 RuntimeCallTimerScope runtimeTimer(isolate,
934 &RuntimeCallStats::RecompileSynchronous);
935 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
936 "V8.RecompileSynchronous");
937
938 Handle<SharedFunctionInfo> shared = info->shared_info();
939
940 // Reset profiler ticks, function is no longer considered hot.
941 if (shared->HasBaselineCode()) {
942 shared->code()->set_profiler_ticks(0);
943 } else if (shared->HasBytecodeArray()) {
944 shared->set_profiler_ticks(0);
945 }
946
947 DCHECK(!shared->HasDebugInfo());
948
949 // 1) Optimization on the concurrent thread may have failed.
950 // 2) The function may have already been optimized by OSR. Simply continue.
951 // Except when OSR already disabled optimization for some reason.
952 // 3) The code may have already been invalidated due to dependency change.
953 // 4) Code generation may have failed.
954 if (job->state() == CompilationJob::State::kReadyToFinalize) {
955 if (shared->optimization_disabled()) {
956 job->RetryOptimization(kOptimizationDisabled);
957 } else if (info->dependencies()->HasAborted()) {
958 job->RetryOptimization(kBailedOutDueToDependencyChange);
959 } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
960 job->RecordOptimizedCompilationStats();
961 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
962 if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
963 info->osr_ast_id()) == nullptr) {
964 InsertCodeIntoOptimizedCodeMap(info);
965 }
966 if (FLAG_trace_opt) {
967 PrintF("[completed optimizing ");
968 info->closure()->ShortPrint();
969 PrintF("]\n");
970 }
971 info->closure()->ReplaceCode(*info->code());
972 return CompilationJob::SUCCEEDED;
973 }
974 }
975
976 DCHECK(job->state() == CompilationJob::State::kFailed);
977 if (FLAG_trace_opt) {
978 PrintF("[aborted optimizing ");
979 info->closure()->ShortPrint();
980 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
981 }
982 info->closure()->ReplaceCode(shared->code());
983 return CompilationJob::FAILED;
984 }
985
986 MaybeHandle<Code> GetBaselineCode(Handle<JSFunction> function) {
987 Isolate* isolate = function->GetIsolate();
988 VMState<COMPILER> state(isolate);
989 PostponeInterruptsScope postpone(isolate);
990 ParseInfo parse_info(handle(function->shared()));
991 CompilationInfo info(parse_info.zone(), &parse_info, function);
992
993 DCHECK(function->shared()->is_compiled());
994
995 // Function no longer needs to be tiered up
996 function->shared()->set_marked_for_tier_up(false);
997
998 // Reset profiler ticks, function is no longer considered hot.
999 if (function->shared()->HasBytecodeArray()) {
1000 function->shared()->set_profiler_ticks(0);
1001 }
1002
1003 // Nothing left to do if the function already has baseline code.
1004 if (function->shared()->code()->kind() == Code::FUNCTION) {
1005 return Handle<Code>(function->shared()->code());
1006 }
1007
1008 // We do not switch to baseline code when the debugger might have created a
1009 // copy of the bytecode with break slots to be able to set break points.
1010 if (function->shared()->HasDebugInfo()) {
1011 return MaybeHandle<Code>();
1012 }
1013
1014 // Don't generate full-codegen code for functions full-codegen can't support.
1015 if (function->shared()->must_use_ignition_turbo()) {
1016 return MaybeHandle<Code>();
1017 }
1018 DCHECK(!IsResumableFunction(function->shared()->kind()));
1019
1020 if (FLAG_trace_opt) {
1021 OFStream os(stdout);
1022 os << "[switching method " << Brief(*function) << " to baseline code]"
1023 << std::endl;
1024 }
1025
1026 // Parse and update CompilationInfo with the results.
1027 if (!parsing::ParseFunction(info.parse_info())) return MaybeHandle<Code>();
1028 Handle<SharedFunctionInfo> shared = info.shared_info();
1029 DCHECK_EQ(shared->language_mode(), info.literal()->language_mode());
1030
1031 // Compile baseline code using the full code generator.
1032 if (!Compiler::Analyze(info.parse_info()) ||
1033 !FullCodeGenerator::MakeCode(&info)) {
1034 if (!isolate->has_pending_exception()) isolate->StackOverflow();
1035 return MaybeHandle<Code>();
1036 }
1037
1038 // Update the shared function info with the scope info.
1039 InstallSharedScopeInfo(&info, shared);
1040
1041 // Install compilation result on the shared function info
1042 InstallSharedCompilationResult(&info, shared);
1043
1044 // Record the function compilation event.
1045 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, &info);
1046
1047 return info.code();
1048 }
1049
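// Lazily compiles {function}: prefers cached optimized code, then honors a
// pending tier-up request, reuses existing code or bytecode, and otherwise
// generates fresh unoptimized code.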
1050 MaybeHandle<Code> GetLazyCode(Handle<JSFunction> function) {
1051 Isolate* isolate = function->GetIsolate();
1052 DCHECK(!isolate->has_pending_exception());
1053 DCHECK(!function->is_compiled());
1054 TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
1055 RuntimeCallTimerScope runtimeTimer(isolate,
1056 &RuntimeCallStats::CompileFunction);
1057 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
1058 AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
1059
1060 Handle<Code> cached_code;
1061 if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
1062 .ToHandle(&cached_code)) {
1063 if (FLAG_trace_opt) {
1064 PrintF("[found optimized code for ");
1065 function->ShortPrint();
1066 PrintF(" during unoptimized compile]\n");
1067 }
1068 DCHECK(function->shared()->is_compiled());
1069 return cached_code;
1070 }
1071
1072 if (function->shared()->is_compiled() &&
1073 function->shared()->marked_for_tier_up()) {
1074 DCHECK(FLAG_mark_shared_functions_for_tier_up);
1075
1076 function->shared()->set_marked_for_tier_up(false);
1077
1078 switch (Compiler::NextCompilationTier(*function)) {
1079 case Compiler::BASELINE: {
1080 // We don't try to handle baseline here because GetBaselineCode()
1081 // doesn't handle top-level code. We aren't supporting
1082 // the hybrid pipeline going forward (where Ignition is a first
1083 // tier followed by full-code).
1084 break;
1085 }
1086 case Compiler::OPTIMIZED: {
1087 if (FLAG_trace_opt) {
1088 PrintF("[optimizing method ");
1089 function->ShortPrint();
1090 PrintF(" eagerly (shared function marked for tier up)]\n");
1091 }
1092
1093 Handle<Code> code;
1094 // TODO(leszeks): Look into performing this compilation concurrently.
1095 if (GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
1096 .ToHandle(&code)) {
1097 return code;
1098 }
1099 break;
1100 }
1101 default:
1102 UNREACHABLE();
1103 }
1104 }
1105
1106 if (function->shared()->is_compiled()) {
1107 return Handle<Code>(function->shared()->code());
1108 }
1109
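  // Bytecode already exists for this function; just (re)install the
  // interpreter entry trampoline instead of recompiling.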
1110 if (function->shared()->HasBytecodeArray()) {
1111 Handle<Code> entry = isolate->builtins()->InterpreterEntryTrampoline();
1112 function->shared()->ReplaceCode(*entry);
1113 return entry;
1114 }
1115
1116 ParseInfo parse_info(handle(function->shared()));
1117 Zone compile_zone(isolate->allocator(), ZONE_NAME);
1118 CompilationInfo info(&compile_zone, &parse_info, function);
1119 Handle<Code> result;
1120 ASSIGN_RETURN_ON_EXCEPTION(
1121 isolate, result, GetUnoptimizedCode(&info, Compiler::CONCURRENT), Code);
1122
1123 if (FLAG_always_opt && !info.shared_info()->HasAsmWasmData()) {
1124 Handle<Code> opt_code;
1125 if (GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
1126 .ToHandle(&opt_code)) {
1127 result = opt_code;
1128 }
1129 }
1130
1131 return result;
1132 }
1133
1134
1135 Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
1136 Isolate* isolate = info->isolate();
1137 TimerEventScope<TimerEventCompileCode> timer(isolate);
1138 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
1139 PostponeInterruptsScope postpone(isolate);
1140 DCHECK(!isolate->native_context().is_null());
1141 ParseInfo* parse_info = info->parse_info();
1142
1143 RuntimeCallTimerScope runtimeTimer(
1144 isolate, parse_info->is_eval() ? &RuntimeCallStats::CompileEval
1145 : &RuntimeCallStats::CompileScript);
1146
1147 Handle<Script> script = parse_info->script();
1148
1149 // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
1150 FixedArray* array = isolate->native_context()->embedder_data();
1151 script->set_context_data(array->get(v8::Context::kDebugIdIndex));
1152
1153 Handle<SharedFunctionInfo> result;
1154
1155 { VMState<COMPILER> state(info->isolate());
1156 if (parse_info->literal() == nullptr) {
1157 if (!parsing::ParseProgram(parse_info, false)) {
1158 return Handle<SharedFunctionInfo>::null();
1159 }
1160
1161 {
1162 ParseHandleScope parse_handles(parse_info);
1163 parse_info->ReopenHandlesInNewHandleScope();
1164 parse_info->ast_value_factory()->Internalize(info->isolate());
1165 }
1166 }
1167
1168 EnsureSharedFunctionInfosArrayOnScript(parse_info);
1169
1170 // Measure how long it takes to do the compilation; only take the
1171 // rest of the function into account to avoid overlap with the
1172 // parsing statistics.
1173 HistogramTimer* rate = parse_info->is_eval()
1174 ? info->isolate()->counters()->compile_eval()
1175 : info->isolate()->counters()->compile();
1176 HistogramTimerScope timer(rate);
1177 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
1178 parse_info->is_eval() ? "V8.CompileEval" : "V8.Compile");
1179
1180 // Allocate a shared function info object.
1181 FunctionLiteral* lit = parse_info->literal();
1182 DCHECK_EQ(kNoSourcePosition, lit->function_token_position());
1183 result = isolate->factory()->NewSharedFunctionInfoForLiteral(lit, script);
1184 result->set_is_toplevel(true);
1185 parse_info->set_shared_info(result);
1186 parse_info->set_function_literal_id(result->function_literal_id());
1187
1188 // Compile the code.
1189 if (!CompileUnoptimizedCode(info, Compiler::CONCURRENT)) {
1190 return Handle<SharedFunctionInfo>::null();
1191 }
1192
1193 Handle<String> script_name =
1194 script->name()->IsString()
1195 ? Handle<String>(String::cast(script->name()))
1196 : isolate->factory()->empty_string();
1197 CodeEventListener::LogEventsAndTags log_tag =
1198 parse_info->is_eval()
1199 ? CodeEventListener::EVAL_TAG
1200 : Logger::ToNativeByScript(CodeEventListener::SCRIPT_TAG, *script);
1201
1202 PROFILE(isolate, CodeCreateEvent(log_tag, result->abstract_code(), *result,
1203 *script_name));
1204
1205 if (!script.is_null())
1206 script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
1207 }
1208
1209 return result;
1210 }
1211
1212 } // namespace
1213
1214 // ----------------------------------------------------------------------------
1215 // Implementation of Compiler
1216
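// Runs the post-parse passes over the AST: expression rewriting, scope
// analysis and AST numbering (which also collects eagerly compiled inner
// function literals).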
1217 bool Compiler::Analyze(ParseInfo* info,
1218 EagerInnerFunctionLiterals* eager_literals) {
1219 DCHECK_NOT_NULL(info->literal());
1220 RuntimeCallTimerScope runtimeTimer(info->isolate(),
1221 &RuntimeCallStats::CompileAnalyse);
1222 if (!Rewriter::Rewrite(info)) return false;
1223 DeclarationScope::Analyze(info, AnalyzeMode::kRegular);
1224 if (!Renumber(info, eager_literals)) {
1225 return false;
1226 }
1227 DCHECK_NOT_NULL(info->scope());
1228 return true;
1229 }
1230
1231 bool Compiler::ParseAndAnalyze(ParseInfo* info) {
1232 if (!parsing::ParseAny(info)) return false;
1233 if (info->is_toplevel()) EnsureSharedFunctionInfosArrayOnScript(info);
1234 if (!Compiler::Analyze(info)) return false;
1235 DCHECK_NOT_NULL(info->literal());
1236 DCHECK_NOT_NULL(info->scope());
1237 return true;
1238 }
1239
1240 bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
1241 if (function->is_compiled()) return true;
1242 Isolate* isolate = function->GetIsolate();
1243 DCHECK(AllowCompilation::IsAllowed(isolate));
1244
1245 CompilerDispatcher* dispatcher = isolate->compiler_dispatcher();
1246 Handle<SharedFunctionInfo> shared(function->shared(), isolate);
1247 Handle<Code> code;
1248 if (dispatcher->IsEnqueued(shared)) {
1249 if (!dispatcher->FinishNow(shared)) {
1250 if (flag == CLEAR_EXCEPTION) {
1251 isolate->clear_pending_exception();
1252 }
1253 return false;
1254 }
1255 code = handle(shared->code(), isolate);
1256 } else {
1257 // Start a compilation.
1258 if (!GetLazyCode(function).ToHandle(&code)) {
1259 if (flag == CLEAR_EXCEPTION) {
1260 isolate->clear_pending_exception();
1261 }
1262 return false;
1263 }
1264 }
1265
1266 // Install code on closure.
1267 function->ReplaceCode(*code);
1268 JSFunction::EnsureLiterals(function);
1269
1270 // Check postconditions on success.
1271 DCHECK(!isolate->has_pending_exception());
1272 DCHECK(function->shared()->is_compiled());
1273 DCHECK(function->is_compiled());
1274 return true;
1275 }
1276
1277 bool Compiler::CompileBaseline(Handle<JSFunction> function) {
1278 Isolate* isolate = function->GetIsolate();
1279 DCHECK(AllowCompilation::IsAllowed(isolate));
1280
1281 // Start a compilation.
1282 Handle<Code> code;
1283 if (!GetBaselineCode(function).ToHandle(&code)) {
1284 // Baseline generation failed, get unoptimized code.
1285 DCHECK(function->shared()->is_compiled());
1286 code = handle(function->shared()->code());
1287 isolate->clear_pending_exception();
1288 }
1289
1290 // Install code on closure.
1291 function->ReplaceCode(*code);
1292 JSFunction::EnsureLiterals(function);
1293
1294 // Check postconditions on success.
1295 DCHECK(!isolate->has_pending_exception());
1296 DCHECK(function->shared()->is_compiled());
1297 DCHECK(function->is_compiled());
1298 return true;
1299 }
1300
1301 bool Compiler::CompileOptimized(Handle<JSFunction> function,
1302 ConcurrencyMode mode) {
1303 if (function->IsOptimized()) return true;
1304 Isolate* isolate = function->GetIsolate();
1305 DCHECK(AllowCompilation::IsAllowed(isolate));
1306
1307 // Start a compilation.
1308 Handle<Code> code;
1309 if (!GetOptimizedCode(function, mode).ToHandle(&code)) {
1310 // Optimization failed, get unoptimized code. Unoptimized code must exist
1311 // already if we are optimizing.
1312 DCHECK(!isolate->has_pending_exception());
1313 DCHECK(function->shared()->is_compiled());
1314 code = handle(function->shared()->code(), isolate);
1315 }
1316
1317 // Install code on closure.
1318 function->ReplaceCode(*code);
1319 JSFunction::EnsureLiterals(function);
1320
1321 // Check postconditions on success.
1322 DCHECK(!isolate->has_pending_exception());
1323 DCHECK(function->shared()->is_compiled());
1324 DCHECK(function->is_compiled());
1325 return true;
1326 }
1327
1328 bool Compiler::CompileDebugCode(Handle<SharedFunctionInfo> shared) {
1329 Isolate* isolate = shared->GetIsolate();
1330 DCHECK(AllowCompilation::IsAllowed(isolate));
1331
1332 // Start a compilation.
1333 ParseInfo parse_info(shared);
1334 CompilationInfo info(parse_info.zone(), &parse_info,
1335 Handle<JSFunction>::null());
1336 info.MarkAsDebug();
1337 if (GetUnoptimizedCode(&info, Compiler::NOT_CONCURRENT).is_null()) {
1338 isolate->clear_pending_exception();
1339 return false;
1340 }
1341
1342 // Check postconditions on success.
1343 DCHECK(!isolate->has_pending_exception());
1344 DCHECK(shared->is_compiled());
1345 DCHECK(shared->HasDebugCode());
1346 return true;
1347 }
1348
1349 MaybeHandle<JSArray> Compiler::CompileForLiveEdit(Handle<Script> script) {
1350 Isolate* isolate = script->GetIsolate();
1351 DCHECK(AllowCompilation::IsAllowed(isolate));
1352
1353 // In order to ensure that live edit function info collection finds the newly
1354 // generated shared function infos, clear the script's list temporarily
1355 // and restore it at the end of this method.
1356 Handle<FixedArray> old_function_infos(script->shared_function_infos(),
1357 isolate);
1358 script->set_shared_function_infos(isolate->heap()->empty_fixed_array());
1359
1360 // Start a compilation.
1361 ParseInfo parse_info(script);
1362 Zone compile_zone(isolate->allocator(), ZONE_NAME);
1363 CompilationInfo info(&compile_zone, &parse_info, Handle<JSFunction>::null());
1364 info.MarkAsDebug();
1365
1366 // TODO(635): support extensions.
1367 const bool compilation_succeeded = !CompileToplevel(&info).is_null();
1368 Handle<JSArray> infos;
1369 if (compilation_succeeded) {
1370 // Check postconditions on success.
1371 DCHECK(!isolate->has_pending_exception());
1372 infos = LiveEditFunctionTracker::Collect(parse_info.literal(), script,
1373 parse_info.zone(), isolate);
1374 }
1375
1376 // Restore the original function info list in order to remain side-effect
1377 // free as much as possible, since some code expects the old shared function
1378 // infos to stick around.
1379 script->set_shared_function_infos(*old_function_infos);
1380
1381 return infos;
1382 }
1383
1384 bool Compiler::EnsureBytecode(CompilationInfo* info) {
1385 if (!info->shared_info()->is_compiled()) {
1386 CompilerDispatcher* dispatcher = info->isolate()->compiler_dispatcher();
1387 if (dispatcher->IsEnqueued(info->shared_info())) {
1388 if (!dispatcher->FinishNow(info->shared_info())) return false;
1389 } else if (GetUnoptimizedCode(info, Compiler::NOT_CONCURRENT).is_null()) {
1390 return false;
1391 }
1392 }
1393 DCHECK(info->shared_info()->is_compiled());
1394
1395 if (info->shared_info()->HasAsmWasmData()) return false;
1396
1397 DCHECK_EQ(ShouldUseIgnition(info), info->shared_info()->HasBytecodeArray());
1398 return info->shared_info()->HasBytecodeArray();
1399 }
1400
1401 // TODO(turbofan): In the future, unoptimized code with deopt support could
1402 // be generated lazily once deopt is triggered.
1403 bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
1404 DCHECK_NOT_NULL(info->literal());
1405 DCHECK_NOT_NULL(info->scope());
1406 Handle<SharedFunctionInfo> shared = info->shared_info();
1407
1408 CompilerDispatcher* dispatcher = info->isolate()->compiler_dispatcher();
1409 if (dispatcher->IsEnqueued(shared)) {
1410 if (!dispatcher->FinishNow(shared)) return false;
1411 }
1412
1413 if (!shared->has_deoptimization_support()) {
1414 Zone compile_zone(info->isolate()->allocator(), ZONE_NAME);
1415 CompilationInfo unoptimized(&compile_zone, info->parse_info(),
1416 info->closure());
1417 unoptimized.EnableDeoptimizationSupport();
1418
1419 // Don't generate full-codegen code for functions full-codegen can't support.
1420 if (shared->must_use_ignition_turbo()) return false;
1421 DCHECK(!IsResumableFunction(shared->kind()));
1422
1423 // When we call PrepareForSerializing below, we will change the
1424 // will_serialize flag on the shared ParseInfo. Make sure to restore it.
1425 bool old_will_serialize_value = info->parse_info()->will_serialize();
1426
1427 // If the current code has reloc info for serialization, also include
1428 // reloc info for serialization for the new code, so that deopt support
1429 // can be added without losing IC state.
1430 if (shared->code()->kind() == Code::FUNCTION &&
1431 shared->code()->has_reloc_info_for_serialization()) {
1432 unoptimized.PrepareForSerializing();
1433 }
1434 EnsureFeedbackMetadata(&unoptimized);
1435
1436 // If the function should use Ignition, generate and install bytecode first
1437 // so that installing full-codegen code does not implicitly tier it down.
1438 if (!shared->is_compiled() && ShouldUseIgnition(info) &&
1439 !GenerateUnoptimizedCode(info)) {
1440 return false;
1441 }
1442
1443 if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;
1444
1445 info->parse_info()->set_will_serialize(old_will_serialize_value);
1446
1447 // The scope info might not have been set if a lazily compiled
1448 // function is inlined before being called for the first time.
1449 if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
1450 InstallSharedScopeInfo(info, shared);
1451 }
1452
1453 // Install compilation result on the shared function info
1454 shared->EnableDeoptimizationSupport(*unoptimized.code());
1455
1456 // The existing unoptimized code was replaced with the new one.
1457 RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG,
1458 &unoptimized);
1459 }
1460 return true;
1461 }
1462
1463 // static
1464 Compiler::CompilationTier Compiler::NextCompilationTier(JSFunction* function) {
1465 Handle<SharedFunctionInfo> shared(function->shared(), function->GetIsolate());
1466 if (shared->IsInterpreted()) {
1467 if (UseTurboFan(shared)) {
1468 return OPTIMIZED;
1469 } else {
1470 return BASELINE;
1471 }
1472 } else {
1473 return OPTIMIZED;
1474 }
1475 }
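// Note (illustrative addition, not part of the original source): assuming the
// tiering configuration of this V8 version, the logic above means an
// interpreted function whose SharedFunctionInfo qualifies for TurboFan goes
// straight from Ignition bytecode to OPTIMIZED, while other interpreted
// functions first tier up to BASELINE (full-codegen) and are only optimized
// (by Crankshaft) afterwards; functions already running full-codegen code go
// directly to OPTIMIZED.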
1476
1477 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
1478 Handle<String> source, Handle<SharedFunctionInfo> outer_info,
1479 Handle<Context> context, LanguageMode language_mode,
1480 ParseRestriction restriction, int parameters_end_pos,
1481 int eval_scope_position, int eval_position, int line_offset,
1482 int column_offset, Handle<Object> script_name,
1483 ScriptOriginOptions options) {
1484 Isolate* isolate = source->GetIsolate();
1485 int source_length = source->length();
1486 isolate->counters()->total_eval_size()->Increment(source_length);
1487 isolate->counters()->total_compile_size()->Increment(source_length);
1488
1489 // The cache lookup key needs to be aware of the separation between the
1490 // parameters and the body to prevent this valid invocation:
1491 // Function("", "function anonymous(\n/**/) {\n}");
1492 // from adding an entry that falsely approves this invalid invocation:
1493 // Function("\n/**/) {\nfunction anonymous(", "}");
1494 // The actual eval_scope_position for indirect eval and CreateDynamicFunction
1495 // is unused (just 0), which means it's an available field to use to indicate
1496 // this separation. But to make sure we're not causing other false hits, we
1497 // negate the scope position.
1498 int position = eval_scope_position;
1499 if (FLAG_harmony_function_tostring &&
1500 restriction == ONLY_SINGLE_FUNCTION_LITERAL &&
1501 parameters_end_pos != kNoSourcePosition) {
1502 // use the parameters_end_pos as the eval_scope_position in the eval cache.
1503 DCHECK_EQ(eval_scope_position, 0);
1504 position = -parameters_end_pos;
1505 }
1506 CompilationCache* compilation_cache = isolate->compilation_cache();
1507 InfoVectorPair eval_result = compilation_cache->LookupEval(
1508 source, outer_info, context, language_mode, position);
1509 Handle<SharedFunctionInfo> shared_info;
1510 if (eval_result.has_shared()) {
1511 shared_info = Handle<SharedFunctionInfo>(eval_result.shared(), isolate);
1512 }
1513 Handle<Cell> vector;
1514 if (eval_result.has_vector()) {
1515 vector = Handle<Cell>(eval_result.vector(), isolate);
1516 }
1517
1518 Handle<Script> script;
1519 if (!eval_result.has_shared()) {
1520 script = isolate->factory()->NewScript(source);
1521 if (isolate->NeedsSourcePositionsForProfiling()) {
1522 Script::InitLineEnds(script);
1523 }
1524 if (!script_name.is_null()) {
1525 script->set_name(*script_name);
1526 script->set_line_offset(line_offset);
1527 script->set_column_offset(column_offset);
1528 }
1529 script->set_origin_options(options);
1530 script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
1531 Script::SetEvalOrigin(script, outer_info, eval_position);
1532
1533 ParseInfo parse_info(script);
1534 Zone compile_zone(isolate->allocator(), ZONE_NAME);
1535 CompilationInfo info(&compile_zone, &parse_info,
1536 Handle<JSFunction>::null());
1537 parse_info.set_eval();
1538 parse_info.set_language_mode(language_mode);
1539 parse_info.set_parse_restriction(restriction);
1540 parse_info.set_parameters_end_pos(parameters_end_pos);
1541 if (!context->IsNativeContext()) {
1542 parse_info.set_outer_scope_info(handle(context->scope_info()));
1543 }
1544
1545 shared_info = CompileToplevel(&info);
1546 if (shared_info.is_null()) {
1547 return MaybeHandle<JSFunction>();
1548 }
1549 }
1550
1551 // If caller is strict mode, the result must be in strict mode as well.
1552 DCHECK(is_sloppy(language_mode) || is_strict(shared_info->language_mode()));
1553
1554 Handle<JSFunction> result;
1555 if (eval_result.has_shared()) {
1556 if (eval_result.has_vector()) {
1557 result = isolate->factory()->NewFunctionFromSharedFunctionInfo(
1558 shared_info, context, vector, NOT_TENURED);
1559 } else {
1560 result = isolate->factory()->NewFunctionFromSharedFunctionInfo(
1561 shared_info, context, NOT_TENURED);
1562 JSFunction::EnsureLiterals(result);
1563 // Make sure to cache this result.
1564 Handle<Cell> new_vector(result->feedback_vector_cell(), isolate);
1565 compilation_cache->PutEval(source, outer_info, context, shared_info,
1566 new_vector, eval_scope_position);
1567 }
1568 } else {
1569 result = isolate->factory()->NewFunctionFromSharedFunctionInfo(
1570 shared_info, context, NOT_TENURED);
1571 JSFunction::EnsureLiterals(result);
1572 // Add the SharedFunctionInfo and the feedback vector cell to the eval
1573 // cache if we didn't retrieve them from there.
1574 Handle<Cell> vector(result->feedback_vector_cell(), isolate);
1575 compilation_cache->PutEval(source, outer_info, context, shared_info, vector,
1576 eval_scope_position);
1577 }
1578
1579 // OnAfterCompile has to be called after we create the JSFunction, because
1580 // we may need it to recompile the eval for debugging if we find a function
1581 // that contains break points in the eval script.
1582 isolate->debug()->OnAfterCompile(script);
1583
1584 return result;
1585 }
1586
1587 namespace {
1588
1589 bool CodeGenerationFromStringsAllowed(Isolate* isolate,
1590 Handle<Context> context) {
1591 DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
1592 // Check with callback if set.
1593 AllowCodeGenerationFromStringsCallback callback =
1594 isolate->allow_code_gen_callback();
1595 if (callback == NULL) {
1596 // No callback set and code generation disallowed.
1597 return false;
1598 } else {
1599 // Callback set. Let it decide if code generation is allowed.
1600 VMState<EXTERNAL> state(isolate);
1601 return callback(v8::Utils::ToLocal(context));
1602 }
1603 }
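// Illustrative sketch (added for exposition, not part of the original source):
// the callback consulted above is installed by the embedder. Assuming the
// public API of this V8 version, a host that disables code generation from
// strings for a context but wants a per-call escape hatch might do roughly:
//
//   bool AllowCodeGen(v8::Local<v8::Context> context) {
//     // Embedder-defined policy; IsTrustedContext is a hypothetical helper.
//     return IsTrustedContext(context);
//   }
//   ...
//   context->AllowCodeGenerationFromStrings(false);
//   isolate->SetAllowCodeGenerationFromStringsCallback(AllowCodeGen);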
1604
1605 bool ContainsAsmModule(Handle<Script> script) {
1606 DisallowHeapAllocation no_gc;
1607 SharedFunctionInfo::ScriptIterator iter(script);
1608 while (SharedFunctionInfo* info = iter.Next()) {
1609 if (info->HasAsmWasmData()) return true;
1610 }
1611 return false;
1612 }
1613
1614 } // namespace
1615
1616 MaybeHandle<JSFunction> Compiler::GetFunctionFromString(
1617 Handle<Context> context, Handle<String> source,
1618 ParseRestriction restriction, int parameters_end_pos) {
1619 Isolate* const isolate = context->GetIsolate();
1620 Handle<Context> native_context(context->native_context(), isolate);
1621
1622 // Check if native context allows code generation from
1623 // strings. Throw an exception if it doesn't.
1624 if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
1625 !CodeGenerationFromStringsAllowed(isolate, native_context)) {
1626 Handle<Object> error_message =
1627 native_context->ErrorMessageForCodeGenerationFromStrings();
1628 THROW_NEW_ERROR(isolate, NewEvalError(MessageTemplate::kCodeGenFromStrings,
1629 error_message),
1630 JSFunction);
1631 }
1632
1633 // Compile source string in the native context.
1634 int eval_scope_position = 0;
1635 int eval_position = kNoSourcePosition;
1636 Handle<SharedFunctionInfo> outer_info(native_context->closure()->shared());
1637 return Compiler::GetFunctionFromEval(source, outer_info, native_context,
1638 SLOPPY, restriction, parameters_end_pos,
1639 eval_scope_position, eval_position);
1640 }
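// Illustrative note (added for exposition, not part of the original source):
// GetFunctionFromString is expected to be reached from dynamic code creation
// paths such as indirect eval (NO_PARSE_RESTRICTION, no parameters_end_pos)
// and the Function constructor, which synthesizes a single function literal
// from its arguments and passes ONLY_SINGLE_FUNCTION_LITERAL together with
// the position where the parameter list ends; that position then feeds the
// eval-cache keying logic described in GetFunctionFromEval above.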
1641
1642 Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForScript(
1643 Handle<String> source, Handle<Object> script_name, int line_offset,
1644 int column_offset, ScriptOriginOptions resource_options,
1645 Handle<Object> source_map_url, Handle<Context> context,
1646 v8::Extension* extension, ScriptData** cached_data,
1647 ScriptCompiler::CompileOptions compile_options, NativesFlag natives) {
1648 Isolate* isolate = source->GetIsolate();
1649 if (compile_options == ScriptCompiler::kNoCompileOptions) {
1650 cached_data = NULL;
1651 } else if (compile_options == ScriptCompiler::kProduceParserCache ||
1652 compile_options == ScriptCompiler::kProduceCodeCache) {
1653 DCHECK(cached_data && !*cached_data);
1654 DCHECK(extension == NULL);
1655 DCHECK(!isolate->debug()->is_loaded());
1656 } else {
1657 DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
1658 compile_options == ScriptCompiler::kConsumeCodeCache);
1659 DCHECK(cached_data && *cached_data);
1660 DCHECK(extension == NULL);
1661 }
1662 int source_length = source->length();
1663 isolate->counters()->total_load_size()->Increment(source_length);
1664 isolate->counters()->total_compile_size()->Increment(source_length);
1665
1666 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1667 CompilationCache* compilation_cache = isolate->compilation_cache();
1668
1669 // Do a lookup in the compilation cache but not for extensions.
1670 Handle<SharedFunctionInfo> result;
1671 Handle<Cell> vector;
1672 if (extension == NULL) {
1673 // First check per-isolate compilation cache.
1674 InfoVectorPair pair = compilation_cache->LookupScript(
1675 source, script_name, line_offset, column_offset, resource_options,
1676 context, language_mode);
1677 if (!pair.has_shared() && FLAG_serialize_toplevel &&
1678 compile_options == ScriptCompiler::kConsumeCodeCache &&
1679 !isolate->debug()->is_loaded()) {
1680 // Then check cached code provided by embedder.
1681 HistogramTimerScope timer(isolate->counters()->compile_deserialize());
1682 RuntimeCallTimerScope runtimeTimer(isolate,
1683 &RuntimeCallStats::CompileDeserialize);
1684 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
1685 "V8.CompileDeserialize");
1686 Handle<SharedFunctionInfo> inner_result;
1687 if (CodeSerializer::Deserialize(isolate, *cached_data, source)
1688 .ToHandle(&inner_result)) {
1689 // Promote to per-isolate compilation cache.
1690 // TODO(mvstanton): create a feedback vector array here.
1691 DCHECK(inner_result->is_compiled());
1692 Handle<FeedbackVector> feedback_vector =
1693 FeedbackVector::New(isolate, inner_result);
1694 vector = isolate->factory()->NewCell(feedback_vector);
1695 compilation_cache->PutScript(source, context, language_mode,
1696 inner_result, vector);
1697 return inner_result;
1698 }
1699 // Deserializer failed. Fall through to compile.
1700 } else {
1701 if (pair.has_shared()) {
1702 result = Handle<SharedFunctionInfo>(pair.shared(), isolate);
1703 }
1704 if (pair.has_vector()) {
1705 vector = Handle<Cell>(pair.vector(), isolate);
1706 }
1707 }
1708 }
1709
1710 base::ElapsedTimer timer;
1711 if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
1712 compile_options == ScriptCompiler::kProduceCodeCache) {
1713 timer.Start();
1714 }
1715
1716 if (result.is_null() ||
1717 (FLAG_serialize_toplevel &&
1718 compile_options == ScriptCompiler::kProduceCodeCache)) {
1719 // No cache entry found, or embedder wants a code cache. Compile the script.
1720
1721 // Create a script object describing the script to be compiled.
1722 Handle<Script> script = isolate->factory()->NewScript(source);
1723 if (isolate->NeedsSourcePositionsForProfiling()) {
1724 Script::InitLineEnds(script);
1725 }
1726 if (natives == NATIVES_CODE) {
1727 script->set_type(Script::TYPE_NATIVE);
1728 } else if (natives == EXTENSION_CODE) {
1729 script->set_type(Script::TYPE_EXTENSION);
1730 } else if (natives == INSPECTOR_CODE) {
1731 script->set_type(Script::TYPE_INSPECTOR);
1732 }
1733 if (!script_name.is_null()) {
1734 script->set_name(*script_name);
1735 script->set_line_offset(line_offset);
1736 script->set_column_offset(column_offset);
1737 }
1738 script->set_origin_options(resource_options);
1739 if (!source_map_url.is_null()) {
1740 script->set_source_mapping_url(*source_map_url);
1741 }
1742
1743 // Compile the function and add it to the cache.
1744 ParseInfo parse_info(script);
1745 Zone compile_zone(isolate->allocator(), ZONE_NAME);
1746 CompilationInfo info(&compile_zone, &parse_info,
1747 Handle<JSFunction>::null());
1748 if (resource_options.IsModule()) parse_info.set_module();
1749 if (compile_options != ScriptCompiler::kNoCompileOptions) {
1750 parse_info.set_cached_data(cached_data);
1751 }
1752 parse_info.set_compile_options(compile_options);
1753 parse_info.set_extension(extension);
1754 if (!context->IsNativeContext()) {
1755 parse_info.set_outer_scope_info(handle(context->scope_info()));
1756 }
1757 if (FLAG_serialize_toplevel &&
1758 compile_options == ScriptCompiler::kProduceCodeCache) {
1759 info.PrepareForSerializing();
1760 }
1761
1762 parse_info.set_language_mode(
1763 static_cast<LanguageMode>(parse_info.language_mode() | language_mode));
1764 result = CompileToplevel(&info);
1765 if (extension == NULL && !result.is_null()) {
1766 // We need a feedback vector.
1767 DCHECK(result->is_compiled());
1768 Handle<FeedbackVector> feedback_vector =
1769 FeedbackVector::New(isolate, result);
1770 vector = isolate->factory()->NewCell(feedback_vector);
1771 compilation_cache->PutScript(source, context, language_mode, result,
1772 vector);
1773 if (FLAG_serialize_toplevel &&
1774 compile_options == ScriptCompiler::kProduceCodeCache &&
1775 !ContainsAsmModule(script)) {
1776 HistogramTimerScope histogram_timer(
1777 isolate->counters()->compile_serialize());
1778 RuntimeCallTimerScope runtimeTimer(isolate,
1779 &RuntimeCallStats::CompileSerialize);
1780 TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
1781 "V8.CompileSerialize");
1782 *cached_data = CodeSerializer::Serialize(isolate, result, source);
1783 if (FLAG_profile_deserialization) {
1784 PrintF("[Compiling and serializing took %0.3f ms]\n",
1785 timer.Elapsed().InMillisecondsF());
1786 }
1787 }
1788 }
1789
1790 if (result.is_null()) {
1791 if (natives != EXTENSION_CODE && natives != NATIVES_CODE) {
1792 isolate->ReportPendingMessages();
1793 }
1794 } else {
1795 isolate->debug()->OnAfterCompile(script);
1796 }
1797 } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1798 result->ResetForNewContext(isolate->heap()->global_ic_age());
1799 }
1800 return result;
1801 }
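// Illustrative sketch (added for exposition, not part of the original source):
// the cached_data / compile_options plumbing above backs the embedder-facing
// code cache. Assuming the public v8::ScriptCompiler API of this version,
// producing and later consuming a cache looks roughly like:
//
//   v8::ScriptCompiler::Source produce_source(source_string, origin);
//   v8::ScriptCompiler::CompileUnboundScript(
//       isolate, &produce_source, v8::ScriptCompiler::kProduceCodeCache)
//       .ToLocalChecked();
//   const v8::ScriptCompiler::CachedData* cache = produce_source.GetCachedData();
//
//   // Later, possibly in another process, hand the serialized bytes back in:
//   v8::ScriptCompiler::Source consume_source(
//       source_string, origin,
//       new v8::ScriptCompiler::CachedData(cache->data, cache->length));
//   v8::ScriptCompiler::CompileUnboundScript(
//       isolate, &consume_source, v8::ScriptCompiler::kConsumeCodeCache)
//       .ToLocalChecked();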
1802
1803 Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForStreamedScript(
1804 Handle<Script> script, ParseInfo* parse_info, int source_length) {
1805 Isolate* isolate = script->GetIsolate();
1806 // TODO(titzer): increment the counters in caller.
1807 isolate->counters()->total_load_size()->Increment(source_length);
1808 isolate->counters()->total_compile_size()->Increment(source_length);
1809
1810 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1811 parse_info->set_language_mode(
1812 static_cast<LanguageMode>(parse_info->language_mode() | language_mode));
1813
1814 Zone compile_zone(isolate->allocator(), ZONE_NAME);
1815 CompilationInfo compile_info(&compile_zone, parse_info,
1816 Handle<JSFunction>::null());
1817
1818 // The source was parsed lazily, so compiling for debugging is not possible.
1819 DCHECK(!compile_info.is_debug());
1820
1821 Handle<SharedFunctionInfo> result = CompileToplevel(&compile_info);
1822 if (!result.is_null()) isolate->debug()->OnAfterCompile(script);
1823 return result;
1824 }
1825
1826 Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
1827 FunctionLiteral* literal, Handle<Script> script,
1828 CompilationInfo* outer_info) {
1829 // Precondition: code has been parsed and scopes have been analyzed.
1830 Isolate* isolate = outer_info->isolate();
1831 MaybeHandle<SharedFunctionInfo> maybe_existing;
1832
1833 // Find any previously allocated shared function info for the given literal.
1834 maybe_existing = script->FindSharedFunctionInfo(isolate, literal);
1835
1836 // If we found an existing shared function info, return it.
1837 Handle<SharedFunctionInfo> existing;
1838 if (maybe_existing.ToHandle(&existing)) {
1839 DCHECK(!existing->is_toplevel());
1840 return existing;
1841 }
1842
1843 // Allocate a shared function info object which will be compiled lazily.
1844 Handle<SharedFunctionInfo> result =
1845 isolate->factory()->NewSharedFunctionInfoForLiteral(literal, script);
1846 result->set_is_toplevel(false);
1847 Scope* outer_scope = literal->scope()->GetOuterScopeWithContext();
1848 if (outer_scope) {
1849 result->set_outer_scope_info(*outer_scope->scope_info());
1850 }
1851 return result;
1852 }
1853
1854 Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForNative(
1855 v8::Extension* extension, Handle<String> name) {
1856 Isolate* isolate = name->GetIsolate();
1857 v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1858
1859 // Compute the function template for the native function.
1860 v8::Local<v8::FunctionTemplate> fun_template =
1861 extension->GetNativeFunctionTemplate(v8_isolate,
1862 v8::Utils::ToLocal(name));
1863 DCHECK(!fun_template.IsEmpty());
1864
1865 // Instantiate the function and create a shared function info from it.
1866 Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
1867 *fun_template->GetFunction(v8_isolate->GetCurrentContext())
1868 .ToLocalChecked()));
1869 Handle<Code> code = Handle<Code>(fun->shared()->code());
1870 Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
1871 Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
1872 name, FunctionKind::kNormalFunction, code,
1873 Handle<ScopeInfo>(fun->shared()->scope_info()));
1874 shared->set_outer_scope_info(fun->shared()->outer_scope_info());
1875 shared->SetConstructStub(*construct_stub);
1876 shared->set_feedback_metadata(fun->shared()->feedback_metadata());
1877
1878 // Copy the function data to the shared function info.
1879 shared->set_function_data(fun->shared()->function_data());
1880 int parameters = fun->shared()->internal_formal_parameter_count();
1881 shared->set_internal_formal_parameter_count(parameters);
1882
1883 return shared;
1884 }
1885
1886 MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function,
1887 BailoutId osr_ast_id,
1888 JavaScriptFrame* osr_frame) {
1889 DCHECK(!osr_ast_id.IsNone());
1890 DCHECK_NOT_NULL(osr_frame);
1891 return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
1892 }
1893
1894 CompilationJob* Compiler::PrepareUnoptimizedCompilationJob(
1895 CompilationInfo* info) {
1896 VMState<COMPILER> state(info->isolate());
1897 std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
1898 if (job->PrepareJob() != CompilationJob::SUCCEEDED) {
1899 return nullptr;
1900 }
1901 return job.release();
1902 }
1903
1904 bool Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
1905 // Take ownership of compilation job. Deleting job also tears down the zone.
1906 std::unique_ptr<CompilationJob> job(raw_job);
1907
1908 VMState<COMPILER> state(job->info()->isolate());
1909 if (job->info()->IsOptimizing()) {
1910 return FinalizeOptimizedCompilationJob(job.get()) ==
1911 CompilationJob::SUCCEEDED;
1912 } else {
1913 return FinalizeUnoptimizedCompilationJob(job.get()) ==
1914 CompilationJob::SUCCEEDED;
1915 }
1916 }
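// Illustrative sketch (added for exposition, not part of the original source):
// a typical driver of the two helpers above, assuming this version's
// CompilationJob interface (PrepareJob / ExecuteJob / FinalizeJob), is roughly:
//
//   std::unique_ptr<CompilationJob> job(
//       Compiler::PrepareUnoptimizedCompilationJob(info));
//   if (job != nullptr && job->ExecuteJob() == CompilationJob::SUCCEEDED) {
//     // FinalizeCompilationJob takes ownership of the raw pointer.
//     Compiler::FinalizeCompilationJob(job.release());
//   }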
1917
1918 void Compiler::PostInstantiation(Handle<JSFunction> function,
1919 PretenureFlag pretenure) {
1920 Handle<SharedFunctionInfo> shared(function->shared());
1921
1922 if (FLAG_always_opt && shared->allows_lazy_compilation() &&
1923 !function->shared()->HasAsmWasmData() &&
1924 function->shared()->is_compiled()) {
1925 function->MarkForOptimization();
1926 }
1927
1928 Code* code = shared->SearchOptimizedCodeMap(
1929 function->context()->native_context(), BailoutId::None());
1930 if (code != nullptr) {
1931 // Caching of optimized code enabled and optimized code found.
1932 DCHECK(!code->marked_for_deoptimization());
1933 DCHECK(function->shared()->is_compiled());
1934 function->ReplaceCode(code);
1935 }
1936
1937 if (shared->is_compiled()) {
1938 // TODO(mvstanton): pass pretenure flag to EnsureLiterals.
1939 JSFunction::EnsureLiterals(function);
1940 }
1941 }
1942
1943 } // namespace internal
1944 } // namespace v8
1945