// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/asmjs/asm-js.h"
#include "src/baseline/baseline.h"
#include "src/codegen/compilation-cache.h"
#include "src/codegen/compiler.h"
#include "src/codegen/optimized-compilation-info.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/common/message-template.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/compiler/pipeline.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/arguments-inl.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/parked-scope.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/shared-function-info.h"
#include "src/runtime/runtime-utils.h"

namespace v8 {
namespace internal {

namespace {

Object CompileOptimized(Isolate* isolate, Handle<JSFunction> function,
                        CodeKind target_kind, ConcurrencyMode mode) {
  // As a pre- and post-condition of CompileOptimized, the function *must* be
  // compiled, i.e. the installed Code object must not be CompileLazy.
  IsCompiledScope is_compiled_scope(function->shared(), isolate);
  DCHECK(is_compiled_scope.is_compiled());

  StackLimitCheck check(isolate);
  // Concurrent optimization runs on another thread, thus no additional gap.
  const int gap =
      IsConcurrent(mode) ? 0 : kStackSpaceRequiredForCompilation * KB;
  if (check.JsHasOverflowed(gap)) return isolate->StackOverflow();

  Compiler::CompileOptimized(isolate, function, mode, target_kind);

  DCHECK(function->is_compiled());
  return function->code();
}

}  // namespace

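// Compiles the unoptimized code for |function| on first call. On failure the
// pending exception is kept (Compiler::KEEP_EXCEPTION) and the exception
// sentinel is returned; on success the newly installed code is returned.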
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);

  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);

#ifdef DEBUG
  if (FLAG_trace_lazy && !sfi->is_compiled()) {
    PrintF("[unoptimized: %s]\n", function->DebugNameCStr().get());
  }
#endif

  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(kStackSpaceRequiredForCompilation * KB)) {
    return isolate->StackOverflow();
  }
  IsCompiledScope is_compiled_scope;
  if (!Compiler::Compile(isolate, function, Compiler::KEEP_EXCEPTION,
                         &is_compiled_scope)) {
    return ReadOnlyRoots(isolate).exception();
  }
  DCHECK(function->is_compiled());
  return function->code();
}

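// Installs already-compiled Sparkplug baseline code from the
// SharedFunctionInfo onto |function| and attaches a fresh feedback vector.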
RUNTIME_FUNCTION(Runtime_InstallBaselineCode) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
  DCHECK(sfi->HasBaselineCode());
  IsCompiledScope is_compiled_scope(*sfi, isolate);
  DCHECK(!function->HasAvailableOptimizedCode());
  DCHECK(!function->has_feedback_vector());
  JSFunction::CreateAndAttachFeedbackVector(isolate, function,
                                            &is_compiled_scope);
  CodeT baseline_code = sfi->baseline_code(kAcquireLoad);
  function->set_code(baseline_code);
  return baseline_code;
}

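// The next four runtime functions are the tier-up entry points: each one
// forwards to the CompileOptimized helper above with the requested code kind
// (Maglev or Turbofan) and concurrency mode.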
RUNTIME_FUNCTION(Runtime_CompileMaglev_Concurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  return CompileOptimized(isolate, function, CodeKind::MAGLEV,
                          ConcurrencyMode::kConcurrent);
}

RUNTIME_FUNCTION(Runtime_CompileMaglev_Synchronous) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  return CompileOptimized(isolate, function, CodeKind::MAGLEV,
                          ConcurrencyMode::kSynchronous);
}

RUNTIME_FUNCTION(Runtime_CompileTurbofan_Concurrent) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  return CompileOptimized(isolate, function, CodeKind::TURBOFAN,
                          ConcurrencyMode::kConcurrent);
}

RUNTIME_FUNCTION(Runtime_CompileTurbofan_Synchronous) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  return CompileOptimized(isolate, function, CodeKind::TURBOFAN,
                          ConcurrencyMode::kSynchronous);
}

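// Evicts optimized code that has been marked for deoptimization from the
// feedback vector's optimized-code slot and returns the function's current
// code object.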
RUNTIME_FUNCTION(Runtime_HealOptimizedCodeSlot) {
  SealHandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);

  DCHECK(function->shared().is_compiled());

  function->feedback_vector().EvictOptimizedCodeMarkedForDeoptimization(
      function->shared(), "Runtime_HealOptimizedCodeSlot");
  return function->code();
}

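// Attempts to instantiate a validated asm.js module with the given stdlib,
// foreign and heap arguments. On success the instantiated module object is
// returned; on failure the asm->wasm translation is marked broken, the
// function falls back to lazy compilation, and Smi zero is returned.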
RUNTIME_FUNCTION(Runtime_InstantiateAsmJs) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 4);
  Handle<JSFunction> function = args.at<JSFunction>(0);

  Handle<JSReceiver> stdlib;
  if (args[1].IsJSReceiver()) {
    stdlib = args.at<JSReceiver>(1);
  }
  Handle<JSReceiver> foreign;
  if (args[2].IsJSReceiver()) {
    foreign = args.at<JSReceiver>(2);
  }
  Handle<JSArrayBuffer> memory;
  if (args[3].IsJSArrayBuffer()) {
    memory = args.at<JSArrayBuffer>(3);
  }
  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
#if V8_ENABLE_WEBASSEMBLY
  if (shared->HasAsmWasmData()) {
    Handle<AsmWasmData> data(shared->asm_wasm_data(), isolate);
    MaybeHandle<Object> result = AsmJs::InstantiateAsmWasm(
        isolate, shared, data, stdlib, foreign, memory);
    if (!result.is_null()) return *result.ToHandleChecked();
    // Remove wasm data, mark as broken for asm->wasm, replace function code
    // with UncompiledData, and return a smi 0 to indicate failure.
    SharedFunctionInfo::DiscardCompiled(isolate, shared);
  }
  shared->set_is_asm_wasm_broken(true);
#endif
  DCHECK_EQ(function->code(), *BUILTIN_CODE(isolate, InstantiateAsmJs));
  function->set_code(*BUILTIN_CODE(isolate, CompileLazy));
  DCHECK(!isolate->has_pending_exception());
  return Smi::zero();
}

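// Finishes a deoptimization: grabs the Deoptimizer instance set up for this
// isolate, materializes heap objects for the deoptimized frames, restores the
// context from the topmost JavaScript frame, and invalidates the optimized
// code on eager deopts.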
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(CodeKindCanDeoptimize(deoptimizer->compiled_code()->kind()));
  DCHECK(AllowGarbageCollection::IsAllowed());
  DCHECK(isolate->context().is_null());

  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  Handle<JSFunction> function = deoptimizer->function();
  // For OSR the optimized code isn't installed on the function, so get the
  // code object from deoptimizer.
  Handle<Code> optimized_code = deoptimizer->compiled_code();
  DeoptimizeKind type = deoptimizer->deopt_kind();

  // TODO(turbofan): We currently need the native context to materialize
  // the arguments object, but only to get to its map.
  isolate->set_context(deoptimizer->function()->native_context());

  // Make sure to materialize objects before causing any allocation.
  deoptimizer->MaterializeHeapObjects();
  delete deoptimizer;

  // Ensure the context register is updated for materialized objects.
  JavaScriptFrameIterator top_it(isolate);
  JavaScriptFrame* top_frame = top_it.frame();
  isolate->set_context(Context::cast(top_frame->context()));

  // Invalidate the underlying optimized code on eager deopts.
  if (type == DeoptimizeKind::kEager) {
    Deoptimizer::DeoptimizeFunction(*function, *optimized_code);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}

RUNTIME_FUNCTION(Runtime_ObserveNode) {
  // The %ObserveNode intrinsic only tracks the changes to an observed node in
  // code compiled by TurboFan.
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> obj = args.at(0);
  return *obj;
}

RUNTIME_FUNCTION(Runtime_VerifyType) {
  // %VerifyType has no effect in the interpreter.
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> obj = args.at(0);
  return *obj;
}

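// Handles an on-stack replacement (OSR) request fired from an unoptimized
// (interpreted or baseline) frame: compiles OSR code for the loop at the
// current bytecode offset, either concurrently or synchronously, and returns
// the resulting code object, or an empty result if compilation was started
// concurrently or failed.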
RUNTIME_FUNCTION(Runtime_CompileOptimizedOSR) {
  HandleScope handle_scope(isolate);
  DCHECK_EQ(0, args.length());
  DCHECK(FLAG_use_osr);

  // Determine the frame that triggered the OSR request.
  JavaScriptFrameIterator it(isolate);
  UnoptimizedFrame* frame = UnoptimizedFrame::cast(it.frame());

  DCHECK_IMPLIES(frame->is_interpreted(),
                 frame->LookupCode().is_interpreter_trampoline_builtin());
  DCHECK_IMPLIES(frame->is_baseline(),
                 frame->LookupCode().kind() == CodeKind::BASELINE);
  DCHECK(frame->function().shared().HasBytecodeArray());

  // Determine the entry point for which this OSR request has been fired.
  BytecodeOffset osr_offset = BytecodeOffset(frame->GetBytecodeOffset());
  DCHECK(!osr_offset.IsNone());

  ConcurrencyMode mode =
      V8_LIKELY(isolate->concurrent_recompilation_enabled() &&
                FLAG_concurrent_osr)
          ? ConcurrencyMode::kConcurrent
          : ConcurrencyMode::kSynchronous;

  Handle<JSFunction> function(frame->function(), isolate);
  if (IsConcurrent(mode)) {
    // The synchronous fallback mechanism triggers if we've already got OSR'd
    // code for the current function but at a different OSR offset - that may
    // indicate we're having trouble hitting the correct JumpLoop for code
    // installation. In this case, fall back to synchronous OSR.
    base::Optional<BytecodeOffset> cached_osr_offset =
        function->native_context().osr_code_cache().FirstOsrOffsetFor(
            function->shared());
    if (cached_osr_offset.has_value() &&
        cached_osr_offset.value() != osr_offset) {
      if (V8_UNLIKELY(FLAG_trace_osr)) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(
            scope.file(),
            "[OSR - falling back to synchronous compilation due to mismatched "
            "cached entry. function: %s, requested: %d, cached: %d]\n",
            function->DebugNameCStr().get(), osr_offset.ToInt(),
            cached_osr_offset.value().ToInt());
      }
      mode = ConcurrencyMode::kSynchronous;
    }
  }

  Handle<CodeT> result;
  if (!Compiler::CompileOptimizedOSR(isolate, function, osr_offset, frame, mode)
           .ToHandle(&result)) {
    // An empty result can mean one of two things:
    // 1) we've started a concurrent compilation job - everything is fine.
    // 2) synchronous compilation failed for some reason.

    if (!function->HasAttachedOptimizedCode()) {
      function->set_code(function->shared().GetCode(), kReleaseStore);
    }

    return {};
  }

  DCHECK(!result.is_null());
  DCHECK(result->is_turbofanned());  // TODO(v8:7700): Support Maglev.
  DCHECK(CodeKindIsOptimizedJSFunction(result->kind()));

  DeoptimizationData data =
      DeoptimizationData::cast(result->deoptimization_data());
  DCHECK_EQ(BytecodeOffset(data.OsrBytecodeOffset().value()), osr_offset);
  DCHECK_GE(data.OsrPcOffset().value(), 0);

  if (FLAG_trace_osr) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(),
           "[OSR - entry. function: %s, osr offset: %d, pc offset: %d]\n",
           function->DebugNameCStr().get(), osr_offset.ToInt(),
           data.OsrPcOffset().value());
  }

  if (function->feedback_vector().invocation_count() <= 1 &&
      !IsNone(function->tiering_state()) &&
      !IsInProgress(function->tiering_state())) {
    // With lazy feedback allocation we may not have feedback for the
    // initial part of the function that was executed before we allocated a
    // feedback vector. Reset any tiering states for such functions.
    //
    // TODO(mythria): Instead of resetting the tiering state here we
    // should only mark a function for optimization if it has sufficient
    // feedback. We cannot do this currently since we OSR only after we mark
    // a function for optimization. We should instead change it to be based
    // on the number of ticks.
    function->reset_tiering_state();
  }

  // TODO(mythria): Once we have OSR code cache we may not need to mark
  // the function for non-concurrent compilation. We could arm the loops
  // early so the second execution uses the already compiled OSR code and
  // the optimization occurs concurrently off main thread.
  if (!function->HasAvailableOptimizedCode() &&
      function->feedback_vector().invocation_count() > 1) {
    // If we're not already optimized, set to optimize non-concurrently on the
    // next call, otherwise we'd run unoptimized once more and potentially
    // compile for OSR again.
    if (FLAG_trace_osr) {
      CodeTracer::Scope scope(isolate->GetCodeTracer());
      PrintF(scope.file(),
             "[OSR - forcing synchronous optimization on next entry. function: "
             "%s]\n",
             function->DebugNameCStr().get());
    }
    function->set_tiering_state(TieringState::kRequestTurbofan_Synchronous);
  }

  return *result;
}

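// Compiles the source of a direct eval call. Validates that the native
// context allows code generation from strings, throwing an EvalError if it
// does not, and returns the compiled JSFunction bound to the current context.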
static Object CompileGlobalEval(Isolate* isolate,
                                Handle<i::Object> source_object,
                                Handle<SharedFunctionInfo> outer_info,
                                LanguageMode language_mode,
                                int eval_scope_position, int eval_position) {
  Handle<Context> context(isolate->context(), isolate);
  Handle<Context> native_context(context->native_context(), isolate);

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  MaybeHandle<String> source;
  bool unknown_object;
  std::tie(source, unknown_object) = Compiler::ValidateDynamicCompilationSource(
      isolate, native_context, source_object);
  // If the argument is an unhandled string type, bounce to GlobalEval.
  if (unknown_object) {
    return native_context->global_eval_fun();
  }
  if (source.is_null()) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return ReadOnlyRoots(isolate).exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(
          source.ToHandleChecked(), outer_info, context, language_mode,
          restriction, kNoSourcePosition, eval_scope_position, eval_position),
      ReadOnlyRoots(isolate).exception());
  return *compiled;
}

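// Resolves a possibly-direct eval call: if the callee is not the original
// GlobalEval function the call is an indirect eval and the callee is returned
// unchanged; otherwise the eval source is compiled via CompileGlobalEval.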
RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK_EQ(6, args.length());

  Handle<Object> callee = args.at(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  if (*callee != isolate->native_context()->global_eval_fun()) {
    return *callee;
  }

  DCHECK(is_valid_language_mode(args.smi_value_at(3)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_value_at(3));
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<Object>(1), outer_info,
                           language_mode, args.smi_value_at(4),
                           args.smi_value_at(5));
}

}  // namespace internal
}  // namespace v8