// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime/runtime-utils.h"

#include "src/arguments.h"
#include "src/compiler.h"
#include "src/deoptimizer.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/isolate-inl.h"
#include "src/messages.h"
#include "src/v8threads.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

// %CompileLazy(function): compiles the code for |function| on its first
// invocation and installs it. Returns the new Code object, which the
// trampoline then enters.
RUNTIME_FUNCTION(Runtime_CompileLazy) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
#ifdef DEBUG
  // Under --trace-lazy, log only first-time compilations (the shared
  // function info has no compiled code yet).
  if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
    PrintF("[unoptimized: ");
    function->PrintName();
    PrintF("]\n");
  }
#endif
  // Compilation needs headroom beyond the JS stack limit; bail out with a
  // stack overflow rather than crashing inside the compiler.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();

  // Compile the target function.
  DCHECK(function->shared()->allows_lazy_compilation());

  Handle<Code> code;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, code,
                                     Compiler::GetLazyCode(function));
  DCHECK(code->IsJavaScriptCode());

  function->ReplaceCode(*code);
  return *code;
}


namespace {

// Compiles an optimized version of |function| in the given concurrency
// |mode| and installs it. On failure, falls back to (re)installing
// unoptimized code. Returns the installed code, or the exception sentinel
// if a pending exception (e.g. stack overflow) occurred.
Object* CompileOptimized(Isolate* isolate, Handle<JSFunction> function,
                         Compiler::ConcurrencyMode mode) {
  // Optimizing compilation needs stack headroom; fail early if we are
  // already near the JS stack limit.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed(1 * KB)) return isolate->StackOverflow();

  Handle<Code> code;
  Handle<Code> unoptimized(function->shared()->code());
  if (Compiler::GetOptimizedCode(function, unoptimized, mode).ToHandle(&code)) {
    // Optimization succeeded, return optimized code.
    function->ReplaceCode(*code);
  } else {
    // Optimization failed, get unoptimized code.
    if (isolate->has_pending_exception()) {  // Possible stack overflow.
      return isolate->heap()->exception();
    }
    // Re-read the shared code: it may have changed during the attempt.
    code = Handle<Code>(function->shared()->code(), isolate);
    if (code->kind() != Code::FUNCTION &&
        code->kind() != Code::OPTIMIZED_FUNCTION) {
      // The shared code is not usable JS code (presumably it was
      // discarded); compile unoptimized code afresh.
      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
          isolate, code, Compiler::GetUnoptimizedCode(function));
    }
    function->ReplaceCode(*code);
  }

  // The function now has either usable code installed, or sits in the
  // concurrent optimization queue.
  DCHECK(function->code()->kind() == Code::FUNCTION ||
         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
         function->IsInOptimizationQueue());
  return function->code();
}

}  // namespace


// %CompileOptimized_Concurrent(function): requests optimized compilation
// of |function| on a background thread via CompileOptimized.
RUNTIME_FUNCTION(Runtime_CompileOptimized_Concurrent) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  return CompileOptimized(isolate, function, Compiler::CONCURRENT);
}


// %CompileOptimized_NotConcurrent(function): optimizes |function| on the
// main thread, blocking until compilation finishes.
RUNTIME_FUNCTION(Runtime_CompileOptimized_NotConcurrent) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  return CompileOptimized(isolate, function, Compiler::NOT_CONCURRENT);
}


// %NotifyStubFailure(): called after a stub bailout. The only work left
// is to dispose of the Deoptimizer instance set up for the bailout.
RUNTIME_FUNCTION(Runtime_NotifyStubFailure) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 0);
  // Take ownership of the Deoptimizer that was stashed on the isolate.
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());
  delete deoptimizer;
  return isolate->heap()->undefined_value();
}


108 class ActivationsFinder : public ThreadVisitor {
109 public:
110 Code* code_;
111 bool has_code_activations_;
112
ActivationsFinder(Code * code)113 explicit ActivationsFinder(Code* code)
114 : code_(code), has_code_activations_(false) {}
115
VisitThread(Isolate * isolate,ThreadLocalTop * top)116 void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
117 JavaScriptFrameIterator it(isolate, top);
118 VisitFrames(&it);
119 }
120
VisitFrames(JavaScriptFrameIterator * it)121 void VisitFrames(JavaScriptFrameIterator* it) {
122 for (; !it->done(); it->Advance()) {
123 JavaScriptFrame* frame = it->frame();
124 if (code_->contains(frame->pc())) has_code_activations_ = true;
125 }
126 }
127 };


// %NotifyDeoptimized(bailout_type): called on re-entry into unoptimized
// code after a deopt. Materializes heap objects captured by the
// deoptimizer, restores the context, and — for non-lazy bailouts — either
// discards the optimized code (when no other activation uses it) or
// deoptimizes the function's remaining activations.
RUNTIME_FUNCTION(Runtime_NotifyDeoptimized) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_SMI_ARG_CHECKED(type_arg, 0);
  Deoptimizer::BailoutType type =
      static_cast<Deoptimizer::BailoutType>(type_arg);
  // Take ownership of the Deoptimizer that was stashed on the isolate.
  Deoptimizer* deoptimizer = Deoptimizer::Grab(isolate);
  DCHECK(AllowHeapAllocation::IsAllowed());

  Handle<JSFunction> function = deoptimizer->function();
  Handle<Code> optimized_code = deoptimizer->compiled_code();

  DCHECK(optimized_code->kind() == Code::OPTIMIZED_FUNCTION);
  DCHECK(type == deoptimizer->bailout_type());

  // Make sure to materialize objects before causing any allocation.
  JavaScriptFrameIterator it(isolate);
  deoptimizer->MaterializeHeapObjects(&it);
  delete deoptimizer;

  JavaScriptFrame* frame = it.frame();
  RUNTIME_ASSERT(frame->function()->IsJSFunction());
  DCHECK(frame->function() == *function);

  // Ensure the context register is updated for materialized objects.
  JavaScriptFrameIterator top_it(isolate);
  JavaScriptFrame* top_frame = top_it.frame();
  isolate->set_context(Context::cast(top_frame->context()));

  // Lazy bailouts skip the code-removal logic below.
  if (type == Deoptimizer::LAZY) {
    return isolate->heap()->undefined_value();
  }

  // Search for other activations of the same function and code.
  ActivationsFinder activations_finder(*optimized_code);
  activations_finder.VisitFrames(&it);
  isolate->thread_manager()->IterateArchivedThreads(&activations_finder);

  if (!activations_finder.has_code_activations_) {
    if (function->code() == *optimized_code) {
      if (FLAG_trace_deopt) {
        PrintF("[removing optimized code for: ");
        function->PrintName();
        PrintF("]\n");
      }
      // Fall back to the unoptimized code held by the shared function info.
      function->ReplaceCode(function->shared()->code());
    }
    // Evict optimized code for this function from the cache so that it
    // doesn't get used for new closures.
    function->shared()->EvictFromOptimizedCodeMap(*optimized_code,
                                                  "notify deoptimized");
  } else {
    // TODO(titzer): we should probably do DeoptimizeCodeList(code)
    // unconditionally if the code is not already marked for deoptimization.
    // If there is an index by shared function info, all the better.
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return isolate->heap()->undefined_value();
}


IsSuitableForOnStackReplacement(Isolate * isolate,Handle<JSFunction> function)192 static bool IsSuitableForOnStackReplacement(Isolate* isolate,
193 Handle<JSFunction> function) {
194 // Keep track of whether we've succeeded in optimizing.
195 if (function->shared()->optimization_disabled()) return false;
196 // If we are trying to do OSR when there are already optimized
197 // activations of the function, it means (a) the function is directly or
198 // indirectly recursive and (b) an optimized invocation has been
199 // deoptimized so that we are currently in an unoptimized activation.
200 // Check for optimized activations of this function.
201 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
202 JavaScriptFrame* frame = it.frame();
203 if (frame->is_optimized() && frame->function() == *function) return false;
204 }
205
206 return true;
207 }


// %CompileForOnStackReplacement(function): attempts to produce optimized
// code that can be entered at the current back edge of the caller's
// unoptimized frame (OSR). Returns the optimized Code object on success,
// or NULL when no usable OSR code is (yet) available, in which case the
// caller keeps running unoptimized code.
RUNTIME_FUNCTION(Runtime_CompileForOnStackReplacement) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
  Handle<Code> caller_code(function->shared()->code());

  // We're not prepared to handle a function with arguments object.
  DCHECK(!function->shared()->uses_arguments());

  RUNTIME_ASSERT(FLAG_use_osr);

  // Passing the PC in the javascript frame from the caller directly is
  // not GC safe, so we walk the stack to get it.
  JavaScriptFrameIterator it(isolate);
  JavaScriptFrame* frame = it.frame();
  if (!caller_code->contains(frame->pc())) {
    // Code on the stack may not be the code object referenced by the shared
    // function info. It may have been replaced to include deoptimization data.
    caller_code = Handle<Code>(frame->LookupCode());
  }

  // Offset of the current pc within the caller's code; identifies which
  // back edge triggered this OSR request.
  uint32_t pc_offset =
      static_cast<uint32_t>(frame->pc() - caller_code->instruction_start());

#ifdef DEBUG
  DCHECK_EQ(frame->function(), *function);
  DCHECK_EQ(frame->LookupCode(), *caller_code);
  DCHECK(caller_code->contains(frame->pc()));
#endif  // DEBUG


  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
  DCHECK(!ast_id.IsNone());

  // Disable concurrent OSR for asm.js, to enable frame specialization.
  Compiler::ConcurrencyMode mode = (isolate->concurrent_osr_enabled() &&
                                    !function->shared()->asm_function() &&
                                    function->shared()->ast_node_count() > 512)
                                       ? Compiler::CONCURRENT
                                       : Compiler::NOT_CONCURRENT;
  Handle<Code> result = Handle<Code>::null();

  OptimizedCompileJob* job = NULL;
  if (mode == Compiler::CONCURRENT) {
    // Gate the OSR entry with a stack check.
    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
    // Poll already queued compilation jobs.
    OptimizingCompileDispatcher* dispatcher =
        isolate->optimizing_compile_dispatcher();
    if (dispatcher->IsQueuedForOSR(function, ast_id)) {
      if (FLAG_trace_osr) {
        PrintF("[OSR - Still waiting for queued: ");
        function->PrintName();
        PrintF(" at AST id %d]\n", ast_id.ToInt());
      }
      // A job for this OSR point is still in flight; try again later.
      return NULL;
    }

    job = dispatcher->FindReadyOSRCandidate(function, ast_id);
  }

  if (job != NULL) {
    // A background job for this OSR point finished; collect its result.
    if (FLAG_trace_osr) {
      PrintF("[OSR - Found ready: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    result = Compiler::GetConcurrentlyOptimizedCode(job);
  } else if (IsSuitableForOnStackReplacement(isolate, function)) {
    if (FLAG_trace_osr) {
      PrintF("[OSR - Compiling: ");
      function->PrintName();
      PrintF(" at AST id %d]\n", ast_id.ToInt());
    }
    MaybeHandle<Code> maybe_result = Compiler::GetOptimizedCode(
        function, caller_code, mode, ast_id,
        (mode == Compiler::NOT_CONCURRENT) ? frame : nullptr);
    if (maybe_result.ToHandle(&result) &&
        result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
      // Optimization is queued. Return to check later.
      return NULL;
    }
  }

  // Revert the patched back edge table, regardless of whether OSR succeeds.
  BackEdgeTable::Revert(isolate, *caller_code);

  // Check whether we ended up with usable optimized code.
  if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
    DeoptimizationInputData* data =
        DeoptimizationInputData::cast(result->deoptimization_data());

    // An OsrPcOffset >= 0 marks code that actually has an OSR entry point.
    if (data->OsrPcOffset()->value() >= 0) {
      DCHECK(BailoutId(data->OsrAstId()->value()) == ast_id);
      if (FLAG_trace_osr) {
        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
               ast_id.ToInt(), data->OsrPcOffset()->value());
      }
      // TODO(titzer): this is a massive hack to make the deopt counts
      // match. Fix heuristics for reenabling optimizations!
      function->shared()->increment_deopt_count();

      if (result->is_turbofanned()) {
        // TurboFanned OSR code cannot be installed into the function.
        // But the function is obviously hot, so optimize it next time.
        function->ReplaceCode(
            isolate->builtins()->builtin(Builtins::kCompileOptimized));
      } else {
        // Crankshafted OSR code can be installed into the function.
        function->ReplaceCode(*result);
      }
      return *result;
    }
  }

  // Failed.
  if (FLAG_trace_osr) {
    PrintF("[OSR - Failed: ");
    function->PrintName();
    PrintF(" at AST id %d]\n", ast_id.ToInt());
  }

  // Make sure the function leaves with real code installed, not a stale
  // compile-optimized stub.
  if (!function->IsOptimized()) {
    function->ReplaceCode(function->shared()->code());
  }
  return NULL;
}


// %TryInstallOptimizedCode(function): invoked from the stack guard. Gives
// the optimizing compile dispatcher a chance to install finished jobs and
// returns the code the caller should continue with.
RUNTIME_FUNCTION(Runtime_TryInstallOptimizedCode) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 1);
  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);

  // First check if this is a real stack overflow.
  StackLimitCheck check(isolate);
  if (check.JsHasOverflowed()) {
    SealHandleScope shs(isolate);
    return isolate->StackOverflow();
  }

  isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
  // Return optimized code if installation succeeded for this function,
  // otherwise fall back to the unoptimized shared code.
  if (function->IsOptimized()) return function->code();
  return function->shared()->code();
}


CodeGenerationFromStringsAllowed(Isolate * isolate,Handle<Context> context)357 bool CodeGenerationFromStringsAllowed(Isolate* isolate,
358 Handle<Context> context) {
359 DCHECK(context->allow_code_gen_from_strings()->IsFalse());
360 // Check with callback if set.
361 AllowCodeGenerationFromStringsCallback callback =
362 isolate->allow_code_gen_callback();
363 if (callback == NULL) {
364 // No callback set and code generation disallowed.
365 return false;
366 } else {
367 // Callback set. Let it decide if code generation is allowed.
368 VMState<EXTERNAL> state(isolate);
369 return callback(v8::Utils::ToLocal(context));
370 }
371 }


// Compiles |source| for a direct eval call in the current context. Throws
// an EvalError when the native context disallows code generation from
// strings (and no embedder callback permits it). Returns the compiled
// JSFunction, or the exception sentinel on failure.
static Object* CompileGlobalEval(Isolate* isolate, Handle<String> source,
                                 Handle<SharedFunctionInfo> outer_info,
                                 LanguageMode language_mode,
                                 int scope_position) {
  Handle<Context> context = Handle<Context>(isolate->context());
  Handle<Context> native_context = Handle<Context>(context->native_context());

  // Check if native context allows code generation from
  // strings. Throw an exception if it doesn't.
  if (native_context->allow_code_gen_from_strings()->IsFalse() &&
      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    Handle<Object> error;
    MaybeHandle<Object> maybe_error = isolate->factory()->NewEvalError(
        MessageTemplate::kCodeGenFromStrings, error_message);
    // Constructing the error may itself fail; only throw when we actually
    // obtained an error object. Either way, signal an exception.
    if (maybe_error.ToHandle(&error)) isolate->Throw(*error);
    return isolate->heap()->exception();
  }

  // Deal with a normal eval call with a string argument. Compile it
  // and return the compiled function bound in the local context.
  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
  Handle<JSFunction> compiled;
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(
      isolate, compiled,
      Compiler::GetFunctionFromEval(source, outer_info, context, language_mode,
                                    restriction, scope_position),
      isolate->heap()->exception());
  return *compiled;
}


// %ResolvePossiblyDirectEval(callee, source, outer_function, language_mode,
// scope_position): implements a call that is syntactically a direct eval.
// If |callee| is not the original global eval function, or the source
// argument is not a string, returns |callee| unchanged so the call
// proceeds as an ordinary (indirect) call. Otherwise compiles |source| in
// the calling context via CompileGlobalEval.
RUNTIME_FUNCTION(Runtime_ResolvePossiblyDirectEval) {
  HandleScope scope(isolate);
  DCHECK(args.length() == 5);

  Handle<Object> callee = args.at<Object>(0);

  // If "eval" didn't refer to the original GlobalEval, it's not a
  // direct call to eval.
  // (And even if it is, but the first argument isn't a string, just let
  // execution default to an indirect call to eval, which will also return
  // the first argument without doing anything).
  if (*callee != isolate->native_context()->global_eval_fun() ||
      !args[1]->IsString()) {
    return *callee;
  }

  // args[3] carries the language mode, args[4] the scope position; both
  // are Smis placed there by the compiler.
  DCHECK(args[3]->IsSmi());
  DCHECK(is_valid_language_mode(args.smi_at(3)));
  LanguageMode language_mode = static_cast<LanguageMode>(args.smi_at(3));
  DCHECK(args[4]->IsSmi());
  Handle<SharedFunctionInfo> outer_info(args.at<JSFunction>(2)->shared(),
                                        isolate);
  return CompileGlobalEval(isolate, args.at<String>(1), outer_info,
                           language_mode, args.smi_at(4));
}
}  // namespace internal
}  // namespace v8