1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <fstream>
6 #include <memory>
7
8 #include "include/v8-function.h"
9 #include "src/api/api-inl.h"
10 #include "src/base/numbers/double.h"
11 #include "src/base/platform/mutex.h"
12 #include "src/codegen/assembler-inl.h"
13 #include "src/codegen/compiler.h"
14 #include "src/codegen/pending-optimization-table.h"
15 #include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
16 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
17 #include "src/debug/debug-evaluate.h"
18 #include "src/deoptimizer/deoptimizer.h"
19 #include "src/execution/arguments-inl.h"
20 #include "src/execution/frames-inl.h"
21 #include "src/execution/isolate-inl.h"
22 #include "src/execution/protectors-inl.h"
23 #include "src/execution/tiering-manager.h"
24 #include "src/heap/heap-inl.h" // For ToBoolean. TODO(jkummerow): Drop.
25 #include "src/heap/heap-write-barrier-inl.h"
26 #include "src/ic/stub-cache.h"
27 #include "src/logging/counters.h"
28 #include "src/objects/heap-object-inl.h"
29 #include "src/objects/js-array-inl.h"
30 #include "src/objects/js-function-inl.h"
31 #include "src/objects/js-regexp-inl.h"
32 #include "src/objects/managed-inl.h"
33 #include "src/objects/smi.h"
34 #include "src/profiler/heap-snapshot-generator.h"
35 #include "src/regexp/regexp.h"
36 #include "src/runtime/runtime-utils.h"
37 #include "src/snapshot/snapshot.h"
38 #include "src/web-snapshot/web-snapshot.h"
39
40 #ifdef V8_ENABLE_MAGLEV
41 #include "src/maglev/maglev.h"
42 #endif // V8_ENABLE_MAGLEV
43
44 #if V8_ENABLE_WEBASSEMBLY
45 #include "src/wasm/wasm-engine.h"
46 #endif // V8_ENABLE_WEBASSEMBLY
47
48 namespace v8 {
49 namespace internal {
50
51 namespace {
// Crashes (via CHECK) unless --fuzzing is enabled; under fuzzing, returns
// undefined so fuzzer-generated misuse of test intrinsics fails softly
// instead of aborting the process.
V8_WARN_UNUSED_RESULT Object CrashUnlessFuzzing(Isolate* isolate) {
  CHECK(FLAG_fuzzing);
  return ReadOnlyRoots(isolate).undefined_value();
}
56
// Boolean-returning variant of CrashUnlessFuzzing: crashes unless --fuzzing
// is enabled; under fuzzing, returns false so callers can bail out politely.
V8_WARN_UNUSED_RESULT bool CrashUnlessFuzzingReturnFalse(Isolate* isolate) {
  CHECK(FLAG_fuzzing);
  return false;
}
61
62 // Returns |value| unless correctness-fuzzer-supressions is enabled,
63 // otherwise returns undefined_value.
ReturnFuzzSafe(Object value,Isolate * isolate)64 V8_WARN_UNUSED_RESULT Object ReturnFuzzSafe(Object value, Isolate* isolate) {
65 return FLAG_correctness_fuzzer_suppressions
66 ? ReadOnlyRoots(isolate).undefined_value()
67 : value;
68 }
69
70 // Assert that the given argument is a number within the Int32 range
71 // and convert it to int32_t. If the argument is not an Int32 we crash if not
72 // in fuzzing mode.
73 #define CONVERT_INT32_ARG_FUZZ_SAFE(name, index) \
74 if (!args[index].IsNumber()) return CrashUnlessFuzzing(isolate); \
75 int32_t name = 0; \
76 if (!args[index].ToInt32(&name)) return CrashUnlessFuzzing(isolate);
77
78 // Cast the given object to a boolean and store it in a variable with
79 // the given name. If the object is not a boolean we crash if not in
80 // fuzzing mode.
81 #define CONVERT_BOOLEAN_ARG_FUZZ_SAFE(name, index) \
82 if (!args[index].IsBoolean()) return CrashUnlessFuzzing(isolate); \
83 bool name = args[index].IsTrue(isolate);
84
// Returns true if |function| originates from asm.js translated to Wasm.
// Always false in builds without WebAssembly support.
bool IsAsmWasmFunction(Isolate* isolate, JSFunction function) {
  DisallowGarbageCollection no_gc;
#if V8_ENABLE_WEBASSEMBLY
  // For simplicity we include invalid asm.js functions whose code hasn't yet
  // been updated to CompileLazy but is still the InstantiateAsmJs builtin.
  return function.shared().HasAsmWasmData() ||
         function.code().builtin_id() == Builtin::kInstantiateAsmJs;
#else
  return false;
#endif  // V8_ENABLE_WEBASSEMBLY
}
96
97 } // namespace
98
// Test intrinsic: drops every entry from both megamorphic stub caches
// (load and store), forcing subsequent ICs to miss.
RUNTIME_FUNCTION(Runtime_ClearMegamorphicStubCache) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  StubCache* caches[] = {isolate->load_stub_cache(),
                         isolate->store_stub_cache()};
  for (StubCache* cache : caches) cache->Clear();
  return ReadOnlyRoots(isolate).undefined_value();
}
106
// Builds a heap number whose bit pattern is composed from two number args:
// args[0] supplies the high 32 bits, args[1] the low 32 bits.
RUNTIME_FUNCTION(Runtime_ConstructDouble) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  const uint64_t hi = NumberToUint32(args[0]);
  const uint64_t lo = NumberToUint32(args[1]);
  const uint64_t bits = (hi << 32) | lo;
  return *isolate->factory()->NewNumber(base::uint64_to_double(bits));
}
115
// Test intrinsic: forces creation of a ConsString from two one-byte strings
// (normal string concatenation may flatten or pick other representations).
RUNTIME_FUNCTION(Runtime_ConstructConsString) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<String> left = args.at<String>(0);
  Handle<String> right = args.at<String>(1);

  // Only the one-byte case is supported by this helper.
  CHECK(left->IsOneByteRepresentation());
  CHECK(right->IsOneByteRepresentation());

  const bool kIsOneByte = true;
  const int length = left->length() + right->length();
  return *isolate->factory()->NewConsString(left, right, length, kIsOneByte);
}
129
// Test intrinsic: forces creation of a SlicedString covering
// [index, length) of a one-byte string; CHECKs that the factory actually
// produced a sliced representation (short substrings would be copied).
RUNTIME_FUNCTION(Runtime_ConstructSlicedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<String> string = args.at<String>(0);
  int index = args.smi_value_at(1);

  CHECK(string->IsOneByteRepresentation());
  // The slice must be a proper suffix, so index must be in range.
  CHECK_LT(index, string->length());

  Handle<String> sliced_string =
      isolate->factory()->NewSubString(string, index, string->length());
  CHECK(sliced_string->IsSlicedString());
  return *sliced_string;
}
144
// Test intrinsic: deoptimizes the given function if it currently has
// optimized code attached; a non-function argument is fuzzing-tolerated.
RUNTIME_FUNCTION(Runtime_DeoptimizeFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());

  Handle<Object> maybe_function = args.at(0);
  if (!maybe_function->IsJSFunction()) return CrashUnlessFuzzing(isolate);

  Handle<JSFunction> function = Handle<JSFunction>::cast(maybe_function);
  if (function->HasAttachedOptimizedCode()) {
    Deoptimizer::DeoptimizeFunction(*function);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
159
// Test intrinsic: deoptimizes the function at the top of the JS stack (the
// caller of this intrinsic) if it has optimized code attached.
RUNTIME_FUNCTION(Runtime_DeoptimizeNow) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  Handle<JSFunction> function;

  // Find the JavaScript function on the top of the stack.
  JavaScriptFrameIterator it(isolate);
  if (!it.done()) function = handle(it.frame()->function(), isolate);
  // No JS frame (e.g. called from a weird fuzzer context): bail out.
  if (function.is_null()) return CrashUnlessFuzzing(isolate);

  if (function->HasAttachedOptimizedCode()) {
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}
177
// Returns true iff this build executes through the instruction-set
// simulator (compile-time USE_SIMULATOR), false on native hardware.
RUNTIME_FUNCTION(Runtime_RunningInSimulator) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
#if defined(USE_SIMULATOR)
  return ReadOnlyRoots(isolate).true_value();
#else
  return ReadOnlyRoots(isolate).false_value();
#endif
}
187
// Evaluates |source| as a global debug-evaluate in REPL mode and returns the
// completion value; propagates exceptions to the caller.
RUNTIME_FUNCTION(Runtime_RuntimeEvaluateREPL) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> source = args.at<String>(0);
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      DebugEvaluate::Global(isolate, source,
                            debug::EvaluateGlobalMode::kDefault,
                            REPLMode::kYes));

  return *result;
}
201
// Returns whether inline caches are enabled (--use-ic).
RUNTIME_FUNCTION(Runtime_ICsAreEnabled) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(FLAG_use_ic);
}
207
// Returns whether this isolate supports concurrent (background)
// recompilation.
RUNTIME_FUNCTION(Runtime_IsConcurrentRecompilationSupported) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      isolate->concurrent_recompilation_enabled());
}
214
// Returns whether Atomics.wait is permitted on this isolate's current
// thread (it is disallowed e.g. on the main thread of a browser).
RUNTIME_FUNCTION(Runtime_IsAtomicsWaitAllowed) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(isolate->allow_atomics_wait());
}
220
221 namespace {
222
223 template <CodeKind code_kind>
224 bool CanOptimizeFunction(Handle<JSFunction> function, Isolate* isolate,
225 IsCompiledScope* is_compiled_scope);
226
// Turbofan specialization: decides whether |function| may be manually marked
// for Turbofan optimization, compiling it first if necessary. Returns false
// (crashing outside fuzzing where noted) when optimization is impossible or
// pointless. The checks below are order-sensitive: compilation must happen
// before inspecting optimization state.
template <>
bool CanOptimizeFunction<CodeKind::TURBOFAN>(
    Handle<JSFunction> function, Isolate* isolate,
    IsCompiledScope* is_compiled_scope) {
  // The following conditions were lifted (in part) from the DCHECK inside
  // JSFunction::MarkForOptimization().

  if (!function->shared().allows_lazy_compilation()) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  // If function isn't compiled, compile it now.
  if (!is_compiled_scope->is_compiled() &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         is_compiled_scope)) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  // Optimization globally disabled: silently decline (not a fuzzing issue).
  if (!FLAG_opt) return false;

  // %NeverOptimizeFunction was applied to this function.
  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  if (IsAsmWasmFunction(isolate, *function)) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::MarkedForOptimization(isolate, function);
  }

  CodeKind kind = CodeKindForTopTier();
  // Already optimized (or already tiering up): nothing to do, but keep the
  // test-runner bookkeeping in sync.
  if (function->HasAvailableOptimizedCode() ||
      function->HasAvailableCodeKind(kind)) {
    DCHECK(function->HasAttachedOptimizedCode() ||
           function->ChecksTieringState());
    if (FLAG_testing_d8_test_runner) {
      PendingOptimizationTable::FunctionWasOptimized(isolate, function);
    }
    return false;
  }

  return true;
}
274
#ifdef V8_ENABLE_MAGLEV
// Maglev specialization: permits marking only when --maglev is on, the
// function is not asm.js-translated Wasm, and its active tier is below
// Maglev.
template <>
bool CanOptimizeFunction<CodeKind::MAGLEV>(Handle<JSFunction> function,
                                           Isolate* isolate,
                                           IsCompiledScope* is_compiled_scope) {
  if (!FLAG_maglev) return false;

  CHECK(!IsAsmWasmFunction(isolate, *function));

  // TODO(v8:7700): Disabled optimization due to deopts?
  // TODO(v8:7700): Already cached?

  return function->GetActiveTier() < CodeKind::MAGLEV;
}
#endif  // V8_ENABLE_MAGLEV
290
OptimizeFunctionOnNextCall(RuntimeArguments & args,Isolate * isolate)291 Object OptimizeFunctionOnNextCall(RuntimeArguments& args, Isolate* isolate) {
292 if (args.length() != 1 && args.length() != 2) {
293 return CrashUnlessFuzzing(isolate);
294 }
295
296 Handle<Object> function_object = args.at(0);
297 if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
298 Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
299
300 static constexpr CodeKind kCodeKind = CodeKind::TURBOFAN;
301
302 IsCompiledScope is_compiled_scope(
303 function->shared().is_compiled_scope(isolate));
304 if (!CanOptimizeFunction<kCodeKind>(function, isolate, &is_compiled_scope)) {
305 return ReadOnlyRoots(isolate).undefined_value();
306 }
307
308 ConcurrencyMode concurrency_mode = ConcurrencyMode::kSynchronous;
309 if (args.length() == 2) {
310 Handle<Object> type = args.at(1);
311 if (!type->IsString()) return CrashUnlessFuzzing(isolate);
312 if (Handle<String>::cast(type)->IsOneByteEqualTo(
313 base::StaticCharVector("concurrent")) &&
314 isolate->concurrent_recompilation_enabled()) {
315 concurrency_mode = ConcurrencyMode::kConcurrent;
316 }
317 }
318
319 // This function may not have been lazily compiled yet, even though its shared
320 // function has.
321 if (!function->is_compiled()) {
322 DCHECK(function->shared().HasBytecodeArray());
323 CodeT codet = *BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
324 if (function->shared().HasBaselineCode()) {
325 codet = function->shared().baseline_code(kAcquireLoad);
326 }
327 function->set_code(codet);
328 }
329
330 TraceManualRecompile(*function, kCodeKind, concurrency_mode);
331 JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
332 function->MarkForOptimization(isolate, CodeKind::TURBOFAN, concurrency_mode);
333
334 return ReadOnlyRoots(isolate).undefined_value();
335 }
336
EnsureFeedbackVector(Isolate * isolate,Handle<JSFunction> function)337 bool EnsureFeedbackVector(Isolate* isolate, Handle<JSFunction> function) {
338 // Check function allows lazy compilation.
339 if (!function->shared().allows_lazy_compilation()) return false;
340
341 if (function->has_feedback_vector()) return true;
342
343 // If function isn't compiled, compile it now.
344 IsCompiledScope is_compiled_scope(
345 function->shared().is_compiled_scope(function->GetIsolate()));
346 // If the JSFunction isn't compiled but it has a initialized feedback cell
347 // then no need to compile. CompileLazy builtin would handle these cases by
348 // installing the code from SFI. Calling compile here may cause another
349 // optimization if FLAG_always_opt is set.
350 bool needs_compilation =
351 !function->is_compiled() && !function->has_closure_feedback_cell_array();
352 if (needs_compilation &&
353 !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
354 &is_compiled_scope)) {
355 return false;
356 }
357
358 // Ensure function has a feedback vector to hold type feedback for
359 // optimization.
360 JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
361 return true;
362 }
363
364 } // namespace
365
// Test intrinsic: compiles args[0] with the Sparkplug (baseline) compiler,
// first ensuring bytecode exists. Only user JavaScript is supported;
// everything else is fuzzing-tolerated. Returns the function itself.
RUNTIME_FUNCTION(Runtime_CompileBaseline) {
  HandleScope scope(isolate);
  if (args.length() != 1) {
    return CrashUnlessFuzzing(isolate);
  }
  Handle<Object> function_object = args.at(0);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);

  IsCompiledScope is_compiled_scope =
      function->shared(isolate).is_compiled_scope(isolate);

  if (!function->shared(isolate).IsUserJavaScript()) {
    return CrashUnlessFuzzing(isolate);
  }

  // First compile the bytecode, if we have to.
  if (!is_compiled_scope.is_compiled() &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         &is_compiled_scope)) {
    return CrashUnlessFuzzing(isolate);
  }

  if (!Compiler::CompileBaseline(isolate, function, Compiler::CLEAR_EXCEPTION,
                                 &is_compiled_scope)) {
    return CrashUnlessFuzzing(isolate);
  }

  return *function;
}
396
397 // TODO(v8:7700): Remove this function once we no longer need it to measure
398 // maglev compile times. For normal tierup, OptimizeMaglevOnNextCall should be
399 // used instead.
400 #ifdef V8_ENABLE_MAGLEV
// Benchmarks Maglev compilation: compiles args[0] with Maglev args[1] times,
// prints the average compile time, and installs the first compiled code
// object on the function.
RUNTIME_FUNCTION(Runtime_BenchMaglev) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 2);
  Handle<JSFunction> function = args.at<JSFunction>(0);
  int count = args.smi_value_at(1);

  Handle<CodeT> codet;
  base::ElapsedTimer timer;
  timer.Start();
  // Keep the first result (installed below); repeat compiles only for timing.
  codet = Maglev::Compile(isolate, function).ToHandleChecked();
  for (int i = 1; i < count; ++i) {
    // Per-iteration scope so repeated compiles don't accumulate handles.
    HandleScope handle_scope(isolate);
    Maglev::Compile(isolate, function);
  }
  PrintF("Maglev compile time: %g ms!\n",
         timer.Elapsed().InMillisecondsF() / count);

  function->set_code(*codet);

  return ReadOnlyRoots(isolate).undefined_value();
}
422 #else
// Stub for builds without Maglev: prints a notice and does nothing.
RUNTIME_FUNCTION(Runtime_BenchMaglev) {
  PrintF("Maglev is not enabled.\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
427 #endif // V8_ENABLE_MAGLEV
428
// Returns whether args[0]'s currently active code tier is Maglev.
RUNTIME_FUNCTION(Runtime_ActiveTierIsMaglev) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);
  Handle<JSFunction> function = args.at<JSFunction>(0);
  return isolate->heap()->ToBoolean(function->ActiveTierIsMaglev());
}
435
436 #ifdef V8_ENABLE_MAGLEV
// Marks args[0] for Maglev tier-up on its next invocation (synchronous
// only; concurrent Maglev compiles are not yet supported).
RUNTIME_FUNCTION(Runtime_OptimizeMaglevOnNextCall) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);
  Handle<JSFunction> function = args.at<JSFunction>(0);

  static constexpr CodeKind kCodeKind = CodeKind::MAGLEV;

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  if (!CanOptimizeFunction<kCodeKind>(function, isolate, &is_compiled_scope)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  DCHECK(is_compiled_scope.is_compiled());
  DCHECK(function->is_compiled());

  // TODO(v8:7700): Support concurrent compiles.
  const ConcurrencyMode concurrency_mode = ConcurrencyMode::kSynchronous;

  TraceManualRecompile(*function, kCodeKind, concurrency_mode);
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  function->MarkForOptimization(isolate, kCodeKind, concurrency_mode);

  return ReadOnlyRoots(isolate).undefined_value();
}
461 #else
// Stub for builds without Maglev: prints a notice and does nothing.
RUNTIME_FUNCTION(Runtime_OptimizeMaglevOnNextCall) {
  PrintF("Maglev is not enabled.\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
466 #endif // V8_ENABLE_MAGLEV
467
468 // TODO(jgruber): Rename to OptimizeTurbofanOnNextCall.
// Thin wrapper delegating to the shared OptimizeFunctionOnNextCall helper
// (Turbofan tier-up).
RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
  HandleScope scope(isolate);
  return OptimizeFunctionOnNextCall(args, isolate);
}
473
// Ensures args[0] has a feedback vector; the helper's failure result is
// deliberately ignored here (best effort).
RUNTIME_FUNCTION(Runtime_EnsureFeedbackVectorForFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  EnsureFeedbackVector(isolate, function);
  return ReadOnlyRoots(isolate).undefined_value();
}
481
// Prepares args[0] for a later %OptimizeFunctionOnNextCall: ensures a
// feedback vector and registers the function in the pending-optimization
// table (under --testing-d8-test-runner) so its bytecode is kept alive.
// An optional second argument "allow heuristic optimization" permits the
// normal tiering heuristics to fire too.
RUNTIME_FUNCTION(Runtime_PrepareFunctionForOptimization) {
  HandleScope scope(isolate);
  if ((args.length() != 1 && args.length() != 2) || !args[0].IsJSFunction()) {
    return CrashUnlessFuzzing(isolate);
  }
  Handle<JSFunction> function = args.at<JSFunction>(0);

  bool allow_heuristic_optimization = false;
  if (args.length() == 2) {
    Handle<Object> sync_object = args.at(1);
    if (!sync_object->IsString()) return CrashUnlessFuzzing(isolate);
    Handle<String> sync = Handle<String>::cast(sync_object);
    if (sync->IsOneByteEqualTo(
            base::StaticCharVector("allow heuristic optimization"))) {
      allow_heuristic_optimization = true;
    }
  }

  if (!EnsureFeedbackVector(isolate, function)) {
    return CrashUnlessFuzzing(isolate);
  }

  // If optimization is disabled for the function, return without making it
  // pending optimize for test.
  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzing(isolate);
  }

  if (IsAsmWasmFunction(isolate, *function)) return CrashUnlessFuzzing(isolate);

  // Hold onto the bytecode array between marking and optimization to ensure
  // it's not flushed.
  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::PreparedForOptimization(
        isolate, function, allow_heuristic_optimization);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}
523
524 namespace {
525
// Drains the concurrent optimizing-compile dispatcher: waits for all
// in-flight compile tasks, installs their results, and re-enables automatic
// finalization. Requires concurrent recompilation to be enabled.
void FinalizeOptimization(Isolate* isolate) {
  DCHECK(isolate->concurrent_recompilation_enabled());
  isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
  isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
  isolate->optimizing_compile_dispatcher()->set_finalize(true);
}
532
// Finds the bytecode offset of the JumpLoop most relevant to the frame's
// current position: preferably a loop that encloses the current offset,
// otherwise the first JumpLoop after it; BytecodeOffset::None() if neither
// exists (e.g. the loop was elided by bytecode generation).
BytecodeOffset OffsetOfNextJumpLoop(Isolate* isolate, UnoptimizedFrame* frame) {
  Handle<BytecodeArray> bytecode_array(frame->GetBytecodeArray(), isolate);
  const int current_offset = frame->GetBytecodeOffset();

  interpreter::BytecodeArrayIterator it(bytecode_array, current_offset);

  // First, look for a loop that contains the current bytecode offset.
  for (; !it.done(); it.Advance()) {
    if (it.current_bytecode() != interpreter::Bytecode::kJumpLoop) {
      continue;
    }
    // JumpLoop jumps backwards, so [target, current] spans the loop body;
    // skip loops that do not contain the current offset.
    if (!base::IsInRange(current_offset, it.GetJumpTargetOffset(),
                         it.current_offset())) {
      continue;
    }

    return BytecodeOffset(it.current_offset());
  }

  // Fall back to any loop after the current offset.
  it.SetOffset(current_offset);
  for (; !it.done(); it.Advance()) {
    if (it.current_bytecode() == interpreter::Bytecode::kJumpLoop) {
      return BytecodeOffset(it.current_offset());
    }
  }

  return BytecodeOffset::None();
}
562
563 } // namespace
564
// Test intrinsic: requests on-stack replacement (OSR) for a function on the
// current JS stack. An optional Smi argument selects how many frames below
// the top to target. Marks the function for non-concurrent Turbofan
// optimization and asks the tiering manager to OSR at the next opportunity;
// with --concurrent-osr it additionally pre-compiles the next JumpLoop's OSR
// code concurrently and finalizes it immediately.
RUNTIME_FUNCTION(Runtime_OptimizeOsr) {
  HandleScope handle_scope(isolate);
  DCHECK(args.length() == 0 || args.length() == 1);

  Handle<JSFunction> function;

  // The optional parameter determines the frame being targeted.
  int stack_depth = 0;
  if (args.length() == 1) {
    if (!args[0].IsSmi()) return CrashUnlessFuzzing(isolate);
    stack_depth = args.smi_value_at(0);
  }

  // Find the JavaScript function on the top of the stack.
  JavaScriptFrameIterator it(isolate);
  while (!it.done() && stack_depth--) it.Advance();
  if (!it.done()) function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);

  // Optimization/OSR globally disabled: silently succeed.
  if (V8_UNLIKELY(!FLAG_opt) || V8_UNLIKELY(!FLAG_use_osr)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  if (!function->shared().allows_lazy_compilation()) {
    return CrashUnlessFuzzing(isolate);
  }

  // %NeverOptimizeFunction was applied to this function.
  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzing(isolate);
  }

  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::MarkedForOptimization(isolate, function);
  }

  if (function->HasAvailableOptimizedCode()) {
    DCHECK(function->HasAttachedOptimizedCode() ||
           function->ChecksTieringState());
    // If function is already optimized, remove the bytecode array from the
    // pending optimize for test table and return.
    if (FLAG_testing_d8_test_runner) {
      PendingOptimizationTable::FunctionWasOptimized(isolate, function);
    }
    return ReadOnlyRoots(isolate).undefined_value();
  }

  // OSR replaces an unoptimized (interpreter/baseline) frame only.
  if (!it.frame()->is_unoptimized()) {
    // Nothing to be done.
    return ReadOnlyRoots(isolate).undefined_value();
  }

  // Ensure that the function is marked for non-concurrent optimization, so that
  // subsequent runs don't also optimize.
  if (FLAG_trace_osr) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[OSR - OptimizeOsr marking ");
    function->ShortPrint(scope.file());
    PrintF(scope.file(), " for non-concurrent optimization]\n");
  }
  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  function->MarkForOptimization(isolate, CodeKind::TURBOFAN,
                                ConcurrencyMode::kSynchronous);

  isolate->tiering_manager()->RequestOsrAtNextOpportunity(*function);

  // If concurrent OSR is enabled, the testing workflow is a bit tricky. We
  // must guarantee that the next JumpLoop installs the finished OSR'd code
  // object, but we still want to exercise concurrent code paths. To do so,
  // we attempt to find the next JumpLoop, start an OSR job for it now, and
  // immediately force finalization.
  // If this succeeds and we correctly match up the next JumpLoop, once we
  // reach the JumpLoop we'll hit the OSR cache and install the generated code.
  // If not (e.g. because we enter a nested loop first), the next JumpLoop will
  // see the cached OSR code with a mismatched offset, and trigger
  // non-concurrent OSR compilation and installation.
  if (isolate->concurrent_recompilation_enabled() && FLAG_concurrent_osr) {
    const BytecodeOffset osr_offset =
        OffsetOfNextJumpLoop(isolate, UnoptimizedFrame::cast(it.frame()));
    if (osr_offset.IsNone()) {
      // The loop may have been elided by bytecode generation (e.g. for
      // patterns such as `do { ... } while (false);`.
      return ReadOnlyRoots(isolate).undefined_value();
    }

    // Finalize first to ensure all pending tasks are done (since we can't
    // queue more than one OSR job for each function).
    FinalizeOptimization(isolate);

    // Queue the job.
    auto unused_result = Compiler::CompileOptimizedOSR(
        isolate, function, osr_offset, UnoptimizedFrame::cast(it.frame()),
        ConcurrencyMode::kConcurrent);
    USE(unused_result);

    // Finalize again to finish the queued job. The next call into
    // Runtime::kCompileOptimizedOSR will pick up the cached Code object.
    FinalizeOptimization(isolate);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}
670
// Test intrinsic: baseline-compiles the function at the top of the JS stack
// (best effort; failures from CompileBaseline are deliberately ignored).
RUNTIME_FUNCTION(Runtime_BaselineOsr) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  // Find the JavaScript function on the top of the stack. Guard against an
  // empty JS stack before dereferencing the frame, matching the pattern in
  // Runtime_DeoptimizeNow and Runtime_OptimizeOsr — the previous code called
  // it.frame() unconditionally.
  JavaScriptFrameIterator it(isolate);
  if (it.done()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);
  if (!FLAG_sparkplug || !FLAG_use_osr) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  if (!it.frame()->is_unoptimized()) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  Compiler::CompileBaseline(isolate, function, Compiler::CLEAR_EXCEPTION,
                            &is_compiled_scope);

  return ReadOnlyRoots(isolate).undefined_value();
}
693
// Permanently disables optimization for args[0] (BailoutReason::
// kNeverOptimize). Only valid while the function is still interpreted or
// running a builtin; any parallel lazy compilation is finished first so it
// cannot clobber the disable flag.
RUNTIME_FUNCTION(Runtime_NeverOptimizeFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> function_object = args.at(0);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
  // Already-tiered-up functions can no longer be marked never-optimize.
  if (sfi->abstract_code(isolate).kind() != CodeKind::INTERPRETED_FUNCTION &&
      sfi->abstract_code(isolate).kind() != CodeKind::BUILTIN) {
    return CrashUnlessFuzzing(isolate);
  }
  // Make sure to finish compilation if there is a parallel lazy compilation in
  // progress, to make sure that the compilation finalization doesn't clobber
  // the SharedFunctionInfo's disable_optimization field.
  if (isolate->lazy_compile_dispatcher() &&
      isolate->lazy_compile_dispatcher()->IsEnqueued(sfi)) {
    isolate->lazy_compile_dispatcher()->FinishNow(sfi);
  }

  sfi->DisableOptimization(BailoutReason::kNeverOptimize);
  return ReadOnlyRoots(isolate).undefined_value();
}
716
// Returns a Smi bitfield of OptimizationStatus flags describing the isolate
// configuration (lite mode, always-opt, ...) and, when args[0] is a
// function, that function's tiering state, attached code kind, and — if it
// is currently on the JS stack — the tier of its topmost activation.
// args[0] may be undefined to query only the isolate-level flags.
RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);

  int status = 0;
  if (FLAG_lite_mode || FLAG_jitless) {
    // Both jitless and lite modes cannot optimize. Unit tests should handle
    // these the same way. In the future, the two flags may become synonyms.
    status |= static_cast<int>(OptimizationStatus::kLiteMode);
  }
  if (!isolate->use_optimizer()) {
    status |= static_cast<int>(OptimizationStatus::kNeverOptimize);
  }
  if (FLAG_always_opt || FLAG_prepare_always_opt) {
    status |= static_cast<int>(OptimizationStatus::kAlwaysOptimize);
  }
  if (FLAG_deopt_every_n_times) {
    status |= static_cast<int>(OptimizationStatus::kMaybeDeopted);
  }

  Handle<Object> function_object = args.at(0);
  // Undefined argument: report only the isolate-level status bits.
  if (function_object->IsUndefined()) return Smi::FromInt(status);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);

  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
  status |= static_cast<int>(OptimizationStatus::kIsFunction);

  // Report any pending/in-progress tier-up request.
  switch (function->tiering_state()) {
    case TieringState::kRequestTurbofan_Synchronous:
      status |= static_cast<int>(OptimizationStatus::kMarkedForOptimization);
      break;
    case TieringState::kRequestTurbofan_Concurrent:
      status |= static_cast<int>(
          OptimizationStatus::kMarkedForConcurrentOptimization);
      break;
    case TieringState::kInProgress:
      status |= static_cast<int>(OptimizationStatus::kOptimizingConcurrently);
      break;
    case TieringState::kNone:
    case TieringState::kRequestMaglev_Synchronous:
    case TieringState::kRequestMaglev_Concurrent:
      // TODO(v8:7700): Maglev support.
      break;
  }

  // Report the kind and deopt state of any attached optimized code.
  if (function->HasAttachedOptimizedCode()) {
    CodeT code = function->code();
    if (code.marked_for_deoptimization()) {
      status |= static_cast<int>(OptimizationStatus::kMarkedForDeoptimization);
    } else {
      status |= static_cast<int>(OptimizationStatus::kOptimized);
    }
    if (code.is_maglevved()) {
      status |= static_cast<int>(OptimizationStatus::kMaglevved);
    } else if (code.is_turbofanned()) {
      status |= static_cast<int>(OptimizationStatus::kTurboFanned);
    }
  }
  if (function->HasAttachedCodeKind(CodeKind::BASELINE)) {
    status |= static_cast<int>(OptimizationStatus::kBaseline);
  }
  if (function->ActiveTierIsIgnition()) {
    status |= static_cast<int>(OptimizationStatus::kInterpreted);
  }

  // Additionally, detect activations of this frame on the stack, and report the
  // status of the topmost frame.
  JavaScriptFrame* frame = nullptr;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->function() == *function) {
      frame = it.frame();
      break;
    }
    it.Advance();
  }
  if (frame != nullptr) {
    status |= static_cast<int>(OptimizationStatus::kIsExecuting);
    if (frame->is_optimized()) {
      status |=
          static_cast<int>(OptimizationStatus::kTopmostFrameIsTurboFanned);
    } else if (frame->is_interpreted()) {
      status |=
          static_cast<int>(OptimizationStatus::kTopmostFrameIsInterpreted);
    } else if (frame->is_baseline()) {
      status |= static_cast<int>(OptimizationStatus::kTopmostFrameIsBaseline);
    }
  }

  return Smi::FromInt(status);
}
808
// Test intrinsic: drains the concurrent dispatcher, then turns off automatic
// finalization so later compile jobs stay queued until explicitly finalized
// (see Runtime_FinalizeOptimization).
RUNTIME_FUNCTION(Runtime_DisableOptimizationFinalization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
    isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
    // Also clear any pending install-code interrupt before disabling.
    isolate->stack_guard()->ClearInstallCode();
    isolate->optimizing_compile_dispatcher()->set_finalize(false);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
819
// Test intrinsic: blocks until all background compile tasks have finished
// (without installing their results).
RUNTIME_FUNCTION(Runtime_WaitForBackgroundOptimization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
827
// Test intrinsic: waits for background compile tasks, installs the results,
// and re-enables automatic finalization. No-op without concurrent
// recompilation.
RUNTIME_FUNCTION(Runtime_FinalizeOptimization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    FinalizeOptimization(isolate);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
835
// Call-as-function handler that always returns null; installed on the
// undetectable object created by Runtime_GetUndetectable below.
static void ReturnNull(const v8::FunctionCallbackInfo<v8::Value>& args) {
  args.GetReturnValue().SetNull();
}
839
RUNTIME_FUNCTION(Runtime_GetUndetectable) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  // Build an object template marked undetectable and callable (calling it
  // yields null), then instantiate it in the current context.
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  Local<v8::ObjectTemplate> templ = v8::ObjectTemplate::New(v8_isolate);
  templ->MarkAsUndetectable();
  templ->SetCallAsFunctionHandler(ReturnNull);
  Local<v8::Object> instance =
      templ->NewInstance(v8_isolate->GetCurrentContext()).ToLocalChecked();
  return *Utils::OpenHandle(*instance);
}
851
call_as_function(const v8::FunctionCallbackInfo<v8::Value> & args)852 static void call_as_function(const v8::FunctionCallbackInfo<v8::Value>& args) {
853 double v1 =
854 args[0]->NumberValue(args.GetIsolate()->GetCurrentContext()).ToChecked();
855 double v2 =
856 args[1]->NumberValue(args.GetIsolate()->GetCurrentContext()).ToChecked();
857 args.GetReturnValue().Set(v8::Number::New(args.GetIsolate(), v1 - v2));
858 }
859
860 // Returns a callable object. The object returns the difference of its two
861 // parameters when it is called.
RUNTIME_FUNCTION(Runtime_GetCallable)862 RUNTIME_FUNCTION(Runtime_GetCallable) {
863 HandleScope scope(isolate);
864 DCHECK_EQ(0, args.length());
865 v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
866 Local<v8::FunctionTemplate> t = v8::FunctionTemplate::New(v8_isolate);
867 Local<ObjectTemplate> instance_template = t->InstanceTemplate();
868 instance_template->SetCallAsFunctionHandler(call_as_function);
869 v8_isolate->GetCurrentContext();
870 Local<v8::Object> instance =
871 t->GetFunction(v8_isolate->GetCurrentContext())
872 .ToLocalChecked()
873 ->NewInstance(v8_isolate->GetCurrentContext())
874 .ToLocalChecked();
875 return *Utils::OpenHandle(*instance);
876 }
877
RUNTIME_FUNCTION(Runtime_ClearFunctionFeedback) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  // Drop all collected type feedback for the given function.
  Handle<JSFunction> target = args.at<JSFunction>(0);
  target->ClearTypeFeedbackInfo();
  return ReadOnlyRoots(isolate).undefined_value();
}
885
RUNTIME_FUNCTION(Runtime_NotifyContextDisposed) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  // NOTE(review): the `true` argument's exact meaning is not visible here —
  // confirm against Heap::NotifyContextDisposed's parameter name.
  isolate->heap()->NotifyContextDisposed(true);
  return ReadOnlyRoots(isolate).undefined_value();
}
892
// Test-only knobs: args[0] sets FLAG_gc_interval (DEBUG builds), args[1] the
// heap allocation timeout (V8_ENABLE_ALLOCATION_TIMEOUT builds), optional
// args[2] toggles inline allocation (DEBUG builds).
RUNTIME_FUNCTION(Runtime_SetAllocationTimeout) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 2 || args.length() == 3);
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  // FUZZ_SAFE conversion — presumably tolerates malformed input under
  // --fuzzing instead of crashing; see the macro's definition.
  CONVERT_INT32_ARG_FUZZ_SAFE(timeout, 1);
  isolate->heap()->set_allocation_timeout(timeout);
#endif
#ifdef DEBUG
  CONVERT_INT32_ARG_FUZZ_SAFE(interval, 0);
  FLAG_gc_interval = interval;
  if (args.length() == 3) {
    // Enable/disable inline allocation if requested.
    CONVERT_BOOLEAN_ARG_FUZZ_SAFE(inline_allocation, 2);
    if (inline_allocation) {
      isolate->heap()->EnableInlineAllocation();
    } else {
      isolate->heap()->DisableInlineAllocation();
    }
  }
#endif
  return ReadOnlyRoots(isolate).undefined_value();
}
915
916 namespace {
917
FixedArrayLenFromSize(int size)918 int FixedArrayLenFromSize(int size) {
919 return std::min({(size - FixedArray::kHeaderSize) / kTaggedSize,
920 FixedArray::kMaxRegularLength});
921 }
922
// Pads the current new-space page with young FixedArrays and, once too
// little room remains for another array, a final filler object, leaving no
// allocatable memory on the page.
void FillUpOneNewSpacePage(Isolate* isolate, Heap* heap) {
  DCHECK(!FLAG_single_generation);
  // Observers must not fire while we deliberately exhaust the page.
  PauseAllocationObserversScope pause_observers(heap);
  NewSpace* space = heap->new_space();
  // We cannot rely on `space->limit()` to point to the end of the current page
  // in the case where inline allocations are disabled, it actually points to
  // the current allocation pointer.
  DCHECK_IMPLIES(!space->IsInlineAllocationEnabled(),
                 space->limit() == space->top());
  int space_remaining =
      static_cast<int>(space->to_space().page_high() - space->top());
  while (space_remaining > 0) {
    int length = FixedArrayLenFromSize(space_remaining);
    if (length > 0) {
      Handle<FixedArray> padding =
          isolate->factory()->NewFixedArray(length, AllocationType::kYoung);
      DCHECK(heap->new_space()->Contains(*padding));
      space_remaining -= padding->Size();
    } else {
      // Not enough room to create another fixed array. Create a filler.
      heap->CreateFillerObjectAt(*heap->new_space()->allocation_top_address(),
                                 space_remaining, ClearRecordedSlots::kNo);
      break;
    }
  }
}
949
950 } // namespace
951
// Fills new space completely: pads the current page, then repeats for every
// fresh page the space can still provide.
RUNTIME_FUNCTION(Runtime_SimulateNewspaceFull) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  NewSpace* space = heap->new_space();
  // Padding allocations themselves must not fail or trigger GC retries.
  AlwaysAllocateScopeForTesting always_allocate(heap);
  do {
    FillUpOneNewSpacePage(isolate, heap);
  } while (space->AddFreshPage());

  return ReadOnlyRoots(isolate).undefined_value();
}
963
RUNTIME_FUNCTION(Runtime_ScheduleGCInStackCheck) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  // Ask for a full GC the next time the interrupt is serviced (i.e. at the
  // next stack check).
  auto request_gc = [](v8::Isolate* v8_isolate, void*) {
    v8_isolate->RequestGarbageCollectionForTesting(
        v8::Isolate::kFullGarbageCollection);
  };
  isolate->RequestInterrupt(request_gc, nullptr);
  return ReadOnlyRoots(isolate).undefined_value();
}
975
// Minimal v8::OutputStream that writes ASCII chunks straight to a file;
// used below by Runtime_TakeHeapSnapshot to persist the snapshot JSON.
class FileOutputStream : public v8::OutputStream {
 public:
  explicit FileOutputStream(const char* filename) : os_(filename) {}
  ~FileOutputStream() override { os_.close(); }

  // Appends one chunk and tells the serializer to keep streaming.
  WriteResult WriteAsciiChunk(char* data, int size) override {
    os_.write(data, size);
    return kContinue;
  }

  // Explicit close on end-of-stream; the destructor closes as well.
  void EndOfStream() override { os_.close(); }

 private:
  std::ofstream os_;
};
991
// Serializes a heap snapshot to disk. Optional args[0] gives the file name;
// the default is "heap.heapsnapshot". No-op under --fuzzing.
RUNTIME_FUNCTION(Runtime_TakeHeapSnapshot) {
  if (FLAG_fuzzing) {
    // We don't want to create snapshots in fuzzers.
    return ReadOnlyRoots(isolate).undefined_value();
  }

  std::string filename = "heap.heapsnapshot";

  if (args.length() >= 1) {
    // Copy the JS string out before the HandleScope (and its handles) die.
    HandleScope hs(isolate);
    Handle<String> filename_as_js_string = args.at<String>(0);
    std::unique_ptr<char[]> buffer = filename_as_js_string->ToCString();
    filename = std::string(buffer.get());
  }

  HeapProfiler* heap_profiler = isolate->heap_profiler();
  // Since this API is intended for V8 devs, we do not treat globals as roots
  // here on purpose.
  HeapSnapshot* snapshot = heap_profiler->TakeSnapshot(
      /* control = */ nullptr, /* resolver = */ nullptr,
      /* treat_global_objects_as_roots = */ false,
      /* capture_numeric_value = */ true);
  FileOutputStream stream(filename.c_str());
  HeapSnapshotJSONSerializer serializer(snapshot);
  serializer.Serialize(&stream);
  return ReadOnlyRoots(isolate).undefined_value();
}
1019
// Prints `maybe_object` to stdout: "[weak cleared]" for a cleared weak ref,
// the full object plus its map in OBJECT_PRINT builds, or a brief one-line
// form otherwise.
static void DebugPrintImpl(MaybeObject maybe_object) {
  StdoutStream os;
  if (maybe_object->IsCleared()) {
    os << "[weak cleared]";
  } else {
    Object object = maybe_object.GetHeapObjectOrSmi();
    bool weak = maybe_object.IsWeak();

#ifdef OBJECT_PRINT
    os << "DebugPrint: ";
    if (weak) os << "[weak] ";
    object.Print(os);
    if (object.IsHeapObject()) {
      HeapObject::cast(object).map().Print(os);
    }
#else
    if (weak) os << "[weak] ";
    // ShortPrint is available in release mode. Print is not.
    os << Brief(object);
#endif
  }
  os << std::endl;
}
1043
// Debug-prints args[0] (supports weak references) and returns it unchanged.
RUNTIME_FUNCTION(Runtime_DebugPrint) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());

  // Read the raw argument slot as a MaybeObject so weak refs print too.
  MaybeObject maybe_object(*args.address_of_arg_at(0));
  DebugPrintImpl(maybe_object);
  return args[0];
}
1052
// Treats args[0] as a numeric address and debug-prints the object at that
// address. Debugging aid only — the address is trusted as-is.
RUNTIME_FUNCTION(Runtime_DebugPrintPtr) {
  SealHandleScope shs(isolate);
  StdoutStream os;
  DCHECK_EQ(1, args.length());

  MaybeObject maybe_object(*args.address_of_arg_at(0));
  if (!maybe_object.IsCleared()) {
    Object object = maybe_object.GetHeapObjectOrSmi();
    size_t pointer;
    // Only print if the argument converts to an integer index (an address).
    if (object.ToIntegerIndex(&pointer)) {
      MaybeObject from_pointer(static_cast<Address>(pointer));
      DebugPrintImpl(from_pointer);
    }
  }
  // We don't allow the converted pointer to leak out to JavaScript.
  return args[0];
}
1070
// Prints " * <name>: <short form of value>" — args[0] is the name string,
// args[1] the value.
RUNTIME_FUNCTION(Runtime_PrintWithNameForAssert) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());

  auto name = String::cast(args[0]);

  PrintF(" * ");
  StringCharacterStream characters(name);
  while (characters.HasMore()) {
    PrintF("%c", characters.GetNext());
  }
  PrintF(": ");
  args[1].ShortPrint();
  PrintF("\n");

  return ReadOnlyRoots(isolate).undefined_value();
}
1089
// Dumps the current JS stack to stdout.
RUNTIME_FUNCTION(Runtime_DebugTrace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  isolate->PrintStack(stdout);
  return ReadOnlyRoots(isolate).undefined_value();
}
1096
// Registers args[0] as a retaining-path target (requires
// --track-retaining-path). Optional args[1] selects the mode: the exact
// string "track-ephemeron-path", or the empty string for the default.
RUNTIME_FUNCTION(Runtime_DebugTrackRetainingPath) {
  HandleScope scope(isolate);
  DCHECK_LE(1, args.length());
  DCHECK_GE(2, args.length());
  CHECK(FLAG_track_retaining_path);
  Handle<HeapObject> target = args.at<HeapObject>(0);
  RetainingPathOption option = RetainingPathOption::kDefault;
  if (args.length() == 2) {
    Handle<String> mode = args.at<String>(1);
    const char track_ephemeron_path[] = "track-ephemeron-path";
    if (mode->IsOneByteEqualTo(base::StaticCharVector(track_ephemeron_path))) {
      option = RetainingPathOption::kTrackEphemeronPath;
    } else {
      // Any other value must be the empty string.
      CHECK_EQ(mode->length(), 0);
    }
  }
  isolate->heap()->AddRetainingPathTarget(target, option);
  return ReadOnlyRoots(isolate).undefined_value();
}
1116
1117 // This will not allocate (flatten the string), but it may run
1118 // very slowly for very deeply nested ConsStrings. For debugging use only.
RUNTIME_FUNCTION(Runtime_GlobalPrint)1119 RUNTIME_FUNCTION(Runtime_GlobalPrint) {
1120 SealHandleScope shs(isolate);
1121 DCHECK_EQ(1, args.length());
1122
1123 auto string = String::cast(args[0]);
1124 StringCharacterStream stream(string);
1125 while (stream.HasMore()) {
1126 uint16_t character = stream.GetNext();
1127 PrintF("%c", character);
1128 }
1129 return string;
1130 }
1131
// Traps into the debugger (e.g. int3) for interactive inspection.
RUNTIME_FUNCTION(Runtime_SystemBreak) {
  // The code below doesn't create handles, but when breaking here in GDB
  // having a handle scope might be useful.
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  base::OS::DebugBreak();
  return ReadOnlyRoots(isolate).undefined_value();
}
1140
// Toggles the isolate-wide force-slow-path flag; args[0] must be exactly
// true or false.
RUNTIME_FUNCTION(Runtime_SetForceSlowPath) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Object flag_value = args[0];
  bool enable = flag_value.IsTrue(isolate);
  // Anything that is not true must be false.
  DCHECK_IMPLIES(!enable, flag_value.IsFalse(isolate));
  isolate->set_force_slow_path(enable);
  return ReadOnlyRoots(isolate).undefined_value();
}
1153
// Aborts the process, printing the AbortReason message for the smi arg and
// a stack trace to stderr. Never returns.
RUNTIME_FUNCTION(Runtime_Abort) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  int message_id = args.smi_value_at(0);
  const char* message = GetAbortReason(static_cast<AbortReason>(message_id));
  base::OS::PrintError("abort: %s\n", message);
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1164
// Aborts with the given message string. With --disable-abortjs the message
// is only logged and a default-constructed Object is returned instead.
RUNTIME_FUNCTION(Runtime_AbortJS) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> message = args.at<String>(0);
  if (FLAG_disable_abortjs) {
    base::OS::PrintError("[disabled] abort: %s\n", message->ToCString().get());
    return Object();
  }
  base::OS::PrintError("abort: %s\n", message->ToCString().get());
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1178
// Abort handler for failed CSA_DCHECKs in generated code; prints the failed
// check's message and the stack, then aborts. Never returns.
RUNTIME_FUNCTION(Runtime_AbortCSADcheck) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> message = args.at<String>(0);
  base::OS::PrintError("abort: CSA_DCHECK failed: %s\n",
                       message->ToCString().get());
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1189
// Prints the disassembly of args[0]'s code to stdout. DEBUG builds only; a
// no-op in release builds.
RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
  HandleScope scope(isolate);
#ifdef DEBUG
  DCHECK_EQ(1, args.length());
  // Get the function and make sure it is compiled.
  Handle<JSFunction> func = args.at<JSFunction>(0);
  IsCompiledScope is_compiled_scope;
  // Prefer installing already-available optimized code over recompiling.
  if (!func->is_compiled() && func->HasAvailableOptimizedCode()) {
    func->set_code(func->feedback_vector().optimized_code());
  }
  CHECK(func->is_compiled() ||
        Compiler::Compile(isolate, func, Compiler::KEEP_EXCEPTION,
                          &is_compiled_scope));
  StdoutStream os;
  func->code().Print(os);
  os << std::endl;
#endif  // DEBUG
  return ReadOnlyRoots(isolate).undefined_value();
}
1209
1210 namespace {
1211
StackSize(Isolate * isolate)1212 int StackSize(Isolate* isolate) {
1213 int n = 0;
1214 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++;
1215 return n;
1216 }
1217
PrintIndentation(int stack_size)1218 void PrintIndentation(int stack_size) {
1219 const int max_display = 80;
1220 if (stack_size <= max_display) {
1221 PrintF("%4d:%*s", stack_size, stack_size, "");
1222 } else {
1223 PrintF("%4d:%*s", stack_size, max_display, "...");
1224 }
1225 }
1226
1227 } // namespace
1228
// Call-tracing hook: prints an indented "<top frame> {" line on entry.
RUNTIME_FUNCTION(Runtime_TraceEnter) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  PrintIndentation(StackSize(isolate));
  JavaScriptFrame::PrintTop(isolate, stdout, true, false);
  PrintF(" {\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
1237
// Call-tracing hook: prints an indented "} -> <value>" line on exit and
// passes the return value (args[0], i.e. TOS) through unchanged.
RUNTIME_FUNCTION(Runtime_TraceExit) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Object return_value = args[0];
  PrintIndentation(StackSize(isolate));
  PrintF("} -> ");
  return_value.ShortPrint();
  PrintF("\n");
  return return_value;  // return TOS
}
1248
// True iff both JSObject arguments share the same map (hidden class).
RUNTIME_FUNCTION(Runtime_HaveSameMap) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
  Map map1 = JSObject::cast(args[0]).map();
  Map map2 = JSObject::cast(args[1]).map();
  return isolate->heap()->ToBoolean(map1 == map2);
}
1256
// True iff the heap object lives in any large-object space (new, code, old).
RUNTIME_FUNCTION(Runtime_InLargeObjectSpace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  auto obj = HeapObject::cast(args[0]);
  Heap* heap = isolate->heap();
  bool in_lo_space = heap->new_lo_space()->Contains(obj) ||
                     heap->code_lo_space()->Contains(obj) ||
                     heap->lo_space()->Contains(obj);
  return heap->ToBoolean(in_lo_space);
}
1266
// True iff the JSArray's elements backing store lives in a large-object
// space (young or old).
RUNTIME_FUNCTION(Runtime_HasElementsInALargeObjectSpace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  FixedArrayBase elements = JSArray::cast(args[0]).elements();
  Heap* heap = isolate->heap();
  bool in_lo_space = heap->new_lo_space()->Contains(elements) ||
                     heap->lo_space()->Contains(elements);
  return heap->ToBoolean(in_lo_space);
}
1276
// True iff the argument lives in the young generation.
RUNTIME_FUNCTION(Runtime_InYoungGeneration) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Object obj = args[0];
  return isolate->heap()->ToBoolean(ObjectInYoungGeneration(obj));
}
1283
// Force pretenuring for the allocation site the passed object belongs to.
RUNTIME_FUNCTION(Runtime_PretenureAllocationSite) {
  DisallowGarbageCollection no_gc;

  // Fuzz-tolerant validation: crash on bad input only when not fuzzing.
  if (args.length() != 1) return CrashUnlessFuzzing(isolate);
  Object arg = args[0];
  if (!arg.IsJSObject()) return CrashUnlessFuzzing(isolate);
  JSObject object = JSObject::cast(arg);

  Heap* heap = object.GetHeap();
  if (!heap->InYoungGeneration(object)) {
    // Object is not in new space, thus there is no memento and nothing to do.
    return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate);
  }

  // Look up the allocation memento associated with this object, if any.
  AllocationMemento memento =
      heap->FindAllocationMemento<Heap::kForRuntime>(object.map(), object);
  if (memento.is_null())
    return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate);
  AllocationSite site = memento.GetAllocationSite();
  heap->PretenureAllocationSiteOnNextCollection(site);
  // Returns true iff a site was found and scheduled for pretenuring.
  return ReturnFuzzSafe(ReadOnlyRoots(isolate).true_value(), isolate);
}
1307
1308 namespace {
1309
// Embedder callback that unconditionally rejects code generation from
// strings; installed by Runtime_DisallowCodegenFromStrings below.
v8::ModifyCodeGenerationFromStringsResult DisallowCodegenFromStringsCallback(
    v8::Local<v8::Context> context, v8::Local<v8::Value> source,
    bool is_code_kind) {
  // {codegen_allowed, modified_source}: disallow, no replacement source.
  return {false, {}};
}
1315
1316 } // namespace
1317
// Installs (args[0] == true) or removes (false) a callback that vetoes all
// code generation from strings in this isolate.
RUNTIME_FUNCTION(Runtime_DisallowCodegenFromStrings) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  bool disallow = Oddball::cast(args[0]).ToBool(isolate);
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  // The ternary keeps the callback type unambiguous for overload resolution.
  v8_isolate->SetModifyCodeGenerationFromStringsCallback(
      disallow ? DisallowCodegenFromStringsCallback : nullptr);
  return ReadOnlyRoots(isolate).undefined_value();
}
1327
// True iff the regexp is irregexp-compiled and has bytecode for the given
// subject encoding (args[1] selects latin1 vs two-byte).
RUNTIME_FUNCTION(Runtime_RegexpHasBytecode) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  bool is_latin1 = Oddball::cast(args[1]).ToBool(isolate);
  // bytecode() is only valid on IRREGEXP regexps; short-circuit otherwise.
  bool has_bytecode = regexp.type_tag() == JSRegExp::IRREGEXP &&
                      regexp.bytecode(is_latin1).IsByteArray();
  return isolate->heap()->ToBoolean(has_bytecode);
}
1341
// True iff the regexp is irregexp-compiled and has native code for the given
// subject encoding (args[1] selects latin1 vs two-byte).
RUNTIME_FUNCTION(Runtime_RegexpHasNativeCode) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  bool is_latin1 = Oddball::cast(args[1]).ToBool(isolate);
  // code() is only valid on IRREGEXP regexps; short-circuit otherwise.
  bool has_native_code = regexp.type_tag() == JSRegExp::IRREGEXP &&
                         regexp.code(is_latin1).IsCodeT();
  return isolate->heap()->ToBoolean(has_native_code);
}
1355
// Returns the regexp's compilation type tag as a string, e.g. "IRREGEXP".
RUNTIME_FUNCTION(Runtime_RegexpTypeTag) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  const char* type_str;
  // Exhaustive over JSRegExp's type tags; no default so new tags fail to
  // compile until handled.
  switch (regexp.type_tag()) {
    case JSRegExp::NOT_COMPILED:
      type_str = "NOT_COMPILED";
      break;
    case JSRegExp::ATOM:
      type_str = "ATOM";
      break;
    case JSRegExp::IRREGEXP:
      type_str = "IRREGEXP";
      break;
    case JSRegExp::EXPERIMENTAL:
      type_str = "EXPERIMENTAL";
      break;
  }
  return *isolate->factory()->NewStringFromAsciiChecked(type_str);
}
1377
// True iff the regexp still uses the unmodified original RegExp machinery
// (per RegExp::IsUnmodifiedRegExp).
RUNTIME_FUNCTION(Runtime_RegexpIsUnmodified) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSRegExp> regexp = args.at<JSRegExp>(0);
  return isolate->heap()->ToBoolean(
      RegExp::IsUnmodifiedRegExp(isolate, regexp));
}
1385
// Generates Runtime_<Name> predicates that forward to the JSObject query
// method of the same name on args[0] (e.g. Runtime_HasSmiElements ->
// JSObject::HasSmiElements()).
#define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \
  RUNTIME_FUNCTION(Runtime_##Name) {               \
    auto obj = JSObject::cast(args[0]);            \
    return isolate->heap()->ToBoolean(obj.Name()); \
  }

ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasFastElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSmiElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSmiOrObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasDoubleElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasHoleyElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasDictionaryElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasPackedElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSloppyArgumentsElements)
// Properties test sitting with elements tests - not fooling anyone.
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasFastProperties)

#undef ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION
1405
// Generates one Runtime_HasFixed<Type>Elements predicate per typed-array
// element type via the TYPED_ARRAYS X-macro list.
#define FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION(Type, type, TYPE, ctype) \
  RUNTIME_FUNCTION(Runtime_HasFixed##Type##Elements) {                     \
    auto obj = JSObject::cast(args[0]);                                    \
    return isolate->heap()->ToBoolean(obj.HasFixed##Type##Elements());     \
  }

TYPED_ARRAYS(FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION)

#undef FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION
1415
1416 RUNTIME_FUNCTION(Runtime_IsConcatSpreadableProtector) {
1417 SealHandleScope shs(isolate);
1418 DCHECK_EQ(0, args.length());
1419 return isolate->heap()->ToBoolean(
1420 Protectors::IsIsConcatSpreadableLookupChainIntact(isolate));
1421 }
1422
// Reports whether the TypedArray species protector is still intact.
RUNTIME_FUNCTION(Runtime_TypedArraySpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsTypedArraySpeciesLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1429
// Reports whether the RegExp species protector is still intact.
RUNTIME_FUNCTION(Runtime_RegExpSpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsRegExpSpeciesLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1436
// Reports whether the Promise species protector is still intact.
RUNTIME_FUNCTION(Runtime_PromiseSpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsPromiseSpeciesLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1443
// Reports whether the Array species protector is still intact.
RUNTIME_FUNCTION(Runtime_ArraySpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsArraySpeciesLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1450
// Reports whether the Map iterator protector is still intact.
RUNTIME_FUNCTION(Runtime_MapIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsMapIteratorLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1457
// Reports whether the Set iterator protector is still intact.
RUNTIME_FUNCTION(Runtime_SetIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsSetIteratorLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1464
// Reports whether the String iterator protector is still intact.
RUNTIME_FUNCTION(Runtime_StringIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsStringIteratorLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
1471
// Reports whether the Array iterator protector is still intact.
RUNTIME_FUNCTION(Runtime_ArrayIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  const bool intact = Protectors::IsArrayIteratorLookupChainIntact(isolate);
  return isolate->heap()->ToBoolean(intact);
}
// For use by tests and fuzzers. It
//
// 1. serializes a snapshot of the current isolate,
// 2. deserializes the snapshot,
// 3. and runs VerifyHeap on the resulting isolate.
//
// The current isolate should not be modified by this call and can keep running
// once it completes.
RUNTIME_FUNCTION(Runtime_SerializeDeserializeNow) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  // Round-trips via the snapshot machinery; failures surface as CHECKs
  // inside the serializer/verifier, not as a JS-visible value.
  Snapshot::SerializeDeserializeAndVerifyForTesting(isolate,
                                                    isolate->native_context());
  return ReadOnlyRoots(isolate).undefined_value();
}
1493
// Verifies a single object: full ObjectVerify in VERIFY_HEAP builds,
// otherwise minimal sanity checks. Always returns true (failures CHECK).
RUNTIME_FUNCTION(Runtime_HeapObjectVerify) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> object = args.at(0);
#ifdef VERIFY_HEAP
  object->ObjectVerify(isolate);
#else
  // Fallback: a heap object must have a valid map; anything else is a smi.
  CHECK(object->IsObject());
  if (object->IsHeapObject()) {
    CHECK(HeapObject::cast(*object).map().IsMap());
  } else {
    CHECK(object->IsSmi());
  }
#endif
  return isolate->heap()->ToBoolean(true);
}
1510
// Exposes JSArrayBuffer::kMaxByteLength to tests as a Number.
RUNTIME_FUNCTION(Runtime_ArrayBufferMaxByteLength) {
  HandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(JSArrayBuffer::kMaxByteLength);
}
1516
// Exposes JSTypedArray::kMaxLength to tests as a Number.
RUNTIME_FUNCTION(Runtime_TypedArrayMaxLength) {
  HandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(JSTypedArray::kMaxLength);
}
1522
// Force-completes in-object slack tracking for the object's map.
RUNTIME_FUNCTION(Runtime_CompleteInobjectSlackTracking) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());

  Handle<JSObject> object = args.at<JSObject>(0);
  MapUpdater::CompleteInobjectSlackTracking(isolate, object->map());

  return ReadOnlyRoots(isolate).undefined_value();
}
1532
// Runtime fallback for %TurbofanStaticAssert in unoptimized code.
RUNTIME_FUNCTION(Runtime_TurbofanStaticAssert) {
  SealHandleScope shs(isolate);
  // Always lowered to StaticAssert node in Turbofan, so we never get here in
  // compiled code.
  return ReadOnlyRoots(isolate).undefined_value();
}
1539
// Returns true when reached (i.e. when running unoptimized).
RUNTIME_FUNCTION(Runtime_IsBeingInterpreted) {
  SealHandleScope shs(isolate);
  // Always lowered to false in Turbofan, so we never get here in compiled code.
  return ReadOnlyRoots(isolate).true_value();
}
1545
// Turns on code-event logging by registering a listener that ignores every
// event but claims to be listening, which keeps the logging paths exercised.
RUNTIME_FUNCTION(Runtime_EnableCodeLoggingForTesting) {
  // The {NoopListener} currently does nothing on any callback, but reports
  // {true} on {is_listening_to_code_events()}. Feel free to add assertions to
  // any method to further test the code logging callbacks.
  class NoopListener final : public CodeEventListener {
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         const char* name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<Name> name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<SharedFunctionInfo> shared,
                         Handle<Name> script_name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<SharedFunctionInfo> shared,
                         Handle<Name> script_name, int line, int column) final {
    }
#if V8_ENABLE_WEBASSEMBLY
    void CodeCreateEvent(LogEventsAndTags tag, const wasm::WasmCode* code,
                         wasm::WasmName name, const char* source_url,
                         int code_offset, int script_id) final {}
#endif  // V8_ENABLE_WEBASSEMBLY

    void CallbackEvent(Handle<Name> name, Address entry_point) final {}
    void GetterCallbackEvent(Handle<Name> name, Address entry_point) final {}
    void SetterCallbackEvent(Handle<Name> name, Address entry_point) final {}
    void RegExpCodeCreateEvent(Handle<AbstractCode> code,
                               Handle<String> source) final {}
    void CodeMoveEvent(AbstractCode from, AbstractCode to) final {}
    void SharedFunctionInfoMoveEvent(Address from, Address to) final {}
    void NativeContextMoveEvent(Address from, Address to) final {}
    void CodeMovingGCEvent() final {}
    void CodeDisableOptEvent(Handle<AbstractCode> code,
                             Handle<SharedFunctionInfo> shared) final {}
    void CodeDeoptEvent(Handle<Code> code, DeoptimizeKind kind, Address pc,
                        int fp_to_sp_delta) final {}
    void CodeDependencyChangeEvent(Handle<Code> code,
                                   Handle<SharedFunctionInfo> shared,
                                   const char* reason) final {}
    void WeakCodeClearEvent() final {}

    bool is_listening_to_code_events() final { return true; }
  };
  // Deliberately leaked singleton (LeakyObject) so the registered listener
  // stays valid for the rest of the process lifetime; it holds no state.
  static base::LeakyObject<NoopListener> noop_listener;
#if V8_ENABLE_WEBASSEMBLY
  wasm::GetWasmEngine()->EnableCodeLogging(isolate);
#endif  // V8_ENABLE_WEBASSEMBLY
  isolate->code_event_dispatcher()->AddListener(noop_listener.get());
  return ReadOnlyRoots(isolate).undefined_value();
}
1595
// Creates a JSRegExp from a pattern (args[0]) and flags string (args[1])
// with an explicit backtrack limit (args[2], a positive smi).
RUNTIME_FUNCTION(Runtime_NewRegExpWithBacktrackLimit) {
  HandleScope scope(isolate);
  DCHECK_EQ(3, args.length());

  Handle<String> pattern = args.at<String>(0);
  Handle<String> flags_string = args.at<String>(1);
  uint32_t backtrack_limit = args.positive_smi_value_at(2);

  // .value() assumes the flags string is valid — invalid flags would fail
  // here; callers are tests using natives syntax.
  JSRegExp::Flags flags =
      JSRegExp::FlagsFromString(isolate, flags_string).value();

  RETURN_RESULT_OR_FAILURE(
      isolate, JSRegExp::New(isolate, pattern, flags, backtrack_limit));
}
1610
// True on targets whose system pointers are 8 bytes wide.
RUNTIME_FUNCTION(Runtime_Is64Bit) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  constexpr bool kIs64Bit = kSystemPointerSize == 8;
  return isolate->heap()->ToBoolean(kIs64Bit);
}
1616
// Exposes BigInt::kMaxLengthBits to tests as a Number.
RUNTIME_FUNCTION(Runtime_BigIntMaxLengthBits) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(BigInt::kMaxLengthBits);
}
1622
// True iff both arguments are the very same heap object (address identity).
RUNTIME_FUNCTION(Runtime_IsSameHeapObject) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<HeapObject> lhs = args.at<HeapObject>(0);
  Handle<HeapObject> rhs = args.at<HeapObject>(1);
  const bool same = lhs->address() == rhs->address();
  return isolate->heap()->ToBoolean(same);
}
1630
// True iff the argument is a string and that string is shared.
RUNTIME_FUNCTION(Runtime_IsSharedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<HeapObject> obj = args.at<HeapObject>(0);
  if (!obj->IsString()) return ReadOnlyRoots(isolate).false_value();
  return isolate->heap()->ToBoolean(Handle<String>::cast(obj)->IsShared());
}
1638
// Serializes args[0] into a web snapshot wrapped in a Managed object.
// Optional args[1] is a JSArray "block list" — presumably objects to
// reference externally rather than serialize (confirm against
// WebSnapshotSerializer). If more externals are discovered than listed, the
// caller's array is rewritten in place with the full externals list.
RUNTIME_FUNCTION(Runtime_WebSnapshotSerialize) {
  if (!FLAG_allow_natives_syntax) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  HandleScope scope(isolate);
  if (args.length() < 1 || args.length() > 2) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kRuntimeWrongNumArgs));
  }
  Handle<Object> object = args.at(0);
  Handle<FixedArray> block_list = isolate->factory()->empty_fixed_array();
  Handle<JSArray> block_list_js_array;
  if (args.length() == 2) {
    if (!args[1].IsJSArray()) {
      THROW_NEW_ERROR_RETURN_FAILURE(
          isolate, NewTypeError(MessageTemplate::kInvalidArgument));
    }
    block_list_js_array = args.at<JSArray>(1);
    // Flatten the array's enumerable string-keyed values into a FixedArray.
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, block_list,
        JSReceiver::GetOwnValues(block_list_js_array,
                                 PropertyFilter::ENUMERABLE_STRINGS));
  }

  auto snapshot_data = std::make_shared<WebSnapshotData>();
  WebSnapshotSerializer serializer(isolate);
  if (!serializer.TakeSnapshot(object, block_list, *snapshot_data)) {
    DCHECK(isolate->has_pending_exception());
    return ReadOnlyRoots(isolate).exception();
  }
  // Write the serializer's full externals list back into the caller's array
  // when serialization found more external objects than were passed in.
  if (!block_list_js_array.is_null() &&
      static_cast<uint32_t>(block_list->length()) <
          serializer.external_objects_count()) {
    Handle<FixedArray> externals = serializer.GetExternals();
    Handle<Map> map = JSObject::GetElementsTransitionMap(block_list_js_array,
                                                         PACKED_ELEMENTS);
    block_list_js_array->set_elements(*externals);
    block_list_js_array->set_length(Smi::FromInt(externals->length()));
    block_list_js_array->set_map(*map);
  }
  // Hand ownership of the snapshot buffer to the GC via a Managed wrapper.
  i::Handle<i::Object> managed_object = Managed<WebSnapshotData>::FromSharedPtr(
      isolate, snapshot_data->buffer_size, snapshot_data);
  return *managed_object;
}
1683
// Deserializes a web snapshot produced by Runtime_WebSnapshotSerialize and
// returns the reconstructed root object.
//   arg 0: a Foreign wrapping Managed<WebSnapshotData> (the snapshot).
//   arg 1 (optional): a JSArray of objects to inject for the snapshot's
//          external references.
// Only available with --allow-natives-syntax; otherwise returns undefined.
RUNTIME_FUNCTION(Runtime_WebSnapshotDeserialize) {
  if (!FLAG_allow_natives_syntax) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  HandleScope scope(isolate);
  // Runtime-call arity is not statically checked, so validate it here.
  if (args.length() == 0 || args.length() > 2) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kRuntimeWrongNumArgs));
  }
  if (!args[0].IsForeign()) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kInvalidArgument));
  }
  Handle<Foreign> foreign_data = args.at<Foreign>(0);
  Handle<FixedArray> injected_references =
      isolate->factory()->empty_fixed_array();
  if (args.length() == 2) {
    if (!args[1].IsJSArray()) {
      THROW_NEW_ERROR_RETURN_FAILURE(
          isolate, NewTypeError(MessageTemplate::kInvalidArgument));
    }
    auto js_array = args.at<JSArray>(1);
    // Flatten the JS array's enumerable own string-keyed values into the
    // FixedArray of injected external references.
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, injected_references,
        JSReceiver::GetOwnValues(js_array, PropertyFilter::ENUMERABLE_STRINGS));
  }

  // NOTE(review): the Foreign is cast unchecked to Managed<WebSnapshotData>;
  // presumably only values produced by Runtime_WebSnapshotSerialize reach
  // here — confirm no other Foreign can be passed from test JS.
  auto data = Managed<WebSnapshotData>::cast(*foreign_data).get();
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  WebSnapshotDeserializer deserializer(v8_isolate, data->buffer,
                                       data->buffer_size);
  if (!deserializer.Deserialize(injected_references)) {
    // Deserialize reports failure by scheduling a pending exception.
    DCHECK(isolate->has_pending_exception());
    return ReadOnlyRoots(isolate).exception();
  }
  Handle<Object> object;
  // Deserialization succeeded but produced no value: report a snapshot error.
  if (!deserializer.value().ToHandle(&object)) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kWebSnapshotError));
  }
  return *object;
}
1726
// Forces a garbage collection of the shared heap (testing only). Takes no
// arguments and returns undefined.
RUNTIME_FUNCTION(Runtime_SharedGC) {
  SealHandleScope scope(isolate);
  // Consistency fix: assert the expected arity like every other runtime
  // function in this file (e.g. Runtime_Is64Bit, Runtime_IsSharedString).
  DCHECK_EQ(0, args.length());
  isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting);
  return ReadOnlyRoots(isolate).undefined_value();
}
1732
1733 } // namespace internal
1734 } // namespace v8
1735