1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include <fstream>
6 #include <memory>
7
8 #include "include/v8-function.h"
9 #include "src/api/api-inl.h"
10 #include "src/base/numbers/double.h"
11 #include "src/base/platform/mutex.h"
12 #include "src/codegen/assembler-inl.h"
13 #include "src/codegen/compiler.h"
14 #include "src/codegen/pending-optimization-table.h"
15 #include "src/compiler-dispatcher/lazy-compile-dispatcher.h"
16 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
17 #include "src/debug/debug-evaluate.h"
18 #include "src/deoptimizer/deoptimizer.h"
19 #include "src/execution/arguments-inl.h"
20 #include "src/execution/frames-inl.h"
21 #include "src/execution/isolate-inl.h"
22 #include "src/execution/protectors-inl.h"
23 #include "src/execution/tiering-manager.h"
24 #include "src/heap/heap-inl.h" // For ToBoolean. TODO(jkummerow): Drop.
25 #include "src/heap/heap-write-barrier-inl.h"
26 #include "src/ic/stub-cache.h"
27 #include "src/logging/counters.h"
28 #include "src/objects/heap-object-inl.h"
29 #include "src/objects/js-array-inl.h"
30 #include "src/objects/js-collection-inl.h"
31 #include "src/objects/js-function-inl.h"
32 #include "src/objects/js-regexp-inl.h"
33 #include "src/objects/managed-inl.h"
34 #include "src/objects/smi.h"
35 #include "src/profiler/heap-snapshot-generator.h"
36 #include "src/regexp/regexp.h"
37 #include "src/runtime/runtime-utils.h"
38 #include "src/snapshot/snapshot.h"
39 #include "src/web-snapshot/web-snapshot.h"
40
41 #ifdef V8_ENABLE_MAGLEV
42 #include "src/maglev/maglev.h"
43 #endif // V8_ENABLE_MAGLEV
44
45 #if V8_ENABLE_WEBASSEMBLY
46 #include "src/wasm/wasm-engine.h"
47 #endif // V8_ENABLE_WEBASSEMBLY
48
49 namespace v8 {
50 namespace internal {
51
52 namespace {
// Bail-out helper for runtime intrinsics that received invalid arguments:
// CHECK-fails (crashes) unless --fuzzing is enabled, in which case it
// returns undefined so fuzzers do not report the intentional crash.
V8_WARN_UNUSED_RESULT Object CrashUnlessFuzzing(Isolate* isolate) {
  CHECK(FLAG_fuzzing);
  return ReadOnlyRoots(isolate).undefined_value();
}
57
// Same as CrashUnlessFuzzing, but for predicate helpers: CHECK-fails unless
// --fuzzing is enabled, in which case it returns false.
V8_WARN_UNUSED_RESULT bool CrashUnlessFuzzingReturnFalse(Isolate* isolate) {
  CHECK(FLAG_fuzzing);
  return false;
}
62
63 // Returns |value| unless correctness-fuzzer-supressions is enabled,
64 // otherwise returns undefined_value.
ReturnFuzzSafe(Object value,Isolate * isolate)65 V8_WARN_UNUSED_RESULT Object ReturnFuzzSafe(Object value, Isolate* isolate) {
66 return FLAG_correctness_fuzzer_suppressions
67 ? ReadOnlyRoots(isolate).undefined_value()
68 : value;
69 }
70
// Assert that the given argument is a number within the Int32 range
// and convert it to int32_t, declaring a local variable |name|. If the
// argument is not an Int32 we crash (via CrashUnlessFuzzing) if not in
// fuzzing mode. Note: expands to multiple statements; only use where a
// plain statement is valid (not as a single-statement if-body).
#define CONVERT_INT32_ARG_FUZZ_SAFE(name, index)                 \
  if (!args[index].IsNumber()) return CrashUnlessFuzzing(isolate); \
  int32_t name = 0;                                                \
  if (!args[index].ToInt32(&name)) return CrashUnlessFuzzing(isolate);

// Cast the given object to a boolean and store it in a local variable with
// the given name. If the object is not a boolean we crash (via
// CrashUnlessFuzzing) if not in fuzzing mode.
#define CONVERT_BOOLEAN_ARG_FUZZ_SAFE(name, index)                \
  if (!args[index].IsBoolean()) return CrashUnlessFuzzing(isolate); \
  bool name = args[index].IsTrue(isolate);
85
// Returns true if |function| is an asm.js function that has been (or will
// be) translated to Wasm. Always false when WebAssembly is compiled out.
bool IsAsmWasmFunction(Isolate* isolate, JSFunction function) {
  DisallowGarbageCollection no_gc;
#if V8_ENABLE_WEBASSEMBLY
  // For simplicity we include invalid asm.js functions whose code hasn't yet
  // been updated to CompileLazy but is still the InstantiateAsmJs builtin.
  return function.shared().HasAsmWasmData() ||
         function.code().builtin_id() == Builtin::kInstantiateAsmJs;
#else
  return false;
#endif  // V8_ENABLE_WEBASSEMBLY
}
97
98 } // namespace
99
// %ClearMegamorphicStubCache: flushes both the load and store megamorphic
// stub caches of the isolate. Test-only; returns undefined.
RUNTIME_FUNCTION(Runtime_ClearMegamorphicStubCache) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  isolate->load_stub_cache()->Clear();
  isolate->store_stub_cache()->Clear();
  return ReadOnlyRoots(isolate).undefined_value();
}
107
// %ConstructDouble(hi, lo): builds a double from two uint32 halves by
// concatenating them into a 64-bit pattern and reinterpreting the bits.
RUNTIME_FUNCTION(Runtime_ConstructDouble) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  const uint64_t high_bits = static_cast<uint64_t>(NumberToUint32(args[0]));
  const uint64_t low_bits = static_cast<uint64_t>(NumberToUint32(args[1]));
  const uint64_t bit_pattern = (high_bits << 32) | low_bits;
  return *isolate->factory()->NewNumber(base::uint64_to_double(bit_pattern));
}
116
// %ConstructConsString(left, right): forces creation of a ConsString from
// two one-byte strings (used by tests that need a specific string shape).
RUNTIME_FUNCTION(Runtime_ConstructConsString) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<String> left = args.at<String>(0);
  Handle<String> right = args.at<String>(1);

  // Only one-byte inputs are supported by this test helper.
  CHECK(left->IsOneByteRepresentation());
  CHECK(right->IsOneByteRepresentation());

  const bool kIsOneByte = true;
  const int length = left->length() + right->length();
  return *isolate->factory()->NewConsString(left, right, length, kIsOneByte);
}
130
// %ConstructSlicedString(string, index): forces creation of a SlicedString
// covering [index, length) of a one-byte string. The index must be small
// enough that the factory actually produces a sliced (not sequential)
// string — the CHECK below enforces that.
RUNTIME_FUNCTION(Runtime_ConstructSlicedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<String> string = args.at<String>(0);
  int index = args.smi_value_at(1);

  CHECK(string->IsOneByteRepresentation());
  CHECK_LT(index, string->length());

  Handle<String> sliced_string =
      isolate->factory()->NewSubString(string, index, string->length());
  CHECK(sliced_string->IsSlicedString());
  return *sliced_string;
}
145
// %DeoptimizeFunction(fn): deoptimizes |fn| if it currently has optimized
// code attached; otherwise a no-op. Non-function arguments crash unless
// fuzzing. Returns undefined.
RUNTIME_FUNCTION(Runtime_DeoptimizeFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());

  Handle<Object> maybe_function = args.at(0);
  if (!maybe_function->IsJSFunction()) return CrashUnlessFuzzing(isolate);

  Handle<JSFunction> js_function = Handle<JSFunction>::cast(maybe_function);
  if (js_function->HasAttachedOptimizedCode()) {
    Deoptimizer::DeoptimizeFunction(*js_function);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
160
// %DeoptimizeNow(): deoptimizes the JavaScript function on top of the
// stack (i.e. the caller of this intrinsic) if it has optimized code.
// Crashes (unless fuzzing) if no JavaScript frame is found.
RUNTIME_FUNCTION(Runtime_DeoptimizeNow) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  Handle<JSFunction> function;

  // Find the JavaScript function on the top of the stack.
  JavaScriptFrameIterator it(isolate);
  if (!it.done()) function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);

  if (function->HasAttachedOptimizedCode()) {
    Deoptimizer::DeoptimizeFunction(*function);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}
178
// %RunningInSimulator(): compile-time answer — true iff this build runs on
// the CPU simulator (USE_SIMULATOR), false on native hardware.
RUNTIME_FUNCTION(Runtime_RunningInSimulator) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
#if defined(USE_SIMULATOR)
  return ReadOnlyRoots(isolate).true_value();
#else
  return ReadOnlyRoots(isolate).false_value();
#endif
}
188
// %RuntimeEvaluateREPL(source): evaluates |source| as a global REPL-mode
// script (let/const redeclaration semantics of the DevTools console) and
// returns its completion value; propagates exceptions to the caller.
RUNTIME_FUNCTION(Runtime_RuntimeEvaluateREPL) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> source = args.at<String>(0);
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      isolate, result,
      DebugEvaluate::Global(isolate, source,
                            debug::EvaluateGlobalMode::kDefault,
                            REPLMode::kYes));

  return *result;
}
202
// %ICsAreEnabled(): reflects the --use-ic flag as a JS boolean.
RUNTIME_FUNCTION(Runtime_ICsAreEnabled) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(FLAG_use_ic);
}
208
// %IsConcurrentRecompilationSupported(): true iff this isolate can run the
// concurrent (background-thread) optimizing compiler.
RUNTIME_FUNCTION(Runtime_IsConcurrentRecompilationSupported) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      isolate->concurrent_recompilation_enabled());
}
215
// %IsAtomicsWaitAllowed(): true iff Atomics.wait may block on this isolate
// (embedders disallow it e.g. on the main thread).
RUNTIME_FUNCTION(Runtime_IsAtomicsWaitAllowed) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(isolate->allow_atomics_wait());
}
221
222 namespace {
223
// Returns true iff |function| can and should be optimized to |code_kind|
// by the manual-optimization intrinsics below. Only explicit
// specializations exist; there is no generic definition.
template <CodeKind code_kind>
bool CanOptimizeFunction(Handle<JSFunction> function, Isolate* isolate,
                         IsCompiledScope* is_compiled_scope);

template <>
bool CanOptimizeFunction<CodeKind::TURBOFAN>(
    Handle<JSFunction> function, Isolate* isolate,
    IsCompiledScope* is_compiled_scope) {
  // The following conditions were lifted (in part) from the DCHECK inside
  // JSFunction::MarkForOptimization().

  if (!function->shared().allows_lazy_compilation()) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  // If function isn't compiled, compile it now.
  if (!is_compiled_scope->is_compiled() &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         is_compiled_scope)) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  // Optimization globally disabled (--no-opt): silently decline.
  if (!FLAG_opt) return false;

  // Functions disabled via %NeverOptimizeFunction are a hard error here
  // (crash) unless running under a fuzzer.
  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  // asm.js-to-wasm functions go through a different pipeline.
  if (IsAsmWasmFunction(isolate, *function)) {
    return CrashUnlessFuzzingReturnFalse(isolate);
  }

  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::MarkedForOptimization(isolate, function);
  }

  CodeKind kind = CodeKindForTopTier();
  if (function->HasAvailableOptimizedCode() ||
      function->HasAvailableCodeKind(kind)) {
    // Already optimized (or mid-tier-up): update the test runner's pending
    // table bookkeeping and report that there is nothing left to do.
    DCHECK(function->HasAttachedOptimizedCode() ||
           function->ChecksTieringState());
    if (FLAG_testing_d8_test_runner) {
      PendingOptimizationTable::FunctionWasOptimized(isolate, function);
    }
    return false;
  }

  return true;
}
275
#ifdef V8_ENABLE_MAGLEV
// Maglev specialization: requires --maglev, rejects asm.js/wasm functions
// outright (CHECK), and only allows tier-up from tiers below Maglev.
template <>
bool CanOptimizeFunction<CodeKind::MAGLEV>(Handle<JSFunction> function,
                                           Isolate* isolate,
                                           IsCompiledScope* is_compiled_scope) {
  if (!FLAG_maglev) return false;

  CHECK(!IsAsmWasmFunction(isolate, *function));

  // TODO(v8:7700): Disabled optimization due to deopts?
  // TODO(v8:7700): Already cached?

  return function->GetActiveTier() < CodeKind::MAGLEV;
}
#endif  // V8_ENABLE_MAGLEV
291
OptimizeFunctionOnNextCall(RuntimeArguments & args,Isolate * isolate)292 Object OptimizeFunctionOnNextCall(RuntimeArguments& args, Isolate* isolate) {
293 if (args.length() != 1 && args.length() != 2) {
294 return CrashUnlessFuzzing(isolate);
295 }
296
297 Handle<Object> function_object = args.at(0);
298 if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
299 Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
300
301 static constexpr CodeKind kCodeKind = CodeKind::TURBOFAN;
302
303 IsCompiledScope is_compiled_scope(
304 function->shared().is_compiled_scope(isolate));
305 if (!CanOptimizeFunction<kCodeKind>(function, isolate, &is_compiled_scope)) {
306 return ReadOnlyRoots(isolate).undefined_value();
307 }
308
309 ConcurrencyMode concurrency_mode = ConcurrencyMode::kSynchronous;
310 if (args.length() == 2) {
311 Handle<Object> type = args.at(1);
312 if (!type->IsString()) return CrashUnlessFuzzing(isolate);
313 if (Handle<String>::cast(type)->IsOneByteEqualTo(
314 base::StaticCharVector("concurrent")) &&
315 isolate->concurrent_recompilation_enabled()) {
316 concurrency_mode = ConcurrencyMode::kConcurrent;
317 }
318 }
319
320 // This function may not have been lazily compiled yet, even though its shared
321 // function has.
322 if (!function->is_compiled()) {
323 DCHECK(function->shared().HasBytecodeArray());
324 CodeT codet = *BUILTIN_CODE(isolate, InterpreterEntryTrampoline);
325 if (function->shared().HasBaselineCode()) {
326 codet = function->shared().baseline_code(kAcquireLoad);
327 }
328 function->set_code(codet);
329 }
330
331 TraceManualRecompile(*function, kCodeKind, concurrency_mode);
332 JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
333 function->MarkForOptimization(isolate, CodeKind::TURBOFAN, concurrency_mode);
334
335 return ReadOnlyRoots(isolate).undefined_value();
336 }
337
// Ensures |function| has a feedback vector, compiling it first if needed.
// Returns false if the function cannot be lazily compiled or compilation
// fails; true once a feedback vector is (or already was) present.
bool EnsureFeedbackVector(Isolate* isolate, Handle<JSFunction> function) {
  // Check function allows lazy compilation.
  if (!function->shared().allows_lazy_compilation()) return false;

  if (function->has_feedback_vector()) return true;

  // If function isn't compiled, compile it now.
  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(function->GetIsolate()));
  // If the JSFunction isn't compiled but it has a initialized feedback cell
  // then no need to compile. CompileLazy builtin would handle these cases by
  // installing the code from SFI. Calling compile here may cause another
  // optimization if FLAG_always_opt is set.
  bool needs_compilation =
      !function->is_compiled() && !function->has_closure_feedback_cell_array();
  if (needs_compilation &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         &is_compiled_scope)) {
    return false;
  }

  // Ensure function has a feedback vector to hold type feedback for
  // optimization.
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  return true;
}
364
365 } // namespace
366
// %CompileBaseline(fn): compiles |fn| with the Sparkplug baseline compiler
// (compiling bytecode first if needed). Only user JavaScript functions are
// accepted; anything else crashes unless fuzzing. Returns the function.
RUNTIME_FUNCTION(Runtime_CompileBaseline) {
  HandleScope scope(isolate);
  if (args.length() != 1) {
    return CrashUnlessFuzzing(isolate);
  }
  Handle<Object> function_object = args.at(0);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);

  IsCompiledScope is_compiled_scope =
      function->shared(isolate).is_compiled_scope(isolate);

  if (!function->shared(isolate).IsUserJavaScript()) {
    return CrashUnlessFuzzing(isolate);
  }

  // First compile the bytecode, if we have to.
  if (!is_compiled_scope.is_compiled() &&
      !Compiler::Compile(isolate, function, Compiler::CLEAR_EXCEPTION,
                         &is_compiled_scope)) {
    return CrashUnlessFuzzing(isolate);
  }

  if (!Compiler::CompileBaseline(isolate, function, Compiler::CLEAR_EXCEPTION,
                                 &is_compiled_scope)) {
    return CrashUnlessFuzzing(isolate);
  }

  return *function;
}
397
// TODO(v8:7700): Remove this function once we no longer need it to measure
// maglev compile times. For normal tierup, OptimizeMaglevOnNextCall should be
// used instead.
#ifdef V8_ENABLE_MAGLEV
// %BenchMaglev(fn, count): compiles |fn| with Maglev |count| times, prints
// the average compile time, and installs the first compilation result.
RUNTIME_FUNCTION(Runtime_BenchMaglev) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 2);
  Handle<JSFunction> function = args.at<JSFunction>(0);
  int count = args.smi_value_at(1);

  Handle<CodeT> codet;
  base::ElapsedTimer timer;
  timer.Start();
  codet = Maglev::Compile(isolate, function).ToHandleChecked();
  for (int i = 1; i < count; ++i) {
    // Per-iteration scope so repeated compiles don't accumulate handles.
    HandleScope handle_scope(isolate);
    Maglev::Compile(isolate, function);
  }
  PrintF("Maglev compile time: %g ms!\n",
         timer.Elapsed().InMillisecondsF() / count);

  function->set_code(*codet);

  return ReadOnlyRoots(isolate).undefined_value();
}
#else
// Stub used when Maglev is compiled out: reports and returns undefined.
RUNTIME_FUNCTION(Runtime_BenchMaglev) {
  PrintF("Maglev is not enabled.\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
#endif  // V8_ENABLE_MAGLEV
429
// %ActiveTierIsMaglev(fn): true iff |fn|'s currently active code tier is
// Maglev.
RUNTIME_FUNCTION(Runtime_ActiveTierIsMaglev) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);
  Handle<JSFunction> function = args.at<JSFunction>(0);
  return isolate->heap()->ToBoolean(function->ActiveTierIsMaglev());
}
436
#ifdef V8_ENABLE_MAGLEV
// %OptimizeMaglevOnNextCall(fn): marks |fn| for (synchronous) Maglev
// optimization on its next invocation, mirroring
// %OptimizeFunctionOnNextCall for Turbofan.
RUNTIME_FUNCTION(Runtime_OptimizeMaglevOnNextCall) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);
  Handle<JSFunction> function = args.at<JSFunction>(0);

  static constexpr CodeKind kCodeKind = CodeKind::MAGLEV;

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  if (!CanOptimizeFunction<kCodeKind>(function, isolate, &is_compiled_scope)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  DCHECK(is_compiled_scope.is_compiled());
  DCHECK(function->is_compiled());

  // TODO(v8:7700): Support concurrent compiles.
  const ConcurrencyMode concurrency_mode = ConcurrencyMode::kSynchronous;

  TraceManualRecompile(*function, kCodeKind, concurrency_mode);
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  function->MarkForOptimization(isolate, kCodeKind, concurrency_mode);

  return ReadOnlyRoots(isolate).undefined_value();
}
#else
// Stub used when Maglev is compiled out: reports and returns undefined.
RUNTIME_FUNCTION(Runtime_OptimizeMaglevOnNextCall) {
  PrintF("Maglev is not enabled.\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
#endif  // V8_ENABLE_MAGLEV
468
// TODO(jgruber): Rename to OptimizeTurbofanOnNextCall.
// Thin runtime wrapper; all logic lives in OptimizeFunctionOnNextCall above.
RUNTIME_FUNCTION(Runtime_OptimizeFunctionOnNextCall) {
  HandleScope scope(isolate);
  return OptimizeFunctionOnNextCall(args, isolate);
}
474
// %EnsureFeedbackVectorForFunction(fn): best-effort — allocates a feedback
// vector for |fn| (compiling it if necessary); the helper's failure result
// is deliberately ignored. Returns undefined.
RUNTIME_FUNCTION(Runtime_EnsureFeedbackVectorForFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  EnsureFeedbackVector(isolate, function);
  return ReadOnlyRoots(isolate).undefined_value();
}
482
// %PrepareFunctionForOptimization(fn[, "allow heuristic optimization"]):
// ensures |fn| has a feedback vector and registers it in the pending
// optimization table (test-runner mode) so its bytecode is not flushed
// between marking and optimization. Invalid input crashes unless fuzzing.
RUNTIME_FUNCTION(Runtime_PrepareFunctionForOptimization) {
  HandleScope scope(isolate);
  if ((args.length() != 1 && args.length() != 2) || !args[0].IsJSFunction()) {
    return CrashUnlessFuzzing(isolate);
  }
  Handle<JSFunction> function = args.at<JSFunction>(0);

  bool allow_heuristic_optimization = false;
  if (args.length() == 2) {
    Handle<Object> sync_object = args.at(1);
    if (!sync_object->IsString()) return CrashUnlessFuzzing(isolate);
    Handle<String> sync = Handle<String>::cast(sync_object);
    if (sync->IsOneByteEqualTo(
            base::StaticCharVector("allow heuristic optimization"))) {
      allow_heuristic_optimization = true;
    }
  }

  if (!EnsureFeedbackVector(isolate, function)) {
    return CrashUnlessFuzzing(isolate);
  }

  // If optimization is disabled for the function, return without making it
  // pending optimize for test.
  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzing(isolate);
  }

  if (IsAsmWasmFunction(isolate, *function)) return CrashUnlessFuzzing(isolate);

  // Hold onto the bytecode array between marking and optimization to ensure
  // it's not flushed.
  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::PreparedForOptimization(
        isolate, function, allow_heuristic_optimization);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}
524
525 namespace {
526
// Drains the concurrent optimizing compiler: waits for all in-flight
// compile tasks, installs the finished code, and re-enables automatic
// finalization. Requires concurrent recompilation to be enabled.
void FinalizeOptimization(Isolate* isolate) {
  DCHECK(isolate->concurrent_recompilation_enabled());
  isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
  isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
  isolate->optimizing_compile_dispatcher()->set_finalize(true);
}
533
// Finds the bytecode offset of the JumpLoop most relevant to the frame's
// current position: preferably a loop enclosing the current offset,
// otherwise the first JumpLoop after it. Returns BytecodeOffset::None()
// when the function contains no suitable loop.
BytecodeOffset OffsetOfNextJumpLoop(Isolate* isolate, UnoptimizedFrame* frame) {
  Handle<BytecodeArray> bytecode_array(frame->GetBytecodeArray(), isolate);
  const int current_offset = frame->GetBytecodeOffset();

  interpreter::BytecodeArrayIterator it(bytecode_array, current_offset);

  // First, look for a loop that contains the current bytecode offset.
  for (; !it.done(); it.Advance()) {
    if (it.current_bytecode() != interpreter::Bytecode::kJumpLoop) {
      continue;
    }
    // JumpLoop jumps backwards: [target, current] brackets the loop body.
    if (!base::IsInRange(current_offset, it.GetJumpTargetOffset(),
                         it.current_offset())) {
      continue;
    }

    return BytecodeOffset(it.current_offset());
  }

  // Fall back to any loop after the current offset.
  it.SetOffset(current_offset);
  for (; !it.done(); it.Advance()) {
    if (it.current_bytecode() == interpreter::Bytecode::kJumpLoop) {
      return BytecodeOffset(it.current_offset());
    }
  }

  return BytecodeOffset::None();
}
563
564 } // namespace
565
// %OptimizeOsr([stack_depth]): requests on-stack replacement for the
// JavaScript frame |stack_depth| levels below the top (default: topmost).
// Marks the function for non-concurrent Turbofan and asks the tiering
// manager to OSR at the next opportunity; in concurrent-OSR configurations
// it additionally pre-compiles the next JumpLoop's OSR code (see the long
// comment below). Invalid input crashes unless fuzzing.
RUNTIME_FUNCTION(Runtime_OptimizeOsr) {
  HandleScope handle_scope(isolate);
  DCHECK(args.length() == 0 || args.length() == 1);

  Handle<JSFunction> function;

  // The optional parameter determines the frame being targeted.
  int stack_depth = 0;
  if (args.length() == 1) {
    if (!args[0].IsSmi()) return CrashUnlessFuzzing(isolate);
    stack_depth = args.smi_value_at(0);
  }

  // Find the JavaScript function on the top of the stack.
  JavaScriptFrameIterator it(isolate);
  while (!it.done() && stack_depth--) it.Advance();
  if (!it.done()) function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);

  // OSR disabled by flags: silently do nothing.
  if (V8_UNLIKELY(!FLAG_opt) || V8_UNLIKELY(!FLAG_use_osr)) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  if (!function->shared().allows_lazy_compilation()) {
    return CrashUnlessFuzzing(isolate);
  }

  if (function->shared().optimization_disabled() &&
      function->shared().disabled_optimization_reason() ==
          BailoutReason::kNeverOptimize) {
    return CrashUnlessFuzzing(isolate);
  }

  if (FLAG_testing_d8_test_runner) {
    PendingOptimizationTable::MarkedForOptimization(isolate, function);
  }

  if (function->HasAvailableOptimizedCode()) {
    DCHECK(function->HasAttachedOptimizedCode() ||
           function->ChecksTieringState());
    // If function is already optimized, remove the bytecode array from the
    // pending optimize for test table and return.
    if (FLAG_testing_d8_test_runner) {
      PendingOptimizationTable::FunctionWasOptimized(isolate, function);
    }
    return ReadOnlyRoots(isolate).undefined_value();
  }

  // OSR only makes sense from an unoptimized (interpreter/baseline) frame.
  if (!it.frame()->is_unoptimized()) {
    // Nothing to be done.
    return ReadOnlyRoots(isolate).undefined_value();
  }

  // Ensure that the function is marked for non-concurrent optimization, so that
  // subsequent runs don't also optimize.
  if (FLAG_trace_osr) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[OSR - OptimizeOsr marking ");
    function->ShortPrint(scope.file());
    PrintF(scope.file(), " for non-concurrent optimization]\n");
  }
  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  JSFunction::EnsureFeedbackVector(isolate, function, &is_compiled_scope);
  function->MarkForOptimization(isolate, CodeKind::TURBOFAN,
                                ConcurrencyMode::kSynchronous);

  isolate->tiering_manager()->RequestOsrAtNextOpportunity(*function);

  // If concurrent OSR is enabled, the testing workflow is a bit tricky. We
  // must guarantee that the next JumpLoop installs the finished OSR'd code
  // object, but we still want to exercise concurrent code paths. To do so,
  // we attempt to find the next JumpLoop, start an OSR job for it now, and
  // immediately force finalization.
  // If this succeeds and we correctly match up the next JumpLoop, once we
  // reach the JumpLoop we'll hit the OSR cache and install the generated code.
  // If not (e.g. because we enter a nested loop first), the next JumpLoop will
  // see the cached OSR code with a mismatched offset, and trigger
  // non-concurrent OSR compilation and installation.
  if (isolate->concurrent_recompilation_enabled() && FLAG_concurrent_osr) {
    const BytecodeOffset osr_offset =
        OffsetOfNextJumpLoop(isolate, UnoptimizedFrame::cast(it.frame()));
    if (osr_offset.IsNone()) {
      // The loop may have been elided by bytecode generation (e.g. for
      // patterns such as `do { ... } while (false);`.
      return ReadOnlyRoots(isolate).undefined_value();
    }

    // Finalize first to ensure all pending tasks are done (since we can't
    // queue more than one OSR job for each function).
    FinalizeOptimization(isolate);

    // Queue the job.
    auto unused_result = Compiler::CompileOptimizedOSR(
        isolate, function, osr_offset, UnoptimizedFrame::cast(it.frame()),
        ConcurrencyMode::kConcurrent);
    USE(unused_result);

    // Finalize again to finish the queued job. The next call into
    // Runtime::kCompileOptimizedOSR will pick up the cached Code object.
    FinalizeOptimization(isolate);
  }

  return ReadOnlyRoots(isolate).undefined_value();
}
671
// %BaselineOsr(): compiles the topmost JavaScript function with Sparkplug
// (baseline), as the baseline analogue of %OptimizeOsr. No-op when
// --sparkplug/--use-osr are off or the frame is already optimized.
RUNTIME_FUNCTION(Runtime_BaselineOsr) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());

  // Find the JavaScript function on the top of the stack.
  JavaScriptFrameIterator it(isolate);
  Handle<JSFunction> function = handle(it.frame()->function(), isolate);
  if (function.is_null()) return CrashUnlessFuzzing(isolate);
  if (!FLAG_sparkplug || !FLAG_use_osr) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  if (!it.frame()->is_unoptimized()) {
    return ReadOnlyRoots(isolate).undefined_value();
  }

  IsCompiledScope is_compiled_scope(
      function->shared().is_compiled_scope(isolate));
  // Best-effort: the result of baseline compilation is deliberately ignored.
  Compiler::CompileBaseline(isolate, function, Compiler::CLEAR_EXCEPTION,
                            &is_compiled_scope);

  return ReadOnlyRoots(isolate).undefined_value();
}
694
// %NeverOptimizeFunction(fn): permanently disables optimization for |fn|'s
// SharedFunctionInfo (BailoutReason::kNeverOptimize). Only valid before the
// function has been compiled beyond the interpreter/builtin stage.
RUNTIME_FUNCTION(Runtime_NeverOptimizeFunction) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> function_object = args.at(0);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);
  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
  Handle<SharedFunctionInfo> sfi(function->shared(), isolate);
  // Only interpreted or builtin code can still be marked; anything already
  // compiled further is an input error (crash unless fuzzing).
  if (sfi->abstract_code(isolate).kind() != CodeKind::INTERPRETED_FUNCTION &&
      sfi->abstract_code(isolate).kind() != CodeKind::BUILTIN) {
    return CrashUnlessFuzzing(isolate);
  }
  // Make sure to finish compilation if there is a parallel lazy compilation in
  // progress, to make sure that the compilation finalization doesn't clobber
  // the SharedFunctionInfo's disable_optimization field.
  if (isolate->lazy_compile_dispatcher() &&
      isolate->lazy_compile_dispatcher()->IsEnqueued(sfi)) {
    isolate->lazy_compile_dispatcher()->FinishNow(sfi);
  }

  sfi->DisableOptimization(BailoutReason::kNeverOptimize);
  return ReadOnlyRoots(isolate).undefined_value();
}
717
// %GetOptimizationStatus(fn): returns a Smi bitfield of OptimizationStatus
// flags describing global optimization settings, |fn|'s tiering state and
// attached code kind, and — if |fn| is currently on the stack — the tier of
// its topmost activation. Passing undefined returns only the global bits.
RUNTIME_FUNCTION(Runtime_GetOptimizationStatus) {
  HandleScope scope(isolate);
  DCHECK_EQ(args.length(), 1);

  int status = 0;
  if (FLAG_lite_mode || FLAG_jitless) {
    // Both jitless and lite modes cannot optimize. Unit tests should handle
    // these the same way. In the future, the two flags may become synonyms.
    status |= static_cast<int>(OptimizationStatus::kLiteMode);
  }
  if (!isolate->use_optimizer()) {
    status |= static_cast<int>(OptimizationStatus::kNeverOptimize);
  }
  if (FLAG_always_opt || FLAG_prepare_always_opt) {
    status |= static_cast<int>(OptimizationStatus::kAlwaysOptimize);
  }
  if (FLAG_deopt_every_n_times) {
    status |= static_cast<int>(OptimizationStatus::kMaybeDeopted);
  }

  Handle<Object> function_object = args.at(0);
  // undefined means "only report the global flags above".
  if (function_object->IsUndefined()) return Smi::FromInt(status);
  if (!function_object->IsJSFunction()) return CrashUnlessFuzzing(isolate);

  Handle<JSFunction> function = Handle<JSFunction>::cast(function_object);
  status |= static_cast<int>(OptimizationStatus::kIsFunction);

  // Report the pending tiering request, if any.
  switch (function->tiering_state()) {
    case TieringState::kRequestTurbofan_Synchronous:
      status |= static_cast<int>(OptimizationStatus::kMarkedForOptimization);
      break;
    case TieringState::kRequestTurbofan_Concurrent:
      status |= static_cast<int>(
          OptimizationStatus::kMarkedForConcurrentOptimization);
      break;
    case TieringState::kInProgress:
      status |= static_cast<int>(OptimizationStatus::kOptimizingConcurrently);
      break;
    case TieringState::kNone:
    case TieringState::kRequestMaglev_Synchronous:
    case TieringState::kRequestMaglev_Concurrent:
      // TODO(v8:7700): Maglev support.
      break;
  }

  // Report the kind and health of the currently attached code object.
  if (function->HasAttachedOptimizedCode()) {
    CodeT code = function->code();
    if (code.marked_for_deoptimization()) {
      status |= static_cast<int>(OptimizationStatus::kMarkedForDeoptimization);
    } else {
      status |= static_cast<int>(OptimizationStatus::kOptimized);
    }
    if (code.is_maglevved()) {
      status |= static_cast<int>(OptimizationStatus::kMaglevved);
    } else if (code.is_turbofanned()) {
      status |= static_cast<int>(OptimizationStatus::kTurboFanned);
    }
  }
  if (function->HasAttachedCodeKind(CodeKind::BASELINE)) {
    status |= static_cast<int>(OptimizationStatus::kBaseline);
  }
  if (function->ActiveTierIsIgnition()) {
    status |= static_cast<int>(OptimizationStatus::kInterpreted);
  }

  // Additionally, detect activations of this frame on the stack, and report the
  // status of the topmost frame.
  JavaScriptFrame* frame = nullptr;
  JavaScriptFrameIterator it(isolate);
  while (!it.done()) {
    if (it.frame()->function() == *function) {
      frame = it.frame();
      break;
    }
    it.Advance();
  }
  if (frame != nullptr) {
    status |= static_cast<int>(OptimizationStatus::kIsExecuting);
    if (frame->is_optimized()) {
      status |=
          static_cast<int>(OptimizationStatus::kTopmostFrameIsTurboFanned);
    } else if (frame->is_interpreted()) {
      status |=
          static_cast<int>(OptimizationStatus::kTopmostFrameIsInterpreted);
    } else if (frame->is_baseline()) {
      status |= static_cast<int>(OptimizationStatus::kTopmostFrameIsBaseline);
    }
  }

  return Smi::FromInt(status);
}
809
// %DisableOptimizationFinalization(): drains in-flight concurrent compile
// jobs, then turns off automatic finalization so tests can control exactly
// when optimized code gets installed (see %FinalizeOptimization).
RUNTIME_FUNCTION(Runtime_DisableOptimizationFinalization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
    isolate->optimizing_compile_dispatcher()->InstallOptimizedFunctions();
    isolate->stack_guard()->ClearInstallCode();
    isolate->optimizing_compile_dispatcher()->set_finalize(false);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
820
// %WaitForBackgroundOptimization(): blocks until all background compile
// tasks have finished (without installing their results).
RUNTIME_FUNCTION(Runtime_WaitForBackgroundOptimization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    isolate->optimizing_compile_dispatcher()->AwaitCompileTasks();
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
828
// %FinalizeOptimization(): waits for background compile jobs, installs the
// resulting code, and re-enables automatic finalization.
RUNTIME_FUNCTION(Runtime_FinalizeOptimization) {
  DCHECK_EQ(0, args.length());
  if (isolate->concurrent_recompilation_enabled()) {
    FinalizeOptimization(isolate);
  }
  return ReadOnlyRoots(isolate).undefined_value();
}
836
// API callback: every invocation simply produces |null|. Used as the
// call-as-function handler for the undetectable object below.
static void ReturnNull(const v8::FunctionCallbackInfo<v8::Value>& args) {
  args.GetReturnValue().SetNull();
}
840
// Test-only intrinsic: returns a fresh object instantiated from a template
// that is marked undetectable and is callable (calls return null).
RUNTIME_FUNCTION(Runtime_GetUndetectable) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  // Switch to the public embedder API to build the template.
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  Local<v8::ObjectTemplate> desc = v8::ObjectTemplate::New(v8_isolate);
  desc->MarkAsUndetectable();
  desc->SetCallAsFunctionHandler(ReturnNull);
  Local<v8::Object> obj =
      desc->NewInstance(v8_isolate->GetCurrentContext()).ToLocalChecked();
  return *Utils::OpenHandle(*obj);
}
852
// API callback: converts its first two arguments to numbers and returns
// args[0] - args[1]. ToChecked() crashes on conversion failure, which is
// acceptable for this test-only helper.
static void call_as_function(const v8::FunctionCallbackInfo<v8::Value>& args) {
  double v1 =
      args[0]->NumberValue(args.GetIsolate()->GetCurrentContext()).ToChecked();
  double v2 =
      args[1]->NumberValue(args.GetIsolate()->GetCurrentContext()).ToChecked();
  args.GetReturnValue().Set(v8::Number::New(args.GetIsolate(), v1 - v2));
}
860
861 // Returns a callable object. The object returns the difference of its two
862 // parameters when it is called.
RUNTIME_FUNCTION(Runtime_GetCallable)863 RUNTIME_FUNCTION(Runtime_GetCallable) {
864 HandleScope scope(isolate);
865 DCHECK_EQ(0, args.length());
866 v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
867 Local<v8::FunctionTemplate> t = v8::FunctionTemplate::New(v8_isolate);
868 Local<ObjectTemplate> instance_template = t->InstanceTemplate();
869 instance_template->SetCallAsFunctionHandler(call_as_function);
870 v8_isolate->GetCurrentContext();
871 Local<v8::Object> instance =
872 t->GetFunction(v8_isolate->GetCurrentContext())
873 .ToLocalChecked()
874 ->NewInstance(v8_isolate->GetCurrentContext())
875 .ToLocalChecked();
876 return *Utils::OpenHandle(*instance);
877 }
878
// Test-only intrinsic: clears the type feedback collected for the given
// JSFunction, resetting it to an uninitialized state.
RUNTIME_FUNCTION(Runtime_ClearFunctionFeedback) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSFunction> function = args.at<JSFunction>(0);
  function->ClearTypeFeedbackInfo();
  return ReadOnlyRoots(isolate).undefined_value();
}
886
// Test-only intrinsic: notifies the heap that a context was disposed.
// The |true| argument's exact semantics are defined by
// Heap::NotifyContextDisposed (presumably "dependant context" — see heap.h).
RUNTIME_FUNCTION(Runtime_NotifyContextDisposed) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  isolate->heap()->NotifyContextDisposed(true);
  return ReadOnlyRoots(isolate).undefined_value();
}
893
// Test-only intrinsic: configures GC stress knobs.
//   arg 0: gc interval (DEBUG builds only — sets FLAG_gc_interval)
//   arg 1: allocation timeout (only with V8_ENABLE_ALLOCATION_TIMEOUT)
//   arg 2: optional boolean toggling inline allocation (DEBUG only)
// The CONVERT_*_FUZZ_SAFE macros bail out gracefully under fuzzing.
RUNTIME_FUNCTION(Runtime_SetAllocationTimeout) {
  SealHandleScope shs(isolate);
  DCHECK(args.length() == 2 || args.length() == 3);
#ifdef V8_ENABLE_ALLOCATION_TIMEOUT
  CONVERT_INT32_ARG_FUZZ_SAFE(timeout, 1);
  isolate->heap()->set_allocation_timeout(timeout);
#endif
#ifdef DEBUG
  CONVERT_INT32_ARG_FUZZ_SAFE(interval, 0);
  FLAG_gc_interval = interval;
  if (args.length() == 3) {
    // Enable/disable inline allocation if requested.
    CONVERT_BOOLEAN_ARG_FUZZ_SAFE(inline_allocation, 2);
    if (inline_allocation) {
      isolate->heap()->EnableInlineAllocation();
    } else {
      isolate->heap()->DisableInlineAllocation();
    }
  }
#endif
  return ReadOnlyRoots(isolate).undefined_value();
}
916
namespace {

// Returns the FixedArray length that fits into |size| bytes, capped at the
// maximum regular (non-large-object) FixedArray length.
int FixedArrayLenFromSize(int size) {
  return std::min({(size - FixedArray::kHeaderSize) / kTaggedSize,
                   FixedArray::kMaxRegularLength});
}
923
// Fills the remainder of the current new-space page with young FixedArrays,
// finishing with a filler object when the leftover space is too small for
// another array. Allocation observers are paused so the padding allocations
// do not trigger observer callbacks.
void FillUpOneNewSpacePage(Isolate* isolate, Heap* heap) {
  DCHECK(!FLAG_single_generation);
  PauseAllocationObserversScope pause_observers(heap);
  NewSpace* space = heap->new_space();
  // We cannot rely on `space->limit()` to point to the end of the current page
  // in the case where inline allocations are disabled, it actually points to
  // the current allocation pointer.
  DCHECK_IMPLIES(!space->IsInlineAllocationEnabled(),
                 space->limit() == space->top());
  int space_remaining =
      static_cast<int>(space->to_space().page_high() - space->top());
  while (space_remaining > 0) {
    int length = FixedArrayLenFromSize(space_remaining);
    if (length > 0) {
      Handle<FixedArray> padding =
          isolate->factory()->NewFixedArray(length, AllocationType::kYoung);
      DCHECK(heap->new_space()->Contains(*padding));
      // Subtract the actual allocated size; lengths are capped, so several
      // iterations may be needed to consume the page.
      space_remaining -= padding->Size();
    } else {
      // Not enough room to create another fixed array. Create a filler.
      heap->CreateFillerObjectAt(*heap->new_space()->allocation_top_address(),
                                 space_remaining, ClearRecordedSlots::kNo);
      break;
    }
  }
}

} // namespace
952
// Test-only intrinsic: fills new space completely by padding the current page
// and advancing to fresh pages until no more can be added.
RUNTIME_FUNCTION(Runtime_SimulateNewspaceFull) {
  HandleScope scope(isolate);
  Heap* heap = isolate->heap();
  NewSpace* space = heap->new_space();
  // Ensure the padding allocations themselves cannot fail.
  AlwaysAllocateScopeForTesting always_allocate(heap);
  do {
    FillUpOneNewSpacePage(isolate, heap);
  } while (space->AddFreshPage());

  return ReadOnlyRoots(isolate).undefined_value();
}
964
// Test-only intrinsic: requests an interrupt whose handler triggers a full GC
// at the next stack check, rather than synchronously.
RUNTIME_FUNCTION(Runtime_ScheduleGCInStackCheck) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  isolate->RequestInterrupt(
      [](v8::Isolate* isolate, void*) {
        isolate->RequestGarbageCollectionForTesting(
            v8::Isolate::kFullGarbageCollection);
      },
      nullptr);
  return ReadOnlyRoots(isolate).undefined_value();
}
976
// v8::OutputStream implementation that writes chunks to a file via
// std::ofstream. Used below to serialize heap snapshots to disk.
class FileOutputStream : public v8::OutputStream {
 public:
  explicit FileOutputStream(const char* filename) : os_(filename) {}
  ~FileOutputStream() override { os_.close(); }

  // Writes one chunk; always continues (write errors are not surfaced, which
  // is acceptable for this test-only stream).
  WriteResult WriteAsciiChunk(char* data, int size) override {
    os_.write(data, size);
    return kContinue;
  }

  void EndOfStream() override { os_.close(); }

 private:
  std::ofstream os_;
};
992
// Test-only intrinsic: serializes a heap snapshot as JSON to a file.
// Optional arg 0 is the filename (default "heap.heapsnapshot").
// Disabled entirely under --fuzzing.
RUNTIME_FUNCTION(Runtime_TakeHeapSnapshot) {
  if (FLAG_fuzzing) {
    // We don't want to create snapshots in fuzzers.
    return ReadOnlyRoots(isolate).undefined_value();
  }

  std::string filename = "heap.heapsnapshot";

  if (args.length() >= 1) {
    HandleScope hs(isolate);
    Handle<String> filename_as_js_string = args.at<String>(0);
    std::unique_ptr<char[]> buffer = filename_as_js_string->ToCString();
    filename = std::string(buffer.get());
  }

  HeapProfiler* heap_profiler = isolate->heap_profiler();
  // Since this API is intended for V8 devs, we do not treat globals as roots
  // here on purpose.
  HeapSnapshot* snapshot = heap_profiler->TakeSnapshot(
      /* control = */ nullptr, /* resolver = */ nullptr,
      /* treat_global_objects_as_roots = */ false,
      /* capture_numeric_value = */ true);
  FileOutputStream stream(filename.c_str());
  HeapSnapshotJSONSerializer serializer(snapshot);
  serializer.Serialize(&stream);
  return ReadOnlyRoots(isolate).undefined_value();
}
1020
// Prints |maybe_object| to stdout: "[weak cleared]" for cleared weak refs,
// otherwise the full object (plus its map) in OBJECT_PRINT builds or a
// Brief() one-liner in release builds. Weak references get a "[weak]" prefix.
static void DebugPrintImpl(MaybeObject maybe_object) {
  StdoutStream os;
  if (maybe_object->IsCleared()) {
    os << "[weak cleared]";
  } else {
    Object object = maybe_object.GetHeapObjectOrSmi();
    bool weak = maybe_object.IsWeak();

#ifdef OBJECT_PRINT
    os << "DebugPrint: ";
    if (weak) os << "[weak] ";
    object.Print(os);
    if (object.IsHeapObject()) {
      HeapObject::cast(object).map().Print(os);
    }
#else
    if (weak) os << "[weak] ";
    // ShortPrint is available in release mode. Print is not.
    os << Brief(object);
#endif
  }
  os << std::endl;
}
1044
// Test-only intrinsic: prints its single argument via DebugPrintImpl and
// returns it unchanged, so it can be spliced into expressions.
RUNTIME_FUNCTION(Runtime_DebugPrint) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());

  // Reinterpret the raw argument slot as a MaybeObject to support weak refs.
  MaybeObject maybe_object(*args.address_of_arg_at(0));
  DebugPrintImpl(maybe_object);
  return args[0];
}
1053
// Test-only intrinsic: treats its argument as a numeric address, reinterprets
// that address as a MaybeObject and prints it. Returns the original argument
// so the raw pointer never escapes to JavaScript.
RUNTIME_FUNCTION(Runtime_DebugPrintPtr) {
  SealHandleScope shs(isolate);
  StdoutStream os;
  DCHECK_EQ(1, args.length());

  MaybeObject maybe_object(*args.address_of_arg_at(0));
  if (!maybe_object.IsCleared()) {
    Object object = maybe_object.GetHeapObjectOrSmi();
    size_t pointer;
    if (object.ToIntegerIndex(&pointer)) {
      MaybeObject from_pointer(static_cast<Address>(pointer));
      DebugPrintImpl(from_pointer);
    }
  }
  // We don't allow the converted pointer to leak out to JavaScript.
  return args[0];
}
1071
// Test-only intrinsic: prints " * <name>: <value>" to stdout, where arg 0 is
// a String label and arg 1 is any value (ShortPrint-ed).
RUNTIME_FUNCTION(Runtime_PrintWithNameForAssert) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());

  auto name = String::cast(args[0]);

  PrintF(" * ");
  // Stream the characters one at a time; avoids flattening the string.
  StringCharacterStream stream(name);
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    PrintF("%c", character);
  }
  PrintF(": ");
  args[1].ShortPrint();
  PrintF("\n");

  return ReadOnlyRoots(isolate).undefined_value();
}
1090
// Test-only intrinsic: dumps the current JS stack trace to stdout.
RUNTIME_FUNCTION(Runtime_DebugTrace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  isolate->PrintStack(stdout);
  return ReadOnlyRoots(isolate).undefined_value();
}
1097
// Test-only intrinsic: registers arg 0 as a retaining-path target for the
// heap (requires --track-retaining-path). Optional arg 1 must be either the
// string "track-ephemeron-path" or the empty string.
RUNTIME_FUNCTION(Runtime_DebugTrackRetainingPath) {
  HandleScope scope(isolate);
  DCHECK_LE(1, args.length());
  DCHECK_GE(2, args.length());
  CHECK(FLAG_track_retaining_path);
  Handle<HeapObject> object = args.at<HeapObject>(0);
  RetainingPathOption option = RetainingPathOption::kDefault;
  if (args.length() == 2) {
    Handle<String> str = args.at<String>(1);
    const char track_ephemeron_path[] = "track-ephemeron-path";
    if (str->IsOneByteEqualTo(base::StaticCharVector(track_ephemeron_path))) {
      option = RetainingPathOption::kTrackEphemeronPath;
    } else {
      // Any other non-empty option string is a caller bug.
      CHECK_EQ(str->length(), 0);
    }
  }
  isolate->heap()->AddRetainingPathTarget(object, option);
  return ReadOnlyRoots(isolate).undefined_value();
}
1117
// This will not allocate (flatten the string), but it may run
// very slowly for very deeply nested ConsStrings. For debugging use only.
// Prints the string argument to stdout character-by-character and returns it.
RUNTIME_FUNCTION(Runtime_GlobalPrint) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());

  auto string = String::cast(args[0]);
  StringCharacterStream stream(string);
  while (stream.HasMore()) {
    uint16_t character = stream.GetNext();
    PrintF("%c", character);
  }
  return string;
}
1132
// Test-only intrinsic: triggers a debugger breakpoint (e.g. int3).
RUNTIME_FUNCTION(Runtime_SystemBreak) {
  // The code below doesn't create handles, but when breaking here in GDB
  // having a handle scope might be useful.
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  base::OS::DebugBreak();
  return ReadOnlyRoots(isolate).undefined_value();
}
1141
// Test-only intrinsic: toggles the isolate-wide force-slow-path flag.
// The single argument must be exactly true or false.
RUNTIME_FUNCTION(Runtime_SetForceSlowPath) {
  SealHandleScope seal(isolate);
  DCHECK_EQ(1, args.length());
  Object arg = args[0];
  const bool enable = arg.IsTrue(isolate);
  // Anything other than a boolean oddball is a caller bug.
  DCHECK_IMPLIES(!enable, arg.IsFalse(isolate));
  isolate->set_force_slow_path(enable);
  return ReadOnlyRoots(isolate).undefined_value();
}
1154
// Aborts the process with the AbortReason identified by the Smi argument,
// printing the reason and the current stack to stderr first.
RUNTIME_FUNCTION(Runtime_Abort) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  int message_id = args.smi_value_at(0);
  const char* message = GetAbortReason(static_cast<AbortReason>(message_id));
  base::OS::PrintError("abort: %s\n", message);
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1165
// Aborts the process with a user-supplied message string. With
// --disable-abortjs only logs the message and returns.
RUNTIME_FUNCTION(Runtime_AbortJS) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> message = args.at<String>(0);
  if (FLAG_disable_abortjs) {
    base::OS::PrintError("[disabled] abort: %s\n", message->ToCString().get());
    return Object();
  }
  base::OS::PrintError("abort: %s\n", message->ToCString().get());
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1179
// Abort handler for failed CSA_DCHECKs in CodeStubAssembler-generated code:
// prints the failure message and the stack, then aborts.
RUNTIME_FUNCTION(Runtime_AbortCSADcheck) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<String> message = args.at<String>(0);
  base::OS::PrintError("abort: CSA_DCHECK failed: %s\n",
                       message->ToCString().get());
  isolate->PrintStack(stderr);
  base::OS::Abort();
  UNREACHABLE();
}
1190
// Test-only intrinsic (DEBUG builds only): ensures the given function is
// compiled — preferring already-available optimized code — and prints its
// code object to stdout. Release builds are a no-op.
RUNTIME_FUNCTION(Runtime_DisassembleFunction) {
  HandleScope scope(isolate);
#ifdef DEBUG
  DCHECK_EQ(1, args.length());
  // Get the function and make sure it is compiled.
  Handle<JSFunction> func = args.at<JSFunction>(0);
  IsCompiledScope is_compiled_scope;
  if (!func->is_compiled() && func->HasAvailableOptimizedCode()) {
    func->set_code(func->feedback_vector().optimized_code());
  }
  CHECK(func->is_compiled() ||
        Compiler::Compile(isolate, func, Compiler::KEEP_EXCEPTION,
                          &is_compiled_scope));
  StdoutStream os;
  func->code().Print(os);
  os << std::endl;
#endif // DEBUG
  return ReadOnlyRoots(isolate).undefined_value();
}
1210
1211 namespace {
1212
StackSize(Isolate * isolate)1213 int StackSize(Isolate* isolate) {
1214 int n = 0;
1215 for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) n++;
1216 return n;
1217 }
1218
// Prints "<depth>:" followed by |stack_size| spaces of indentation, capped at
// 80 columns (deeper stacks get "..." instead of ever-growing indents).
void PrintIndentation(int stack_size) {
  const int max_display = 80;
  if (stack_size <= max_display) {
    // "%*s" with an empty string emits stack_size spaces.
    PrintF("%4d:%*s", stack_size, stack_size, "");
  } else {
    PrintF("%4d:%*s", stack_size, max_display, "...");
  }
}

} // namespace
1229
// Test-only intrinsic: prints an indented " <top frame> {" line on function
// entry, used for call tracing.
RUNTIME_FUNCTION(Runtime_TraceEnter) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  PrintIndentation(StackSize(isolate));
  JavaScriptFrame::PrintTop(isolate, stdout, true, false);
  PrintF(" {\n");
  return ReadOnlyRoots(isolate).undefined_value();
}
1238
// Test-only intrinsic: prints an indented "} -> <value>" line on function
// exit and passes the value (top-of-stack) through unchanged.
RUNTIME_FUNCTION(Runtime_TraceExit) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Object obj = args[0];
  PrintIndentation(StackSize(isolate));
  PrintF("} -> ");
  obj.ShortPrint();
  PrintF("\n");
  return obj; // return TOS
}
1249
// Test-only intrinsic: true iff the two JSObject arguments share a map.
RUNTIME_FUNCTION(Runtime_HaveSameMap) {
  SealHandleScope seal(isolate);
  DCHECK_EQ(2, args.length());
  JSObject first = JSObject::cast(args[0]);
  JSObject second = JSObject::cast(args[1]);
  const bool maps_match = first.map() == second.map();
  return isolate->heap()->ToBoolean(maps_match);
}
1257
// Test-only intrinsic: true iff the object lives in any large-object space
// (young, code, or old).
RUNTIME_FUNCTION(Runtime_InLargeObjectSpace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  auto obj = HeapObject::cast(args[0]);
  return isolate->heap()->ToBoolean(
      isolate->heap()->new_lo_space()->Contains(obj) ||
      isolate->heap()->code_lo_space()->Contains(obj) ||
      isolate->heap()->lo_space()->Contains(obj));
}
1267
// Test-only intrinsic: true iff the JSArray's elements backing store lives in
// a (young or old) large-object space. Note: code_lo_space is intentionally
// not checked — elements never live there.
RUNTIME_FUNCTION(Runtime_HasElementsInALargeObjectSpace) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  auto array = JSArray::cast(args[0]);
  FixedArrayBase elements = array.elements();
  return isolate->heap()->ToBoolean(
      isolate->heap()->new_lo_space()->Contains(elements) ||
      isolate->heap()->lo_space()->Contains(elements));
}
1277
// Test-only intrinsic: true iff the argument currently resides in the young
// generation.
RUNTIME_FUNCTION(Runtime_InYoungGeneration) {
  SealHandleScope seal(isolate);
  DCHECK_EQ(1, args.length());
  const bool is_young = ObjectInYoungGeneration(args[0]);
  return isolate->heap()->ToBoolean(is_young);
}
1284
// Force pretenuring for the allocation site the passed object belongs to.
// Returns true on success, false when the object is already tenured or has no
// allocation memento. Argument errors crash (or bail) via CrashUnlessFuzzing.
RUNTIME_FUNCTION(Runtime_PretenureAllocationSite) {
  DisallowGarbageCollection no_gc;

  if (args.length() != 1) return CrashUnlessFuzzing(isolate);
  Object arg = args[0];
  if (!arg.IsJSObject()) return CrashUnlessFuzzing(isolate);
  JSObject object = JSObject::cast(arg);

  Heap* heap = object.GetHeap();
  if (!heap->InYoungGeneration(object)) {
    // Object is not in new space, thus there is no memento and nothing to do.
    return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate);
  }

  // The memento, if any, sits directly behind the object in new space.
  AllocationMemento memento =
      heap->FindAllocationMemento<Heap::kForRuntime>(object.map(), object);
  if (memento.is_null())
    return ReturnFuzzSafe(ReadOnlyRoots(isolate).false_value(), isolate);
  AllocationSite site = memento.GetAllocationSite();
  heap->PretenureAllocationSiteOnNextCollection(site);
  return ReturnFuzzSafe(ReadOnlyRoots(isolate).true_value(), isolate);
}
1308
namespace {

// Embedder callback that rejects all codegen-from-strings (eval/Function)
// requests; returning {false, {}} disallows codegen with no modified source.
v8::ModifyCodeGenerationFromStringsResult DisallowCodegenFromStringsCallback(
    v8::Local<v8::Context> context, v8::Local<v8::Value> source,
    bool is_code_kind) {
  return {false, {}};
}

} // namespace
1318
// Test-only intrinsic: installs (arg == true) or removes (arg == false) the
// callback that blocks eval/Function codegen from strings.
RUNTIME_FUNCTION(Runtime_DisallowCodegenFromStrings) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  bool flag = Oddball::cast(args[0]).ToBool(isolate);
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  v8_isolate->SetModifyCodeGenerationFromStringsCallback(
      flag ? DisallowCodegenFromStringsCallback : nullptr);
  return ReadOnlyRoots(isolate).undefined_value();
}
1328
// Test-only intrinsic: true iff the regexp is IRREGEXP-compiled and has
// interpreter bytecode for the requested (latin1 vs two-byte) subject kind.
RUNTIME_FUNCTION(Runtime_RegexpHasBytecode) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  bool is_latin1 = Oddball::cast(args[1]).ToBool(isolate);
  bool result;
  if (regexp.type_tag() == JSRegExp::IRREGEXP) {
    result = regexp.bytecode(is_latin1).IsByteArray();
  } else {
    result = false;
  }
  return isolate->heap()->ToBoolean(result);
}
1342
// Test-only intrinsic: true iff the regexp is IRREGEXP-compiled and has
// native code for the requested (latin1 vs two-byte) subject kind.
RUNTIME_FUNCTION(Runtime_RegexpHasNativeCode) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(2, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  bool is_latin1 = Oddball::cast(args[1]).ToBool(isolate);
  bool result;
  if (regexp.type_tag() == JSRegExp::IRREGEXP) {
    result = regexp.code(is_latin1).IsCodeT();
  } else {
    result = false;
  }
  return isolate->heap()->ToBoolean(result);
}
1356
// Test-only intrinsic: returns the regexp's type tag as an ASCII string
// ("NOT_COMPILED" / "ATOM" / "IRREGEXP" / "EXPERIMENTAL"). The switch has no
// default case on purpose so new tags trigger a compiler warning.
RUNTIME_FUNCTION(Runtime_RegexpTypeTag) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  auto regexp = JSRegExp::cast(args[0]);
  const char* type_str;
  switch (regexp.type_tag()) {
    case JSRegExp::NOT_COMPILED:
      type_str = "NOT_COMPILED";
      break;
    case JSRegExp::ATOM:
      type_str = "ATOM";
      break;
    case JSRegExp::IRREGEXP:
      type_str = "IRREGEXP";
      break;
    case JSRegExp::EXPERIMENTAL:
      type_str = "EXPERIMENTAL";
      break;
  }
  return *isolate->factory()->NewStringFromAsciiChecked(type_str);
}
1378
// Test-only intrinsic: true iff the regexp (and its prototype chain) is still
// in its original, unmodified state per RegExp::IsUnmodifiedRegExp.
RUNTIME_FUNCTION(Runtime_RegexpIsUnmodified) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSRegExp> regexp = args.at<JSRegExp>(0);
  return isolate->heap()->ToBoolean(
      RegExp::IsUnmodifiedRegExp(isolate, regexp));
}
1386
// Stamps out one test-only intrinsic per elements-kind predicate; each
// Runtime_Has* simply forwards to the corresponding JSObject::Has*() query.
#define ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(Name) \
  RUNTIME_FUNCTION(Runtime_##Name) { \
    auto obj = JSObject::cast(args[0]); \
    return isolate->heap()->ToBoolean(obj.Name()); \
  }

ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasFastElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSmiElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSmiOrObjectElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasDoubleElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasHoleyElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasDictionaryElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasPackedElements)
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasSloppyArgumentsElements)
// Properties test sitting with elements tests - not fooling anyone.
ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION(HasFastProperties)

#undef ELEMENTS_KIND_CHECK_RUNTIME_FUNCTION
1406
// Stamps out one Runtime_HasFixed<Type>Elements intrinsic per typed-array
// kind (the TYPED_ARRAYS X-macro supplies Type/type/TYPE/ctype tuples).
#define FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION(Type, type, TYPE, ctype) \
  RUNTIME_FUNCTION(Runtime_HasFixed##Type##Elements) { \
    auto obj = JSObject::cast(args[0]); \
    return isolate->heap()->ToBoolean(obj.HasFixed##Type##Elements()); \
  }

TYPED_ARRAYS(FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION)

#undef FIXED_TYPED_ARRAYS_CHECK_RUNTIME_FUNCTION
1416
// The following intrinsics each report whether one of V8's protector cells
// (fast-path invariants invalidated by user monkey-patching) is still intact.
RUNTIME_FUNCTION(Runtime_IsConcatSpreadableProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsIsConcatSpreadableLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_TypedArraySpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsTypedArraySpeciesLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_RegExpSpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsRegExpSpeciesLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_PromiseSpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsPromiseSpeciesLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_ArraySpeciesProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsArraySpeciesLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_MapIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsMapIteratorLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_SetIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsSetIteratorLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_StringIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsStringIteratorLookupChainIntact(isolate));
}

RUNTIME_FUNCTION(Runtime_ArrayIteratorProtector) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return isolate->heap()->ToBoolean(
      Protectors::IsArrayIteratorLookupChainIntact(isolate));
}
// For use by tests and fuzzers. It
//
// 1. serializes a snapshot of the current isolate,
// 2. deserializes the snapshot,
// 3. and runs VerifyHeap on the resulting isolate.
//
// The current isolate should not be modified by this call and can keep running
// once it completes.
RUNTIME_FUNCTION(Runtime_SerializeDeserializeNow) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  Snapshot::SerializeDeserializeAndVerifyForTesting(isolate,
                                                    isolate->native_context());
  return ReadOnlyRoots(isolate).undefined_value();
}
1494
// Test-only intrinsic: verifies a single object. Full verification in
// VERIFY_HEAP builds; a minimal map/Smi sanity check otherwise. Always
// returns true (failures CHECK-crash).
RUNTIME_FUNCTION(Runtime_HeapObjectVerify) {
  HandleScope shs(isolate);
  DCHECK_EQ(1, args.length());
  Handle<Object> object = args.at(0);
#ifdef VERIFY_HEAP
  object->ObjectVerify(isolate);
#else
  CHECK(object->IsObject());
  if (object->IsHeapObject()) {
    CHECK(HeapObject::cast(*object).map().IsMap());
  } else {
    CHECK(object->IsSmi());
  }
#endif
  return isolate->heap()->ToBoolean(true);
}
1511
// Test-only intrinsic: returns JSArrayBuffer::kMaxByteLength as a Number.
RUNTIME_FUNCTION(Runtime_ArrayBufferMaxByteLength) {
  HandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(JSArrayBuffer::kMaxByteLength);
}
1517
// Test-only intrinsic: returns JSTypedArray::kMaxLength as a Number.
RUNTIME_FUNCTION(Runtime_TypedArrayMaxLength) {
  HandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(JSTypedArray::kMaxLength);
}
1523
// Test-only intrinsic: immediately finishes in-object slack tracking for the
// object's map instead of waiting for the usual construction count.
RUNTIME_FUNCTION(Runtime_CompleteInobjectSlackTracking) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());

  Handle<JSObject> object = args.at<JSObject>(0);
  MapUpdater::CompleteInobjectSlackTracking(isolate, object->map());

  return ReadOnlyRoots(isolate).undefined_value();
}
1533
// Runtime fallback for %TurbofanStaticAssert: a no-op when executed
// unoptimized. In optimized code the assert is checked at compile time.
RUNTIME_FUNCTION(Runtime_TurbofanStaticAssert) {
  SealHandleScope shs(isolate);
  // Always lowered to StaticAssert node in Turbofan, so we never get here in
  // compiled code.
  return ReadOnlyRoots(isolate).undefined_value();
}
1540
// Returns true when reached, which only happens in unoptimized execution.
RUNTIME_FUNCTION(Runtime_IsBeingInterpreted) {
  SealHandleScope shs(isolate);
  // Always lowered to false in Turbofan, so we never get here in compiled code.
  return ReadOnlyRoots(isolate).true_value();
}
1546
// Test-only intrinsic: registers a do-nothing code-event listener so that the
// code-logging paths are exercised (is_listening_to_code_events() == true)
// without producing any output. The listener is leaked intentionally.
RUNTIME_FUNCTION(Runtime_EnableCodeLoggingForTesting) {
  // The {NoopListener} currently does nothing on any callback, but reports
  // {true} on {is_listening_to_code_events()}. Feel free to add assertions to
  // any method to further test the code logging callbacks.
  class NoopListener final : public CodeEventListener {
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         const char* name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<Name> name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<SharedFunctionInfo> shared,
                         Handle<Name> script_name) final {}
    void CodeCreateEvent(LogEventsAndTags tag, Handle<AbstractCode> code,
                         Handle<SharedFunctionInfo> shared,
                         Handle<Name> script_name, int line, int column) final {
    }
#if V8_ENABLE_WEBASSEMBLY
    void CodeCreateEvent(LogEventsAndTags tag, const wasm::WasmCode* code,
                         wasm::WasmName name, const char* source_url,
                         int code_offset, int script_id) final {}
#endif // V8_ENABLE_WEBASSEMBLY

    void CallbackEvent(Handle<Name> name, Address entry_point) final {}
    void GetterCallbackEvent(Handle<Name> name, Address entry_point) final {}
    void SetterCallbackEvent(Handle<Name> name, Address entry_point) final {}
    void RegExpCodeCreateEvent(Handle<AbstractCode> code,
                               Handle<String> source) final {}
    void CodeMoveEvent(AbstractCode from, AbstractCode to) final {}
    void SharedFunctionInfoMoveEvent(Address from, Address to) final {}
    void NativeContextMoveEvent(Address from, Address to) final {}
    void CodeMovingGCEvent() final {}
    void CodeDisableOptEvent(Handle<AbstractCode> code,
                             Handle<SharedFunctionInfo> shared) final {}
    void CodeDeoptEvent(Handle<Code> code, DeoptimizeKind kind, Address pc,
                        int fp_to_sp_delta) final {}
    void CodeDependencyChangeEvent(Handle<Code> code,
                                   Handle<SharedFunctionInfo> shared,
                                   const char* reason) final {}
    void WeakCodeClearEvent() final {}

    bool is_listening_to_code_events() final { return true; }
  };
  // Leaky singleton: the dispatcher keeps a raw pointer, so the listener must
  // outlive the isolate.
  static base::LeakyObject<NoopListener> noop_listener;
#if V8_ENABLE_WEBASSEMBLY
  wasm::GetWasmEngine()->EnableCodeLogging(isolate);
#endif // V8_ENABLE_WEBASSEMBLY
  isolate->code_event_dispatcher()->AddListener(noop_listener.get());
  return ReadOnlyRoots(isolate).undefined_value();
}
1596
// Test-only intrinsic: constructs a JSRegExp from (pattern, flags-string,
// backtrack-limit) so tests can exercise the backtrack-limit bailout.
RUNTIME_FUNCTION(Runtime_NewRegExpWithBacktrackLimit) {
  HandleScope scope(isolate);
  DCHECK_EQ(3, args.length());

  Handle<String> pattern = args.at<String>(0);
  Handle<String> flags_string = args.at<String>(1);
  uint32_t backtrack_limit = args.positive_smi_value_at(2);

  // .value() asserts the flags string parsed successfully.
  JSRegExp::Flags flags =
      JSRegExp::FlagsFromString(isolate, flags_string).value();

  RETURN_RESULT_OR_FAILURE(
      isolate, JSRegExp::New(isolate, pattern, flags, backtrack_limit));
}
1611
// Test-only intrinsic: true on builds with 64-bit system pointers.
RUNTIME_FUNCTION(Runtime_Is64Bit) {
  SealHandleScope seal(isolate);
  DCHECK_EQ(0, args.length());
  const bool is_64_bit = kSystemPointerSize == 8;
  return isolate->heap()->ToBoolean(is_64_bit);
}
1617
// Test-only intrinsic: returns BigInt::kMaxLengthBits as a Number.
RUNTIME_FUNCTION(Runtime_BigIntMaxLengthBits) {
  HandleScope scope(isolate);
  DCHECK_EQ(0, args.length());
  return *isolate->factory()->NewNumber(BigInt::kMaxLengthBits);
}
1623
// Test-only intrinsic: true iff both arguments are the very same heap object
// (address identity, not value equality).
RUNTIME_FUNCTION(Runtime_IsSameHeapObject) {
  HandleScope scope(isolate);
  DCHECK_EQ(2, args.length());
  Handle<HeapObject> obj1 = args.at<HeapObject>(0);
  Handle<HeapObject> obj2 = args.at<HeapObject>(1);
  return isolate->heap()->ToBoolean(obj1->address() == obj2->address());
}
1631
// Test-only intrinsic: true iff the argument is a String residing in the
// shared heap (String::IsShared).
RUNTIME_FUNCTION(Runtime_IsSharedString) {
  HandleScope scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<HeapObject> obj = args.at<HeapObject>(0);
  return isolate->heap()->ToBoolean(obj->IsString() &&
                                    Handle<String>::cast(obj)->IsShared());
}
1639
// Test-only intrinsic: serializes arg 0 into a web snapshot, returning a
// Managed<WebSnapshotData> wrapper. Optional arg 1 is a JSArray block list of
// objects to externalize; on return it is replaced in place with the full
// externals list when the serializer discovered more external objects.
RUNTIME_FUNCTION(Runtime_WebSnapshotSerialize) {
  if (!FLAG_allow_natives_syntax) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  HandleScope scope(isolate);
  if (args.length() < 1 || args.length() > 2) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kRuntimeWrongNumArgs));
  }
  Handle<Object> object = args.at(0);
  Handle<FixedArray> block_list = isolate->factory()->empty_fixed_array();
  Handle<JSArray> block_list_js_array;
  if (args.length() == 2) {
    if (!args[1].IsJSArray()) {
      THROW_NEW_ERROR_RETURN_FAILURE(
          isolate, NewTypeError(MessageTemplate::kInvalidArgument));
    }
    block_list_js_array = args.at<JSArray>(1);
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, block_list,
        JSReceiver::GetOwnValues(block_list_js_array,
                                 PropertyFilter::ENUMERABLE_STRINGS));
  }

  auto snapshot_data = std::make_shared<WebSnapshotData>();
  WebSnapshotSerializer serializer(isolate);
  if (!serializer.TakeSnapshot(object, block_list, *snapshot_data)) {
    DCHECK(isolate->has_pending_exception());
    return ReadOnlyRoots(isolate).exception();
  }
  // If the serializer found more external objects than the caller listed,
  // rewrite the caller's array to hold the complete externals list.
  if (!block_list_js_array.is_null() &&
      static_cast<uint32_t>(block_list->length()) <
          serializer.external_objects_count()) {
    Handle<FixedArray> externals = serializer.GetExternals();
    Handle<Map> map = JSObject::GetElementsTransitionMap(block_list_js_array,
                                                         PACKED_ELEMENTS);
    block_list_js_array->set_elements(*externals);
    block_list_js_array->set_length(Smi::FromInt(externals->length()));
    block_list_js_array->set_map(*map);
  }
  // Wrap the snapshot buffer so its lifetime is managed by the GC.
  i::Handle<i::Object> managed_object = Managed<WebSnapshotData>::FromSharedPtr(
      isolate, snapshot_data->buffer_size, snapshot_data);
  return *managed_object;
}
1684
// Testing intrinsic: %WebSnapshotDeserialize(snapshotData[, injectedRefs]).
// Deserializes a snapshot produced by %WebSnapshotSerialize (passed as the
// Managed<WebSnapshotData> Foreign it returned) and returns the root object.
// |injectedRefs| (a JSArray) optionally supplies objects to substitute for
// the snapshot's external references.
RUNTIME_FUNCTION(Runtime_WebSnapshotDeserialize) {
  // No-op unless natives syntax is enabled; returns undefined rather than
  // throwing so stray calls are harmless.
  if (!FLAG_allow_natives_syntax) {
    return ReadOnlyRoots(isolate).undefined_value();
  }
  HandleScope scope(isolate);
  // Arity and argument types are validated with thrown TypeErrors (not
  // DCHECKs), so malformed calls fail gracefully at runtime.
  if (args.length() == 0 || args.length() > 2) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kRuntimeWrongNumArgs));
  }
  if (!args[0].IsForeign()) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kInvalidArgument));
  }
  Handle<Foreign> foreign_data = args.at<Foreign>(0);
  Handle<FixedArray> injected_references =
      isolate->factory()->empty_fixed_array();
  if (args.length() == 2) {
    if (!args[1].IsJSArray()) {
      THROW_NEW_ERROR_RETURN_FAILURE(
          isolate, NewTypeError(MessageTemplate::kInvalidArgument));
    }
    auto js_array = args.at<JSArray>(1);
    // Flatten the JS array's enumerable string-keyed values into a
    // FixedArray for the deserializer.
    ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
        isolate, injected_references,
        JSReceiver::GetOwnValues(js_array, PropertyFilter::ENUMERABLE_STRINGS));
  }

  // NOTE(review): the cast assumes the Foreign actually wraps a
  // Managed<WebSnapshotData>; a forged Foreign would not be caught here —
  // acceptable for a natives-syntax-only testing intrinsic.
  auto data = Managed<WebSnapshotData>::cast(*foreign_data).get();
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
  WebSnapshotDeserializer deserializer(v8_isolate, data->buffer,
                                       data->buffer_size);
  if (!deserializer.Deserialize(injected_references)) {
    DCHECK(isolate->has_pending_exception());
    return ReadOnlyRoots(isolate).exception();
  }
  Handle<Object> object;
  // Deserialize() succeeded but produced no value: surface as a
  // WebSnapshotError TypeError.
  if (!deserializer.value().ToHandle(&object)) {
    THROW_NEW_ERROR_RETURN_FAILURE(
        isolate, NewTypeError(MessageTemplate::kWebSnapshotError));
  }
  return *object;
}
1727
// Testing intrinsic: forces a garbage collection of the shared heap.
// Returns undefined.
RUNTIME_FUNCTION(Runtime_SharedGC) {
  SealHandleScope scope(isolate);
  // Consistency fix: every other runtime function in this file asserts its
  // arity; this one was missing the (debug-only) check.
  DCHECK_EQ(0, args.length());
  isolate->heap()->CollectSharedGarbage(GarbageCollectionReason::kTesting);
  return ReadOnlyRoots(isolate).undefined_value();
}
1733
// Testing intrinsic: returns the number of live entries in a JSWeakCollection
// (WeakMap/WeakSet), read from its backing EphemeronHashTable, as a Smi.
RUNTIME_FUNCTION(Runtime_GetWeakCollectionSize) {
  HandleScope handle_scope(isolate);
  DCHECK_EQ(1, args.length());
  Handle<JSWeakCollection> weak_collection = args.at<JSWeakCollection>(0);

  EphemeronHashTable backing_table =
      EphemeronHashTable::cast(weak_collection->table());
  const int element_count = backing_table.NumberOfElements();
  return Smi::FromInt(element_count);
}
1742
1743 } // namespace internal
1744 } // namespace v8
1745