1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/isolate.h"
6 
7 #include <stdlib.h>
8 
9 #include <atomic>
10 #include <fstream>  // NOLINT(readability/streams)
11 #include <sstream>
12 #include <unordered_map>
13 
14 #include "src/api-inl.h"
15 #include "src/assembler-inl.h"
16 #include "src/ast/ast-value-factory.h"
17 #include "src/ast/context-slot-cache.h"
18 #include "src/base/adapters.h"
19 #include "src/base/hashmap.h"
20 #include "src/base/platform/platform.h"
21 #include "src/base/sys-info.h"
22 #include "src/base/utils/random-number-generator.h"
23 #include "src/bootstrapper.h"
24 #include "src/builtins/constants-table-builder.h"
25 #include "src/cancelable-task.h"
26 #include "src/code-stubs.h"
27 #include "src/compilation-cache.h"
28 #include "src/compilation-statistics.h"
29 #include "src/compiler-dispatcher/compiler-dispatcher.h"
30 #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
31 #include "src/debug/debug.h"
32 #include "src/deoptimizer.h"
33 #include "src/elements.h"
34 #include "src/frames-inl.h"
35 #include "src/ic/stub-cache.h"
36 #include "src/instruction-stream.h"
37 #include "src/interpreter/interpreter.h"
38 #include "src/isolate-inl.h"
39 #include "src/libsampler/sampler.h"
40 #include "src/log.h"
41 #include "src/messages.h"
42 #include "src/objects/frame-array-inl.h"
43 #include "src/objects/hash-table-inl.h"
44 #include "src/objects/js-array-inl.h"
45 #include "src/objects/module-inl.h"
46 #include "src/objects/promise-inl.h"
47 #include "src/profiler/tracing-cpu-profiler.h"
48 #include "src/prototype.h"
49 #include "src/regexp/regexp-stack.h"
50 #include "src/runtime-profiler.h"
51 #include "src/setup-isolate.h"
52 #include "src/simulator.h"
53 #include "src/snapshot/startup-deserializer.h"
54 #include "src/tracing/tracing-category-observer.h"
55 #include "src/trap-handler/trap-handler.h"
56 #include "src/unicode-cache.h"
57 #include "src/v8.h"
58 #include "src/version.h"
59 #include "src/visitors.h"
60 #include "src/vm-state-inl.h"
61 #include "src/wasm/wasm-code-manager.h"
62 #include "src/wasm/wasm-engine.h"
63 #include "src/wasm/wasm-objects.h"
64 #include "src/zone/accounting-allocator.h"
65 #ifdef V8_INTL_SUPPORT
66 #include "unicode/regex.h"
67 #endif  // V8_INTL_SUPPORT
68 
69 namespace v8 {
70 namespace internal {
71 
72 #ifdef DEBUG
73 #define TRACE_ISOLATE(tag)                                                  \
74   do {                                                                      \
75     if (FLAG_trace_isolates) {                                              \
76       PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
77              id());                                                         \
78     }                                                                       \
79   } while (false)
80 #else
81 #define TRACE_ISOLATE(tag)
82 #endif
83 
84 base::Atomic32 ThreadId::highest_thread_id_ = 0;
85 
86 extern const uint8_t* DefaultEmbeddedBlob();
87 extern uint32_t DefaultEmbeddedBlobSize();
88 
89 #ifdef V8_MULTI_SNAPSHOTS
90 extern const uint8_t* TrustedEmbeddedBlob();
91 extern uint32_t TrustedEmbeddedBlobSize();
92 #endif
93 
94 namespace {
95 // These variables provide access to the current embedded blob without requiring
96 // an isolate instance. This is needed e.g. by Code::InstructionStart, which may
97 // not have access to an isolate but still needs to access the embedded blob.
98 // The variables are initialized by each isolate in Init(). Writes and reads are
99 // relaxed since we can guarantee that the current thread has initialized these
100 // variables before accessing them. Different threads may race, but this is fine
101 // since they all attempt to set the same values of the blob pointer and size.
102 
103 std::atomic<const uint8_t*> current_embedded_blob_(nullptr);
104 std::atomic<uint32_t> current_embedded_blob_size_(0);
105 }  // namespace
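// [Illustrative sketch, not part of the original file] The publication
// pattern described above, modelled with plain std::atomic so it can be read
// in isolation. Relaxed ordering suffices because every writer stores the
// same pointer/size pair and each reader runs after its own isolate did the
// store. All names below (blob_example, PublishBlob, ...) are invented.
#include <atomic>
#include <cstdint>

namespace blob_example {

std::atomic<const std::uint8_t*> g_blob{nullptr};
std::atomic<std::uint32_t> g_blob_size{0};

// Called by each "isolate" during initialization.
void PublishBlob(const std::uint8_t* blob, std::uint32_t size) {
  g_blob.store(blob, std::memory_order_relaxed);
  g_blob_size.store(size, std::memory_order_relaxed);
}

// Callable without any isolate in scope, mirroring CurrentEmbeddedBlob().
const std::uint8_t* CurrentBlob() {
  return g_blob.load(std::memory_order_relaxed);
}

}  // namespace blob_example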
106 
107 void Isolate::SetEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
108   embedded_blob_ = blob;
109   embedded_blob_size_ = blob_size;
110   current_embedded_blob_.store(blob, std::memory_order_relaxed);
111   current_embedded_blob_size_.store(blob_size, std::memory_order_relaxed);
112 
113 #ifdef DEBUG
114   if (blob != nullptr) {
115     // Verify that the contents of the embedded blob are unchanged from
116     // serialization-time, just to ensure the compiler isn't messing with us.
117     EmbeddedData d = EmbeddedData::FromBlob();
118     CHECK_EQ(d.Hash(), d.CreateHash());
119   }
120 #endif  // DEBUG
121 }
122 
123 const uint8_t* Isolate::embedded_blob() const { return embedded_blob_; }
124 uint32_t Isolate::embedded_blob_size() const { return embedded_blob_size_; }
125 
126 // static
127 const uint8_t* Isolate::CurrentEmbeddedBlob() {
128   return current_embedded_blob_.load(std::memory_order::memory_order_relaxed);
129 }
130 
131 // static
132 uint32_t Isolate::CurrentEmbeddedBlobSize() {
133   return current_embedded_blob_size_.load(
134       std::memory_order::memory_order_relaxed);
135 }
136 
137 int ThreadId::AllocateThreadId() {
138   int new_id = base::Relaxed_AtomicIncrement(&highest_thread_id_, 1);
139   return new_id;
140 }
141 
142 
143 int ThreadId::GetCurrentThreadId() {
144   int thread_id = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
145   if (thread_id == 0) {
146     thread_id = AllocateThreadId();
147     base::Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
148   }
149   return thread_id;
150 }
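// [Illustrative sketch, not part of the original file] The lazy per-thread id
// assignment done by GetCurrentThreadId(), using C++ thread_local in place of
// the explicit TLS key plus an atomic counter. Names are invented.
#include <atomic>

namespace thread_id_example {

std::atomic<int> g_highest_thread_id{0};

int CurrentThreadId() {
  // Zero means "not assigned yet"; ids start at 1, matching the
  // increment-then-return behaviour of AllocateThreadId() above.
  static thread_local int tls_id = 0;
  if (tls_id == 0) tls_id = ++g_highest_thread_id;
  return tls_id;
}

}  // namespace thread_id_example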
151 
152 void ThreadLocalTop::Initialize(Isolate* isolate) {
153   *this = ThreadLocalTop();
154   isolate_ = isolate;
155 #ifdef USE_SIMULATOR
156   simulator_ = Simulator::current(isolate);
157 #endif
158   thread_id_ = ThreadId::Current();
159   thread_in_wasm_flag_address_ = reinterpret_cast<Address>(
160       trap_handler::GetThreadInWasmThreadLocalAddress());
161 }
162 
163 void ThreadLocalTop::Free() {
164   wasm_caught_exception_ = nullptr;
165   // Match unmatched PopPromise calls.
166   while (promise_on_stack_) isolate_->PopPromise();
167 }
168 
169 
170 base::Thread::LocalStorageKey Isolate::isolate_key_;
171 base::Thread::LocalStorageKey Isolate::thread_id_key_;
172 base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
173 base::Atomic32 Isolate::isolate_counter_ = 0;
174 #if DEBUG
175 base::Atomic32 Isolate::isolate_key_created_ = 0;
176 #endif
177 
178 Isolate::PerIsolateThreadData*
179     Isolate::FindOrAllocatePerThreadDataForThisThread() {
180   ThreadId thread_id = ThreadId::Current();
181   PerIsolateThreadData* per_thread = nullptr;
182   {
183     base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
184     per_thread = thread_data_table_.Lookup(thread_id);
185     if (per_thread == nullptr) {
186       per_thread = new PerIsolateThreadData(this, thread_id);
187       thread_data_table_.Insert(per_thread);
188     }
189     DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
190   }
191   return per_thread;
192 }
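// [Illustrative sketch, not part of the original file] The mutex-guarded
// lookup-or-insert performed by FindOrAllocatePerThreadDataForThisThread(),
// modelled with std::mutex and std::unordered_map. PerThreadData and the
// table are stand-ins invented for the example.
#include <mutex>
#include <unordered_map>

namespace per_thread_example {

struct PerThreadData {
  int thread_id;
};

std::mutex g_table_mutex;
std::unordered_map<int, PerThreadData*> g_table;

PerThreadData* FindOrAllocate(int thread_id) {
  std::lock_guard<std::mutex> lock(g_table_mutex);
  auto it = g_table.find(thread_id);
  if (it != g_table.end()) return it->second;
  PerThreadData* data = new PerThreadData{thread_id};
  g_table[thread_id] = data;
  return data;
}

}  // namespace per_thread_example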
193 
194 
195 void Isolate::DiscardPerThreadDataForThisThread() {
196   int thread_id_int = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
197   if (thread_id_int) {
198     ThreadId thread_id = ThreadId(thread_id_int);
199     DCHECK(!thread_manager_->mutex_owner_.Equals(thread_id));
200     base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
201     PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
202     if (per_thread) {
203       DCHECK(!per_thread->thread_state_);
204       thread_data_table_.Remove(per_thread);
205     }
206   }
207 }
208 
209 
210 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
211   ThreadId thread_id = ThreadId::Current();
212   return FindPerThreadDataForThread(thread_id);
213 }
214 
215 
216 Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
217     ThreadId thread_id) {
218   PerIsolateThreadData* per_thread = nullptr;
219   {
220     base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
221     per_thread = thread_data_table_.Lookup(thread_id);
222   }
223   return per_thread;
224 }
225 
226 
227 void Isolate::InitializeOncePerProcess() {
228   isolate_key_ = base::Thread::CreateThreadLocalKey();
229 #if DEBUG
230   base::Relaxed_Store(&isolate_key_created_, 1);
231 #endif
232   thread_id_key_ = base::Thread::CreateThreadLocalKey();
233   per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
234 }
235 
236 Address Isolate::get_address_from_id(IsolateAddressId id) {
237   return isolate_addresses_[id];
238 }
239 
240 char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
241   ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
242   Iterate(v, thread);
243   return thread_storage + sizeof(ThreadLocalTop);
244 }
245 
246 
247 void Isolate::IterateThread(ThreadVisitor* v, char* t) {
248   ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
249   v->VisitThread(this, thread);
250 }
251 
252 void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
253   // Visit the roots from the top for a given thread.
254   v->VisitRootPointer(Root::kTop, nullptr, &thread->pending_exception_);
255   v->VisitRootPointer(Root::kTop, nullptr, &thread->wasm_caught_exception_);
256   v->VisitRootPointer(Root::kTop, nullptr, &thread->pending_message_obj_);
257   v->VisitRootPointer(Root::kTop, nullptr,
258                       bit_cast<Object**>(&(thread->context_)));
259   v->VisitRootPointer(Root::kTop, nullptr, &thread->scheduled_exception_);
260 
261   for (v8::TryCatch* block = thread->try_catch_handler(); block != nullptr;
262        block = block->next_) {
263     v->VisitRootPointer(Root::kTop, nullptr,
264                         bit_cast<Object**>(&(block->exception_)));
265     v->VisitRootPointer(Root::kTop, nullptr,
266                         bit_cast<Object**>(&(block->message_obj_)));
267   }
268 
269   // Iterate over pointers on native execution stack.
270   for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
271     it.frame()->Iterate(v);
272   }
273 }
274 
275 void Isolate::Iterate(RootVisitor* v) {
276   ThreadLocalTop* current_t = thread_local_top();
277   Iterate(v, current_t);
278 }
279 
280 void Isolate::IterateDeferredHandles(RootVisitor* visitor) {
281   for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
282        deferred = deferred->next_) {
283     deferred->Iterate(visitor);
284   }
285 }
286 
287 
288 #ifdef DEBUG
289 bool Isolate::IsDeferredHandle(Object** handle) {
290   // Each DeferredHandles instance keeps the handles to one job in the
291   // concurrent recompilation queue, containing a list of blocks.  Each block
292   // contains kHandleBlockSize handles except for the first block, which may
293   // not be fully filled.
294   // We iterate through all the blocks to see whether the argument handle
295   // belongs to one of the blocks.  If so, it is deferred.
296   for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
297        deferred = deferred->next_) {
298     std::vector<Object**>* blocks = &deferred->blocks_;
299     for (size_t i = 0; i < blocks->size(); i++) {
300       Object** block_limit = (i == 0) ? deferred->first_block_limit_
301                                       : blocks->at(i) + kHandleBlockSize;
302       if (blocks->at(i) <= handle && handle < block_limit) return true;
303     }
304   }
305   return false;
306 }
307 #endif  // DEBUG
308 
309 
310 void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
311   thread_local_top()->set_try_catch_handler(that);
312 }
313 
314 
315 void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
316   DCHECK(thread_local_top()->try_catch_handler() == that);
317   thread_local_top()->set_try_catch_handler(that->next_);
318 }
319 
320 
321 Handle<String> Isolate::StackTraceString() {
322   if (stack_trace_nesting_level_ == 0) {
323     stack_trace_nesting_level_++;
324     HeapStringAllocator allocator;
325     StringStream::ClearMentionedObjectCache(this);
326     StringStream accumulator(&allocator);
327     incomplete_message_ = &accumulator;
328     PrintStack(&accumulator);
329     Handle<String> stack_trace = accumulator.ToString(this);
330     incomplete_message_ = nullptr;
331     stack_trace_nesting_level_ = 0;
332     return stack_trace;
333   } else if (stack_trace_nesting_level_ == 1) {
334     stack_trace_nesting_level_++;
335     base::OS::PrintError(
336       "\n\nAttempt to print stack while printing stack (double fault)\n");
337     base::OS::PrintError(
338       "If you are lucky you may find a partial stack dump on stdout.\n\n");
339     incomplete_message_->OutputToStdOut();
340     return factory()->empty_string();
341   } else {
342     base::OS::Abort();
343     // Unreachable
344     return factory()->empty_string();
345   }
346 }
347 
348 void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
349                                    void* ptr4) {
350   StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
351   message.Print();
352   base::OS::Abort();
353 }
354 
355 void StackTraceFailureMessage::Print() volatile {
356   // Print the details of this failure message object, including its own address
357   // to force stack allocation.
358   base::OS::PrintError(
359       "Stacktrace:\n   ptr1=%p\n    ptr2=%p\n    ptr3=%p\n    ptr4=%p\n    "
360       "failure_message_object=%p\n%s",
361       ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
362 }
363 
364 StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
365                                                    void* ptr2, void* ptr3,
366                                                    void* ptr4) {
367   isolate_ = isolate;
368   ptr1_ = ptr1;
369   ptr2_ = ptr2;
370   ptr3_ = ptr3;
371   ptr4_ = ptr4;
372   // Write a stack trace into the {js_stack_trace_} buffer.
373   const size_t buffer_length = arraysize(js_stack_trace_);
374   memset(&js_stack_trace_, 0, buffer_length);
375   FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
376   StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
377   isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
378   // Keep a reference to the last code objects to increase the likelihood
379   // that they get included in the minidump.
380   const size_t code_objects_length = arraysize(code_objects_);
381   size_t i = 0;
382   StackFrameIterator it(isolate);
383   for (; !it.done() && i < code_objects_length; it.Advance()) {
384     if (it.frame()->type() == StackFrame::INTERNAL) continue;
385     code_objects_[i++] = it.frame()->unchecked_code();
386   }
387 }
388 
389 namespace {
390 
391 class FrameArrayBuilder {
392  public:
393   FrameArrayBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
394                     Handle<Object> caller)
395       : isolate_(isolate), mode_(mode), limit_(limit), caller_(caller) {
396     switch (mode_) {
397       case SKIP_FIRST:
398         skip_next_frame_ = true;
399         break;
400       case SKIP_UNTIL_SEEN:
401         DCHECK(caller_->IsJSFunction());
402         skip_next_frame_ = true;
403         break;
404       case SKIP_NONE:
405         skip_next_frame_ = false;
406         break;
407     }
408 
409     elements_ = isolate->factory()->NewFrameArray(Min(limit, 10));
410   }
411 
412   void AppendStandardFrame(StandardFrame* frame) {
413     std::vector<FrameSummary> frames;
414     frame->Summarize(&frames);
415     // A standard frame may include many summarized frames (due to inlining).
416     for (size_t i = frames.size(); i != 0 && !full(); i--) {
417       const auto& summ = frames[i - 1];
418       if (summ.IsJavaScript()) {
419         //====================================================================
420         // Handle a JavaScript frame.
421         //====================================================================
422         const auto& summary = summ.AsJavaScript();
423 
424         // Filter out internal frames that we do not want to show.
425         if (!IsVisibleInStackTrace(summary.function())) continue;
426 
427         Handle<AbstractCode> abstract_code = summary.abstract_code();
428         const int offset = summary.code_offset();
429 
430         bool is_constructor = summary.is_constructor();
431         // Help CallSite::IsConstructor correctly detect hand-written
432         // construct stubs.
433         if (abstract_code->IsCode() &&
434             Code::cast(*abstract_code)->is_construct_stub()) {
435           is_constructor = true;
436         }
437 
438         int flags = 0;
439         Handle<JSFunction> function = summary.function();
440         if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
441         if (is_constructor) flags |= FrameArray::kIsConstructor;
442 
443         elements_ = FrameArray::AppendJSFrame(
444             elements_, TheHoleToUndefined(isolate_, summary.receiver()),
445             function, abstract_code, offset, flags);
446       } else if (summ.IsWasmCompiled()) {
447         //====================================================================
448         // Handle a WASM compiled frame.
449         //====================================================================
450         const auto& summary = summ.AsWasmCompiled();
451         if (summary.code()->kind() != wasm::WasmCode::kFunction) {
452           continue;
453         }
454         Handle<WasmInstanceObject> instance = summary.wasm_instance();
455         int flags = 0;
456         if (instance->module_object()->is_asm_js()) {
457           flags |= FrameArray::kIsAsmJsWasmFrame;
458           if (WasmCompiledFrame::cast(frame)->at_to_number_conversion()) {
459             flags |= FrameArray::kAsmJsAtNumberConversion;
460           }
461         } else {
462           flags |= FrameArray::kIsWasmFrame;
463         }
464 
465         elements_ = FrameArray::AppendWasmFrame(
466             elements_, instance, summary.function_index(), summary.code(),
467             summary.code_offset(), flags);
468       } else if (summ.IsWasmInterpreted()) {
469         //====================================================================
470         // Handle a WASM interpreted frame.
471         //====================================================================
472         const auto& summary = summ.AsWasmInterpreted();
473         Handle<WasmInstanceObject> instance = summary.wasm_instance();
474         int flags = FrameArray::kIsWasmInterpretedFrame;
475         DCHECK(!instance->module_object()->is_asm_js());
476         elements_ = FrameArray::AppendWasmFrame(elements_, instance,
477                                                 summary.function_index(), {},
478                                                 summary.byte_offset(), flags);
479       }
480     }
481   }
482 
483   void AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
484     Handle<JSFunction> function = handle(exit_frame->function(), isolate_);
485 
486     // Filter out internal frames that we do not want to show.
487     if (!IsVisibleInStackTrace(function)) return;
488 
489     Handle<Object> receiver(exit_frame->receiver(), isolate_);
490     Handle<Code> code(exit_frame->LookupCode(), isolate_);
491     const int offset =
492         static_cast<int>(exit_frame->pc() - code->InstructionStart());
493 
494     int flags = 0;
495     if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
496     if (exit_frame->IsConstructor()) flags |= FrameArray::kIsConstructor;
497 
498     elements_ = FrameArray::AppendJSFrame(elements_, receiver, function,
499                                           Handle<AbstractCode>::cast(code),
500                                           offset, flags);
501   }
502 
503   bool full() { return elements_->FrameCount() >= limit_; }
504 
505   Handle<FrameArray> GetElements() {
506     elements_->ShrinkToFit(isolate_);
507     return elements_;
508   }
509 
510  private:
511   // Poison stack frames below the first strict mode frame.
512   // The stack trace API should not expose receivers and function
513   // objects on frames deeper than the top-most one with a strict mode
514   // function.
515   bool IsStrictFrame(Handle<JSFunction> function) {
516     if (!encountered_strict_function_) {
517       encountered_strict_function_ =
518           is_strict(function->shared()->language_mode());
519     }
520     return encountered_strict_function_;
521   }
522 
523   // Determines whether the given stack frame should be displayed in a stack
524   // trace.
525   bool IsVisibleInStackTrace(Handle<JSFunction> function) {
526     return ShouldIncludeFrame(function) && IsNotHidden(function) &&
527            IsInSameSecurityContext(function);
528   }
529 
530   // This mechanism excludes a number of uninteresting frames from the stack
531   // trace. This can be the first frame (which will be a builtin-exit frame
532   // for the error constructor builtin) or every frame until encountering a
533   // user-specified function.
534   bool ShouldIncludeFrame(Handle<JSFunction> function) {
535     switch (mode_) {
536       case SKIP_NONE:
537         return true;
538       case SKIP_FIRST:
539         if (!skip_next_frame_) return true;
540         skip_next_frame_ = false;
541         return false;
542       case SKIP_UNTIL_SEEN:
543         if (skip_next_frame_ && (*function == *caller_)) {
544           skip_next_frame_ = false;
545           return false;
546         }
547         return !skip_next_frame_;
548     }
549     UNREACHABLE();
550   }
551 
552   bool IsNotHidden(Handle<JSFunction> function) {
553     // Functions not defined in user scripts are not visible unless directly
554     // exposed, in which case the native flag is set.
555     // The --builtins-in-stack-traces command line flag allows including
556     // internal call sites in the stack trace for debugging purposes.
557     if (!FLAG_builtins_in_stack_traces &&
558         !function->shared()->IsUserJavaScript()) {
559       return function->shared()->native();
560     }
561     return true;
562   }
563 
564   bool IsInSameSecurityContext(Handle<JSFunction> function) {
565     return isolate_->context()->HasSameSecurityTokenAs(function->context());
566   }
567 
568   // TODO(jgruber): Fix all cases in which frames give us a hole value (e.g. the
569   // receiver in RegExp constructor frames).
570   Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
571     return (in->IsTheHole(isolate))
572                ? Handle<Object>::cast(isolate->factory()->undefined_value())
573                : in;
574   }
575 
576   Isolate* isolate_;
577   const FrameSkipMode mode_;
578   int limit_;
579   const Handle<Object> caller_;
580   bool skip_next_frame_ = true;
581   bool encountered_strict_function_ = false;
582   Handle<FrameArray> elements_;
583 };
584 
585 bool GetStackTraceLimit(Isolate* isolate, int* result) {
586   Handle<JSObject> error = isolate->error_function();
587 
588   Handle<String> key = isolate->factory()->stackTraceLimit_string();
589   Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
590   if (!stack_trace_limit->IsNumber()) return false;
591 
592   // Ensure that limit is not negative.
593   *result = Max(FastD2IChecked(stack_trace_limit->Number()), 0);
594 
595   if (*result != FLAG_stack_trace_limit) {
596     isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
597   }
598 
599   return true;
600 }
601 
602 bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }
603 }  // namespace
604 
605 Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
606                                                 FrameSkipMode mode,
607                                                 Handle<Object> caller) {
608   DisallowJavascriptExecution no_js(this);
609 
610   int limit;
611   if (!GetStackTraceLimit(this, &limit)) return factory()->undefined_value();
612 
613   FrameArrayBuilder builder(this, mode, limit, caller);
614 
615   for (StackFrameIterator iter(this); !iter.done() && !builder.full();
616        iter.Advance()) {
617     StackFrame* frame = iter.frame();
618 
619     switch (frame->type()) {
620       case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
621       case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
622       case StackFrame::OPTIMIZED:
623       case StackFrame::INTERPRETED:
624       case StackFrame::BUILTIN:
625         builder.AppendStandardFrame(JavaScriptFrame::cast(frame));
626         break;
627       case StackFrame::BUILTIN_EXIT:
628         // BuiltinExitFrames are not standard frames, so they do not have
629         // Summarize(). However, they may have one JS frame worth showing.
630         builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame));
631         break;
632       case StackFrame::WASM_COMPILED:
633         builder.AppendStandardFrame(WasmCompiledFrame::cast(frame));
634         break;
635       case StackFrame::WASM_INTERPRETER_ENTRY:
636         builder.AppendStandardFrame(WasmInterpreterEntryFrame::cast(frame));
637         break;
638 
639       default:
640         break;
641     }
642   }
643 
644   // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
645   return factory()->NewJSArrayWithElements(builder.GetElements());
646 }
647 
648 MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
649     Handle<JSReceiver> error_object) {
650   if (capture_stack_trace_for_uncaught_exceptions_) {
651     // Capture stack trace for a detailed exception message.
652     Handle<Name> key = factory()->detailed_stack_trace_symbol();
653     Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
654         stack_trace_for_uncaught_exceptions_frame_limit_,
655         stack_trace_for_uncaught_exceptions_options_);
656     RETURN_ON_EXCEPTION(
657         this,
658         JSReceiver::SetProperty(this, error_object, key, stack_trace,
659                                 LanguageMode::kStrict),
660         JSReceiver);
661   }
662   return error_object;
663 }
664 
665 MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
666     Handle<JSReceiver> error_object, FrameSkipMode mode,
667     Handle<Object> caller) {
668   // Capture stack trace for simple stack trace string formatting.
669   Handle<Name> key = factory()->stack_trace_symbol();
670   Handle<Object> stack_trace =
671       CaptureSimpleStackTrace(error_object, mode, caller);
672   RETURN_ON_EXCEPTION(
673       this,
674       JSReceiver::SetProperty(this, error_object, key, stack_trace,
675                               LanguageMode::kStrict),
676       JSReceiver);
677   return error_object;
678 }
679 
680 Handle<FixedArray> Isolate::GetDetailedStackTrace(
681     Handle<JSObject> error_object) {
682   Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
683   Handle<Object> stack_trace =
684       JSReceiver::GetDataProperty(error_object, key_detailed);
685   if (stack_trace->IsFixedArray()) return Handle<FixedArray>::cast(stack_trace);
686   return Handle<FixedArray>();
687 }
688 
689 Address Isolate::GetAbstractPC(int* line, int* column) {
690   JavaScriptFrameIterator it(this);
691 
692   if (it.done()) {
693     *line = -1;
694     *column = -1;
695     return kNullAddress;
696   }
697   JavaScriptFrame* frame = it.frame();
698   DCHECK(!frame->is_builtin());
699   int position = frame->position();
700 
701   Object* maybe_script = frame->function()->shared()->script();
702   if (maybe_script->IsScript()) {
703     Handle<Script> script(Script::cast(maybe_script), this);
704     Script::PositionInfo info;
705     Script::GetPositionInfo(script, position, &info, Script::WITH_OFFSET);
706     *line = info.line + 1;
707     *column = info.column + 1;
708   } else {
709     *line = position;
710     *column = -1;
711   }
712 
713   if (frame->is_interpreted()) {
714     InterpretedFrame* iframe = static_cast<InterpretedFrame*>(frame);
715     Address bytecode_start =
716         reinterpret_cast<Address>(iframe->GetBytecodeArray()) - kHeapObjectTag +
717         BytecodeArray::kHeaderSize;
718     return bytecode_start + iframe->GetBytecodeOffset();
719   }
720 
721   return frame->pc();
722 }
723 
724 class CaptureStackTraceHelper {
725  public:
726   explicit CaptureStackTraceHelper(Isolate* isolate) : isolate_(isolate) {}
727 
728   Handle<StackFrameInfo> NewStackFrameObject(FrameSummary& summ) {
729     if (summ.IsJavaScript()) return NewStackFrameObject(summ.AsJavaScript());
730     if (summ.IsWasm()) return NewStackFrameObject(summ.AsWasm());
731     UNREACHABLE();
732   }
733 
734   Handle<StackFrameInfo> NewStackFrameObject(
735       const FrameSummary::JavaScriptFrameSummary& summ) {
736     int code_offset;
737     Handle<ByteArray> source_position_table;
738     Handle<Object> maybe_cache;
739     Handle<SimpleNumberDictionary> cache;
740     if (!FLAG_optimize_for_size) {
741       code_offset = summ.code_offset();
742       source_position_table =
743           handle(summ.abstract_code()->source_position_table(), isolate_);
744       maybe_cache = handle(summ.abstract_code()->stack_frame_cache(), isolate_);
745       if (maybe_cache->IsSimpleNumberDictionary()) {
746         cache = Handle<SimpleNumberDictionary>::cast(maybe_cache);
747       } else {
748         cache = SimpleNumberDictionary::New(isolate_, 1);
749       }
750       int entry = cache->FindEntry(isolate_, code_offset);
751       if (entry != NumberDictionary::kNotFound) {
752         Handle<StackFrameInfo> frame(
753             StackFrameInfo::cast(cache->ValueAt(entry)), isolate_);
754         return frame;
755       }
756     }
757 
758     Handle<StackFrameInfo> frame = factory()->NewStackFrameInfo();
759     Handle<Script> script = Handle<Script>::cast(summ.script());
760     Script::PositionInfo info;
761     bool valid_pos = Script::GetPositionInfo(script, summ.SourcePosition(),
762                                              &info, Script::WITH_OFFSET);
763     if (valid_pos) {
764       frame->set_line_number(info.line + 1);
765       frame->set_column_number(info.column + 1);
766     }
767     frame->set_script_id(script->id());
768     frame->set_script_name(script->name());
769     frame->set_script_name_or_source_url(script->GetNameOrSourceURL());
770     frame->set_is_eval(script->compilation_type() ==
771                        Script::COMPILATION_TYPE_EVAL);
772     Handle<String> function_name = summ.FunctionName();
773     frame->set_function_name(*function_name);
774     frame->set_is_constructor(summ.is_constructor());
775     frame->set_is_wasm(false);
776     if (!FLAG_optimize_for_size) {
777       auto new_cache =
778           SimpleNumberDictionary::Set(isolate_, cache, code_offset, frame);
779       if (*new_cache != *cache || !maybe_cache->IsNumberDictionary()) {
780         AbstractCode::SetStackFrameCache(summ.abstract_code(), new_cache);
781       }
782     }
783     frame->set_id(next_id());
784     return frame;
785   }
786 
787   Handle<StackFrameInfo> NewStackFrameObject(
788       const FrameSummary::WasmFrameSummary& summ) {
789     Handle<StackFrameInfo> info = factory()->NewStackFrameInfo();
790 
791     Handle<WasmModuleObject> module_object(
792         summ.wasm_instance()->module_object(), isolate_);
793     Handle<String> name = WasmModuleObject::GetFunctionName(
794         isolate_, module_object, summ.function_index());
795     info->set_function_name(*name);
796     // Encode the function index as line number (1-based).
797     info->set_line_number(summ.function_index() + 1);
798     // Encode the byte offset as column (1-based).
799     int position = summ.byte_offset();
800     // Make position 1-based.
801     if (position >= 0) ++position;
802     info->set_column_number(position);
803     info->set_script_id(summ.script()->id());
804     info->set_is_wasm(true);
805     info->set_id(next_id());
806     return info;
807   }
808 
809  private:
810   inline Factory* factory() { return isolate_->factory(); }
811 
812   int next_id() const {
813     int id = isolate_->last_stack_frame_info_id() + 1;
814     isolate_->set_last_stack_frame_info_id(id);
815     return id;
816   }
817 
818   Isolate* isolate_;
819 };
820 
821 Handle<FixedArray> Isolate::CaptureCurrentStackTrace(
822     int frame_limit, StackTrace::StackTraceOptions options) {
823   DisallowJavascriptExecution no_js(this);
824   CaptureStackTraceHelper helper(this);
825 
826   // Ensure no negative values.
827   int limit = Max(frame_limit, 0);
828   Handle<FixedArray> stack_trace_elems = factory()->NewFixedArray(limit);
829 
830   int frames_seen = 0;
831   for (StackTraceFrameIterator it(this); !it.done() && (frames_seen < limit);
832        it.Advance()) {
833     StandardFrame* frame = it.frame();
834     // Set initial size to the maximum inlining level + 1 for the outermost
835     // function.
836     std::vector<FrameSummary> frames;
837     frame->Summarize(&frames);
838     for (size_t i = frames.size(); i != 0 && frames_seen < limit; i--) {
839       FrameSummary& frame = frames[i - 1];
840       if (!frame.is_subject_to_debugging()) continue;
841       // Filter frames from other security contexts.
842       if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) &&
843           !this->context()->HasSameSecurityTokenAs(*frame.native_context()))
844         continue;
845       Handle<StackFrameInfo> new_frame_obj = helper.NewStackFrameObject(frame);
846       stack_trace_elems->set(frames_seen, *new_frame_obj);
847       frames_seen++;
848     }
849   }
850   return FixedArray::ShrinkOrEmpty(this, stack_trace_elems, frames_seen);
851 }
852 
853 
854 void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
855   if (stack_trace_nesting_level_ == 0) {
856     stack_trace_nesting_level_++;
857     StringStream::ClearMentionedObjectCache(this);
858     HeapStringAllocator allocator;
859     StringStream accumulator(&allocator);
860     incomplete_message_ = &accumulator;
861     PrintStack(&accumulator, mode);
862     accumulator.OutputToFile(out);
863     InitializeLoggingAndCounters();
864     accumulator.Log(this);
865     incomplete_message_ = nullptr;
866     stack_trace_nesting_level_ = 0;
867   } else if (stack_trace_nesting_level_ == 1) {
868     stack_trace_nesting_level_++;
869     base::OS::PrintError(
870       "\n\nAttempt to print stack while printing stack (double fault)\n");
871     base::OS::PrintError(
872       "If you are lucky you may find a partial stack dump on stdout.\n\n");
873     incomplete_message_->OutputToFile(out);
874   }
875 }
876 
877 
878 static void PrintFrames(Isolate* isolate,
879                         StringStream* accumulator,
880                         StackFrame::PrintMode mode) {
881   StackFrameIterator it(isolate);
882   for (int i = 0; !it.done(); it.Advance()) {
883     it.frame()->Print(accumulator, mode, i++);
884   }
885 }
886 
887 void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
888   // The MentionedObjectCache is not GC-proof at the moment.
889   DisallowHeapAllocation no_gc;
890   HandleScope scope(this);
891   DCHECK(accumulator->IsMentionedObjectCacheClear(this));
892 
893   // Avoid printing anything if there are no frames.
894   if (c_entry_fp(thread_local_top()) == 0) return;
895 
896   accumulator->Add(
897       "\n==== JS stack trace =========================================\n\n");
898   PrintFrames(this, accumulator, StackFrame::OVERVIEW);
899   if (mode == kPrintStackVerbose) {
900     accumulator->Add(
901         "\n==== Details ================================================\n\n");
902     PrintFrames(this, accumulator, StackFrame::DETAILS);
903     accumulator->PrintMentionedObjectCache(this);
904   }
905   accumulator->Add("=====================\n\n");
906 }
907 
908 
909 void Isolate::SetFailedAccessCheckCallback(
910     v8::FailedAccessCheckCallback callback) {
911   thread_local_top()->failed_access_check_callback_ = callback;
912 }
913 
914 
915 void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
916   if (!thread_local_top()->failed_access_check_callback_) {
917     return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
918   }
919 
920   DCHECK(receiver->IsAccessCheckNeeded());
921   DCHECK(context());
922 
923   // Get the data object from access check info.
924   HandleScope scope(this);
925   Handle<Object> data;
926   { DisallowHeapAllocation no_gc;
927     AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
928     if (!access_check_info) {
929       AllowHeapAllocation doesnt_matter_anymore;
930       return ScheduleThrow(
931           *factory()->NewTypeError(MessageTemplate::kNoAccess));
932     }
933     data = handle(access_check_info->data(), this);
934   }
935 
936   // Leaving JavaScript.
937   VMState<EXTERNAL> state(this);
938   thread_local_top()->failed_access_check_callback_(
939       v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
940 }
941 
942 
943 bool Isolate::MayAccess(Handle<Context> accessing_context,
944                         Handle<JSObject> receiver) {
945   DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());
946 
947   // Check for compatibility between the security tokens in the
948   // current lexical context and the accessed object.
949 
950   // During bootstrapping, callback functions are not enabled yet.
951   if (bootstrapper()->IsActive()) return true;
952   {
953     DisallowHeapAllocation no_gc;
954 
955     if (receiver->IsJSGlobalProxy()) {
956       Object* receiver_context =
957           JSGlobalProxy::cast(*receiver)->native_context();
958       if (!receiver_context->IsContext()) return false;
959 
960       // Get the native context of the current top context.
961       // Avoid using Isolate::native_context() because it uses a Handle.
962       Context* native_context =
963           accessing_context->global_object()->native_context();
964       if (receiver_context == native_context) return true;
965 
966       if (Context::cast(receiver_context)->security_token() ==
967           native_context->security_token())
968         return true;
969     }
970   }
971 
972   HandleScope scope(this);
973   Handle<Object> data;
974   v8::AccessCheckCallback callback = nullptr;
975   { DisallowHeapAllocation no_gc;
976     AccessCheckInfo* access_check_info = AccessCheckInfo::Get(this, receiver);
977     if (!access_check_info) return false;
978     Object* fun_obj = access_check_info->callback();
979     callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
980     data = handle(access_check_info->data(), this);
981   }
982 
983   LOG(this, ApiSecurityCheck());
984 
985   {
986     // Leaving JavaScript.
987     VMState<EXTERNAL> state(this);
988     return callback(v8::Utils::ToLocal(accessing_context),
989                     v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
990   }
991 }
992 
993 
994 Object* Isolate::StackOverflow() {
995   if (FLAG_abort_on_stack_or_string_length_overflow) {
996     FATAL("Aborting on stack overflow");
997   }
998 
999   DisallowJavascriptExecution no_js(this);
1000   HandleScope scope(this);
1001 
1002   Handle<JSFunction> fun = range_error_function();
1003   Handle<Object> msg = factory()->NewStringFromAsciiChecked(
1004       MessageTemplate::TemplateString(MessageTemplate::kStackOverflow));
1005   Handle<Object> no_caller;
1006   Handle<Object> exception;
1007   ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1008       this, exception,
1009       ErrorUtils::Construct(this, fun, fun, msg, SKIP_NONE, no_caller, true));
1010 
1011   Throw(*exception, nullptr);
1012 
1013 #ifdef VERIFY_HEAP
1014   if (FLAG_verify_heap && FLAG_stress_compaction) {
1015     heap()->CollectAllGarbage(Heap::kNoGCFlags,
1016                               GarbageCollectionReason::kTesting);
1017   }
1018 #endif  // VERIFY_HEAP
1019 
1020   return ReadOnlyRoots(heap()).exception();
1021 }
1022 
1023 
1024 Object* Isolate::TerminateExecution() {
1025   return Throw(ReadOnlyRoots(this).termination_exception(), nullptr);
1026 }
1027 
1028 
1029 void Isolate::CancelTerminateExecution() {
1030   if (try_catch_handler()) {
1031     try_catch_handler()->has_terminated_ = false;
1032   }
1033   if (has_pending_exception() &&
1034       pending_exception() == ReadOnlyRoots(this).termination_exception()) {
1035     thread_local_top()->external_caught_exception_ = false;
1036     clear_pending_exception();
1037   }
1038   if (has_scheduled_exception() &&
1039       scheduled_exception() == ReadOnlyRoots(this).termination_exception()) {
1040     thread_local_top()->external_caught_exception_ = false;
1041     clear_scheduled_exception();
1042   }
1043 }
1044 
1045 
1046 void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
1047   ExecutionAccess access(this);
1048   api_interrupts_queue_.push(InterruptEntry(callback, data));
1049   stack_guard()->RequestApiInterrupt();
1050 }
1051 
1052 
1053 void Isolate::InvokeApiInterruptCallbacks() {
1054   RuntimeCallTimerScope runtimeTimer(
1055       this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
1056   // Note: the callback below should be invoked outside of the execution access lock.
1057   while (true) {
1058     InterruptEntry entry;
1059     {
1060       ExecutionAccess access(this);
1061       if (api_interrupts_queue_.empty()) return;
1062       entry = api_interrupts_queue_.front();
1063       api_interrupts_queue_.pop();
1064     }
1065     VMState<EXTERNAL> state(this);
1066     HandleScope handle_scope(this);
1067     entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
1068   }
1069 }
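// [Illustrative sketch, not part of the original file] The queue-and-drain
// pattern used by RequestInterrupt()/InvokeApiInterruptCallbacks() above:
// entries are enqueued under a lock, but each callback runs with the lock
// released. All names here are invented for the example.
#include <mutex>
#include <queue>
#include <utility>

namespace interrupt_example {

using Callback = void (*)(void* data);

std::mutex g_queue_mutex;
std::queue<std::pair<Callback, void*>> g_queue;

void Request(Callback callback, void* data) {
  std::lock_guard<std::mutex> lock(g_queue_mutex);
  g_queue.push({callback, data});
  // A real implementation would also arm a stack-guard style flag here so the
  // running thread notices the pending interrupt.
}

void InvokeAll() {
  while (true) {
    std::pair<Callback, void*> entry;
    {
      std::lock_guard<std::mutex> lock(g_queue_mutex);
      if (g_queue.empty()) return;
      entry = g_queue.front();
      g_queue.pop();
    }
    entry.first(entry.second);  // Run the callback outside the lock.
  }
}

}  // namespace interrupt_example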
1070 
1071 
1072 void ReportBootstrappingException(Handle<Object> exception,
1073                                   MessageLocation* location) {
1074   base::OS::PrintError("Exception thrown during bootstrapping\n");
1075   if (location == nullptr || location->script().is_null()) return;
1076   // We are bootstrapping and caught an error where the location is set
1077   // and we have a script for the location.
1078   // In this case we could have an extension (or an internal error
1079   // somewhere) and we print out the line number at which the error occurred
1080   // to the console for easier debugging.
1081   int line_number =
1082       location->script()->GetLineNumber(location->start_pos()) + 1;
1083   if (exception->IsString() && location->script()->name()->IsString()) {
1084     base::OS::PrintError(
1085         "Extension or internal compilation error: %s in %s at line %d.\n",
1086         String::cast(*exception)->ToCString().get(),
1087         String::cast(location->script()->name())->ToCString().get(),
1088         line_number);
1089   } else if (location->script()->name()->IsString()) {
1090     base::OS::PrintError(
1091         "Extension or internal compilation error in %s at line %d.\n",
1092         String::cast(location->script()->name())->ToCString().get(),
1093         line_number);
1094   } else if (exception->IsString()) {
1095     base::OS::PrintError("Extension or internal compilation error: %s.\n",
1096                          String::cast(*exception)->ToCString().get());
1097   } else {
1098     base::OS::PrintError("Extension or internal compilation error.\n");
1099   }
1100 #ifdef OBJECT_PRINT
1101   // Since comments and empty lines have been stripped from the source of
1102   // builtins, print the actual source here so that line numbers match.
1103   if (location->script()->source()->IsString()) {
1104     Handle<String> src(String::cast(location->script()->source()),
1105                        location->script()->GetIsolate());
1106     PrintF("Failing script:");
1107     int len = src->length();
1108     if (len == 0) {
1109       PrintF(" <not available>\n");
1110     } else {
1111       PrintF("\n");
1112       int line_number = 1;
1113       PrintF("%5d: ", line_number);
1114       for (int i = 0; i < len; i++) {
1115         uint16_t character = src->Get(i);
1116         PrintF("%c", character);
1117         if (character == '\n' && i < len - 2) {
1118           PrintF("%5d: ", ++line_number);
1119         }
1120       }
1121       PrintF("\n");
1122     }
1123   }
1124 #endif
1125 }
1126 
1127 bool Isolate::is_catchable_by_wasm(Object* exception) {
1128   // TODO(titzer): thread WASM features here, or just remove this check?
1129   if (!FLAG_experimental_wasm_eh) return false;
1130   if (!is_catchable_by_javascript(exception) || !exception->IsJSError())
1131     return false;
1132   HandleScope scope(this);
1133   Handle<Object> exception_handle(exception, this);
1134   return JSReceiver::HasProperty(Handle<JSReceiver>::cast(exception_handle),
1135                                  factory()->InternalizeUtf8String(
1136                                      wasm::WasmException::kRuntimeIdStr))
1137       .IsJust();
1138 }
1139 
1140 Object* Isolate::Throw(Object* raw_exception, MessageLocation* location) {
1141   DCHECK(!has_pending_exception());
1142 
1143   HandleScope scope(this);
1144   Handle<Object> exception(raw_exception, this);
1145 
1146   if (FLAG_print_all_exceptions) {
1147     printf("=========================================================\n");
1148     printf("Exception thrown:\n");
1149     if (location) {
1150       Handle<Script> script = location->script();
1151       Handle<Object> name(script->GetNameOrSourceURL(), this);
1152       printf("at ");
1153       if (name->IsString() && String::cast(*name)->length() > 0)
1154         String::cast(*name)->PrintOn(stdout);
1155       else
1156         printf("<anonymous>");
1157 // Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
1158 // initialize the line_ends array, so be careful when calling them.
1159 #ifdef DEBUG
1160       if (AllowHeapAllocation::IsAllowed()) {
1161 #else
1162       if ((false)) {
1163 #endif
1164         printf(", %d:%d - %d:%d\n",
1165                Script::GetLineNumber(script, location->start_pos()) + 1,
1166                Script::GetColumnNumber(script, location->start_pos()),
1167                Script::GetLineNumber(script, location->end_pos()) + 1,
1168                Script::GetColumnNumber(script, location->end_pos()));
1169         // Make sure to update the raw exception pointer in case it moved.
1170         raw_exception = *exception;
1171       } else {
1172         printf(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
1173       }
1174     }
1175     raw_exception->Print();
1176     printf("Stack Trace:\n");
1177     PrintStack(stdout);
1178     printf("=========================================================\n");
1179   }
1180 
1181   // Determine whether a message needs to be created for the given exception
1182   // depending on the following criteria:
1183   // 1) External v8::TryCatch missing: Always create a message because any
1184   //    JavaScript handler for a finally-block might re-throw to top-level.
1185   // 2) External v8::TryCatch exists: Only create a message if the handler
1186   //    captures messages or is verbose (which reports despite the catch).
1187   // 3) ReThrow from v8::TryCatch: The message from a previous throw still
1188   //    exists and we preserve it instead of creating a new message.
1189   bool requires_message = try_catch_handler() == nullptr ||
1190                           try_catch_handler()->is_verbose_ ||
1191                           try_catch_handler()->capture_message_;
1192   bool rethrowing_message = thread_local_top()->rethrowing_message_;
1193 
1194   thread_local_top()->rethrowing_message_ = false;
1195 
1196   // Notify debugger of exception.
1197   if (is_catchable_by_javascript(raw_exception)) {
1198     debug()->OnThrow(exception);
1199   }
1200 
1201   // Generate the message if required.
1202   if (requires_message && !rethrowing_message) {
1203     MessageLocation computed_location;
1204     // If no location was specified we try to use a computed one instead.
1205     if (location == nullptr && ComputeLocation(&computed_location)) {
1206       location = &computed_location;
1207     }
1208 
1209     if (bootstrapper()->IsActive()) {
1210       // It's not safe to try to make message objects or collect stack traces
1211       // while the bootstrapper is active since the infrastructure may not have
1212       // been properly initialized.
1213       ReportBootstrappingException(exception, location);
1214     } else {
1215       Handle<Object> message_obj = CreateMessage(exception, location);
1216       thread_local_top()->pending_message_obj_ = *message_obj;
1217 
1218       // For any exception not caught by JavaScript, even when an external
1219       // handler is present:
1220       // If the abort-on-uncaught-exception flag is specified, and if the
1221       // embedder didn't specify a custom uncaught exception callback,
1222       // or if the custom callback determined that V8 should abort, then
1223       // abort.
1224       if (FLAG_abort_on_uncaught_exception) {
1225         CatchType prediction = PredictExceptionCatcher();
1226         if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
1227             (!abort_on_uncaught_exception_callback_ ||
1228              abort_on_uncaught_exception_callback_(
1229                  reinterpret_cast<v8::Isolate*>(this)))) {
1230           // Prevent endless recursion.
1231           FLAG_abort_on_uncaught_exception = false;
1232           // This flag is intended for use by JavaScript developers, so
1233           // print a user-friendly stack trace (not an internal one).
1234           PrintF(stderr, "%s\n\nFROM\n",
1235                  MessageHandler::GetLocalizedMessage(this, message_obj).get());
1236           PrintCurrentStackTrace(stderr);
1237           base::OS::Abort();
1238         }
1239       }
1240     }
1241   }
1242 
1243   // Set the exception being thrown.
1244   set_pending_exception(*exception);
1245   return ReadOnlyRoots(heap()).exception();
1246 }
1247 
1248 
1249 Object* Isolate::ReThrow(Object* exception) {
1250   DCHECK(!has_pending_exception());
1251 
1252   // Set the exception being re-thrown.
1253   set_pending_exception(exception);
1254   return ReadOnlyRoots(heap()).exception();
1255 }
1256 
1257 
1258 Object* Isolate::UnwindAndFindHandler() {
1259   Object* exception = pending_exception();
1260 
1261   auto FoundHandler = [&](Context* context, Address instruction_start,
1262                           intptr_t handler_offset,
1263                           Address constant_pool_address, Address handler_sp,
1264                           Address handler_fp) {
1265     // Store information to be consumed by the CEntry.
1266     thread_local_top()->pending_handler_context_ = context;
1267     thread_local_top()->pending_handler_entrypoint_ =
1268         instruction_start + handler_offset;
1269     thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
1270     thread_local_top()->pending_handler_fp_ = handler_fp;
1271     thread_local_top()->pending_handler_sp_ = handler_sp;
1272 
1273     // Return and clear pending exception.
1274     clear_pending_exception();
1275     return exception;
1276   };
1277 
1278   // Special handling of termination exceptions, uncatchable by JavaScript and
1279   // Wasm code: we unwind the handlers until the top ENTRY handler is found.
1280   bool catchable_by_js = is_catchable_by_javascript(exception);
1281 
1282   // Compute handler and stack unwinding information by performing a full walk
1283   // over the stack and dispatching according to the frame type.
1284   for (StackFrameIterator iter(this);; iter.Advance()) {
1285     // Handler must exist.
1286     DCHECK(!iter.done());
1287 
1288     StackFrame* frame = iter.frame();
1289 
1290     switch (frame->type()) {
1291       case StackFrame::ENTRY:
1292       case StackFrame::CONSTRUCT_ENTRY: {
1293         // For JSEntryStub frames we always have a handler.
1294         StackHandler* handler = frame->top_handler();
1295 
1296         // Restore the next handler.
1297         thread_local_top()->handler_ = handler->next()->address();
1298 
1299         // Gather information from the handler.
1300         Code* code = frame->LookupCode();
1301         HandlerTable table(code);
1302         return FoundHandler(nullptr, code->InstructionStart(),
1303                             table.LookupReturn(0), code->constant_pool(),
1304                             handler->address() + StackHandlerConstants::kSize,
1305                             0);
1306       }
1307 
1308       case StackFrame::WASM_COMPILED: {
1309         if (trap_handler::IsThreadInWasm()) {
1310           trap_handler::ClearThreadInWasm();
1311         }
1312 
1313         if (!is_catchable_by_wasm(exception)) {
1314           break;
1315         }
1316         int stack_slots = 0;  // Will contain stack slot count of frame.
1317         WasmCompiledFrame* wasm_frame = static_cast<WasmCompiledFrame*>(frame);
1318         int offset = wasm_frame->LookupExceptionHandlerInTable(&stack_slots);
1319         if (offset < 0) break;
1320         // Compute the stack pointer from the frame pointer. This ensures that
1321         // argument slots on the stack are dropped as returning would.
1322         Address return_sp = frame->fp() +
1323                             StandardFrameConstants::kFixedFrameSizeAboveFp -
1324                             stack_slots * kPointerSize;
1325 
1326         // This is going to be handled by Wasm, so we need to set the TLS flag
1327         // again.
1328         trap_handler::SetThreadInWasm();
1329 
1330         set_wasm_caught_exception(exception);
1331         wasm::WasmCode* wasm_code =
1332             wasm_engine()->code_manager()->LookupCode(frame->pc());
1333         return FoundHandler(nullptr, wasm_code->instruction_start(), offset,
1334                             wasm_code->constant_pool(), return_sp, frame->fp());
1335       }
1336 
1337       case StackFrame::OPTIMIZED: {
1338         // For optimized frames we perform a lookup in the handler table.
1339         if (!catchable_by_js) break;
1340         OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
1341         int stack_slots = 0;  // Will contain stack slot count of frame.
1342         int offset =
1343             js_frame->LookupExceptionHandlerInTable(&stack_slots, nullptr);
1344         if (offset < 0) break;
1345         // Compute the stack pointer from the frame pointer. This ensures
1346         // that argument slots on the stack are dropped as returning would.
1347         Address return_sp = frame->fp() +
1348                             StandardFrameConstants::kFixedFrameSizeAboveFp -
1349                             stack_slots * kPointerSize;
1350 
1351         // Gather information from the frame.
1352         Code* code = frame->LookupCode();
1353 
1354         // TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
1355         // but do not have a code kind of OPTIMIZED_FUNCTION.
1356         if (code->kind() == Code::OPTIMIZED_FUNCTION &&
1357             code->marked_for_deoptimization()) {
1358           // If the target code is lazy deoptimized, we jump to the original
1359           // return address, but we make a note that we are throwing, so
1360           // that the deoptimizer can do the right thing.
1361           offset = static_cast<int>(frame->pc() - code->entry());
1362           set_deoptimizer_lazy_throw(true);
1363         }
1364 
1365         return FoundHandler(nullptr, code->InstructionStart(), offset,
1366                             code->constant_pool(), return_sp, frame->fp());
1367       }
1368 
1369       case StackFrame::STUB: {
1370         // Some stubs are able to handle exceptions.
1371         if (!catchable_by_js) break;
1372         StubFrame* stub_frame = static_cast<StubFrame*>(frame);
1373         Code* code = stub_frame->LookupCode();
1374         if (!code->IsCode() || code->kind() != Code::BUILTIN ||
1375             !code->handler_table_offset() || !code->is_turbofanned()) {
1376           break;
1377         }
1378 
1379         int stack_slots = 0;  // Will contain stack slot count of frame.
1380         int offset = stub_frame->LookupExceptionHandlerInTable(&stack_slots);
1381         if (offset < 0) break;
1382 
1383         // Compute the stack pointer from the frame pointer. This ensures
1384         // that argument slots on the stack are dropped as returning would.
1385         Address return_sp = frame->fp() +
1386                             StandardFrameConstants::kFixedFrameSizeAboveFp -
1387                             stack_slots * kPointerSize;
1388 
1389         return FoundHandler(nullptr, code->InstructionStart(), offset,
1390                             code->constant_pool(), return_sp, frame->fp());
1391       }
1392 
1393       case StackFrame::INTERPRETED: {
1394         // For interpreted frames we perform a range lookup in the handler table.
1395         if (!catchable_by_js) break;
1396         InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
1397         int register_slots = InterpreterFrameConstants::RegisterStackSlotCount(
1398             js_frame->GetBytecodeArray()->register_count());
1399         int context_reg = 0;  // Will contain register index holding context.
1400         int offset =
1401             js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
1402         if (offset < 0) break;
1403         // Compute the stack pointer from the frame pointer. This ensures that
1404         // argument slots on the stack are dropped as returning would.
1405         // Note: This is only needed for interpreted frames that have been
1406         //       materialized by the deoptimizer. If there is a handler frame
1407         //       in between then {frame->sp()} would already be correct.
1408         Address return_sp = frame->fp() -
1409                             InterpreterFrameConstants::kFixedFrameSizeFromFp -
1410                             register_slots * kPointerSize;
1411 
1412         // Patch the bytecode offset in the interpreted frame to reflect the
1413         // position of the exception handler. The special builtin below will
1414         // take care of continuing to dispatch at that position. Also restore
1415         // the correct context for the handler from the interpreter register.
1416         Context* context =
1417             Context::cast(js_frame->ReadInterpreterRegister(context_reg));
1418         js_frame->PatchBytecodeOffset(static_cast<int>(offset));
1419 
1420         Code* code =
1421             builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
1422         return FoundHandler(context, code->InstructionStart(), 0,
1423                             code->constant_pool(), return_sp, frame->fp());
1424       }
1425 
1426       case StackFrame::BUILTIN:
1427         // For builtin frames we are guaranteed not to find a handler.
1428         if (catchable_by_js) {
1429           CHECK_EQ(-1,
1430                    JavaScriptFrame::cast(frame)->LookupExceptionHandlerInTable(
1431                        nullptr, nullptr));
1432         }
1433         break;
1434 
1435       case StackFrame::WASM_INTERPRETER_ENTRY: {
1436         if (trap_handler::IsThreadInWasm()) {
1437           trap_handler::ClearThreadInWasm();
1438         }
1439         WasmInterpreterEntryFrame* interpreter_frame =
1440             WasmInterpreterEntryFrame::cast(frame);
1441         // TODO(wasm): Implement try-catch in the interpreter.
1442         interpreter_frame->debug_info()->Unwind(frame->fp());
1443       } break;
1444 
1445       case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
1446         // Builtin continuation frames with catch can handle exceptions.
1447         if (!catchable_by_js) break;
1448         JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
1449             JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
1450         js_frame->SetException(exception);
1451 
1452         // Reconstruct the stack pointer from the frame pointer.
1453         Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
1454         Code* code = js_frame->LookupCode();
1455         return FoundHandler(nullptr, code->InstructionStart(), 0,
1456                             code->constant_pool(), return_sp, frame->fp());
1457       } break;
1458 
1459       default:
1460         // All other types cannot handle exceptions.
1461         break;
1462     }
1463 
1464     if (frame->is_optimized()) {
1465       // Remove per-frame stored materialized objects.
1466       bool removed = materialized_object_store_->Remove(frame->fp());
1467       USE(removed);
1468       // If there were any materialized objects, the code should be
1469       // marked for deopt.
1470       DCHECK_IMPLIES(removed, frame->LookupCode()->marked_for_deoptimization());
1471     }
1472   }
1473 
1474   UNREACHABLE();
1475 }
1476 
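// Editorial note, not part of the original source: from an embedder's point of
// view, the unwinding above is what ultimately surfaces an exception in a
// v8::TryCatch when no JavaScript handler exists below the entry frame. A
// minimal usage sketch, assuming the usual public API and existing `script`,
// `context` and `isolate` locals:
//
//   v8::TryCatch try_catch(isolate);
//   v8::Local<v8::Value> result;
//   if (!script->Run(context).ToLocal(&result)) {
//     // The pending exception computed above was propagated to this external
//     // handler; inspect it via try_catch.Exception().
//   }
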
1477 namespace {
1478 HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
1479   HandlerTable::CatchPrediction prediction;
1480   if (frame->is_optimized()) {
1481     if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
1482       // This optimized frame will catch. Its handler table does not include
1483       // exception prediction, and we need to use the corresponding handler
1484       // tables on the unoptimized code objects.
1485       std::vector<FrameSummary> summaries;
1486       frame->Summarize(&summaries);
1487       for (size_t i = summaries.size(); i != 0; i--) {
1488         const FrameSummary& summary = summaries[i - 1];
1489         Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
1490         if (code->IsCode() && code->kind() == AbstractCode::BUILTIN) {
1491           prediction = code->GetCode()->GetBuiltinCatchPrediction();
1492           if (prediction == HandlerTable::UNCAUGHT) continue;
1493           return prediction;
1494         }
1495 
1496         // Must have been constructed from a bytecode array.
1497         CHECK_EQ(AbstractCode::INTERPRETED_FUNCTION, code->kind());
1498         int code_offset = summary.code_offset();
1499         HandlerTable table(code->GetBytecodeArray());
1500         int index = table.LookupRange(code_offset, nullptr, &prediction);
1501         if (index <= 0) continue;
1502         if (prediction == HandlerTable::UNCAUGHT) continue;
1503         return prediction;
1504       }
1505     }
1506   } else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
1507     return prediction;
1508   }
1509   return HandlerTable::UNCAUGHT;
1510 }
1511 
1512 Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
1513   switch (prediction) {
1514     case HandlerTable::UNCAUGHT:
1515       return Isolate::NOT_CAUGHT;
1516     case HandlerTable::CAUGHT:
1517       return Isolate::CAUGHT_BY_JAVASCRIPT;
1518     case HandlerTable::PROMISE:
1519       return Isolate::CAUGHT_BY_PROMISE;
1520     case HandlerTable::DESUGARING:
1521       return Isolate::CAUGHT_BY_DESUGARING;
1522     case HandlerTable::ASYNC_AWAIT:
1523       return Isolate::CAUGHT_BY_ASYNC_AWAIT;
1524     default:
1525       UNREACHABLE();
1526   }
1527 }
1528 }  // anonymous namespace
1529 
1530 Isolate::CatchType Isolate::PredictExceptionCatcher() {
1531   Address external_handler = thread_local_top()->try_catch_handler_address();
1532   if (IsExternalHandlerOnTop(nullptr)) return CAUGHT_BY_EXTERNAL;
1533 
1534   // Search for an exception handler by performing a full walk over the stack.
1535   for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
1536     StackFrame* frame = iter.frame();
1537 
1538     switch (frame->type()) {
1539       case StackFrame::ENTRY:
1540       case StackFrame::CONSTRUCT_ENTRY: {
1541         Address entry_handler = frame->top_handler()->next()->address();
1542         // The exception has been externally caught if and only if there is an
1543         // external handler which is on top of the top-most JS_ENTRY handler.
1544         if (external_handler != kNullAddress &&
1545             !try_catch_handler()->is_verbose_) {
1546           if (entry_handler == kNullAddress ||
1547               entry_handler > external_handler) {
1548             return CAUGHT_BY_EXTERNAL;
1549           }
1550         }
1551       } break;
1552 
1553       // For JavaScript frames we perform a lookup in the handler table.
1554       case StackFrame::OPTIMIZED:
1555       case StackFrame::INTERPRETED:
1556       case StackFrame::BUILTIN: {
1557         JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
1558         Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
1559         if (prediction == NOT_CAUGHT) break;
1560         return prediction;
1561       } break;
1562 
1563       case StackFrame::STUB: {
1564         Handle<Code> code(frame->LookupCode(), this);
1565         if (!code->IsCode() || code->kind() != Code::BUILTIN ||
1566             !code->handler_table_offset() || !code->is_turbofanned()) {
1567           break;
1568         }
1569 
1570         CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
1571         if (prediction != NOT_CAUGHT) return prediction;
1572       } break;
1573 
1574       case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
1575         Handle<Code> code(frame->LookupCode(), this);
1576         CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
1577         if (prediction != NOT_CAUGHT) return prediction;
1578       } break;
1579 
1580       default:
1581         // All other types cannot handle exceptions.
1582         break;
1583     }
1584   }
1585 
1586   // Handler not found.
1587   return NOT_CAUGHT;
1588 }
1589 
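// Editorial note, not part of the original source: catch prediction is a
// heuristic only. It is consulted, for example, by the debugger ("pause on
// uncaught exceptions") and by GetPromiseOnStackOnThrow() below; it does not
// affect the actual unwinding performed by UnwindAndFindHandler().
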
1590 Object* Isolate::ThrowIllegalOperation() {
1591   if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
1592   return Throw(ReadOnlyRoots(heap()).illegal_access_string());
1593 }
1594 
1595 
1596 void Isolate::ScheduleThrow(Object* exception) {
1597   // When scheduling a throw we first throw the exception to get the
1598   // error reporting (in case it is uncaught) before rescheduling it.
1599   Throw(exception);
1600   PropagatePendingExceptionToExternalTryCatch();
1601   if (has_pending_exception()) {
1602     thread_local_top()->scheduled_exception_ = pending_exception();
1603     thread_local_top()->external_caught_exception_ = false;
1604     clear_pending_exception();
1605   }
1606 }
1607 
1608 
1609 void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
1610   DCHECK(handler == try_catch_handler());
1611   DCHECK(handler->HasCaught());
1612   DCHECK(handler->rethrow_);
1613   DCHECK(handler->capture_message_);
1614   Object* message = reinterpret_cast<Object*>(handler->message_obj_);
1615   DCHECK(message->IsJSMessageObject() || message->IsTheHole(this));
1616   thread_local_top()->pending_message_obj_ = message;
1617 }
1618 
1619 
1620 void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
1621   DCHECK(has_scheduled_exception());
1622   if (scheduled_exception() == handler->exception_) {
1623     DCHECK(scheduled_exception() !=
1624            ReadOnlyRoots(heap()).termination_exception());
1625     clear_scheduled_exception();
1626   }
1627   if (thread_local_top_.pending_message_obj_ == handler->message_obj_) {
1628     clear_pending_message();
1629   }
1630 }
1631 
1632 
1633 Object* Isolate::PromoteScheduledException() {
1634   Object* thrown = scheduled_exception();
1635   clear_scheduled_exception();
1636   // Re-throw the exception to avoid getting repeated error reporting.
1637   return ReThrow(thrown);
1638 }
1639 
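// Editorial note, not part of the original source: a "scheduled" exception is
// one parked in thread_local_top()->scheduled_exception_ while control is
// outside JavaScript (see ScheduleThrow() above and
// OptionalRescheduleException() below); PromoteScheduledException() turns it
// back into a pending exception once execution re-enters V8.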
1640 
1641 void Isolate::PrintCurrentStackTrace(FILE* out) {
1642   for (StackTraceFrameIterator it(this); !it.done(); it.Advance()) {
1643     if (!it.is_javascript()) continue;
1644 
1645     HandleScope scope(this);
1646     JavaScriptFrame* frame = it.javascript_frame();
1647 
1648     Handle<Object> receiver(frame->receiver(), this);
1649     Handle<JSFunction> function(frame->function(), this);
1650     Handle<AbstractCode> code;
1651     int offset;
1652     if (frame->is_interpreted()) {
1653       InterpretedFrame* interpreted_frame = InterpretedFrame::cast(frame);
1654       code = handle(AbstractCode::cast(interpreted_frame->GetBytecodeArray()),
1655                     this);
1656       offset = interpreted_frame->GetBytecodeOffset();
1657     } else {
1658       code = handle(AbstractCode::cast(frame->LookupCode()), this);
1659       offset = static_cast<int>(frame->pc() - code->InstructionStart());
1660     }
1661 
1662     JSStackFrame site(this, receiver, function, code, offset);
1663     Handle<String> line = site.ToString().ToHandleChecked();
1664     if (line->length() > 0) {
1665       line->PrintOn(out);
1666       PrintF(out, "\n");
1667     }
1668   }
1669 }
1670 
1671 bool Isolate::ComputeLocation(MessageLocation* target) {
1672   StackTraceFrameIterator it(this);
1673   if (it.done()) return false;
1674   StandardFrame* frame = it.frame();
1675   // Compute the location from the function and the relocation info of the
1676   // baseline code. For optimized code this will use the deoptimization
1677   // information to get canonical location information.
1678   std::vector<FrameSummary> frames;
1679   frame->Summarize(&frames);
1680   FrameSummary& summary = frames.back();
1681   int pos = summary.SourcePosition();
1682   Handle<SharedFunctionInfo> shared;
1683   Handle<Object> script = summary.script();
1684   if (!script->IsScript() ||
1685       (Script::cast(*script)->source()->IsUndefined(this))) {
1686     return false;
1687   }
1688 
1689   if (summary.IsJavaScript()) {
1690     shared = handle(summary.AsJavaScript().function()->shared(), this);
1691   }
1692   *target = MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
1693   return true;
1694 }
1695 
1696 bool Isolate::ComputeLocationFromException(MessageLocation* target,
1697                                            Handle<Object> exception) {
1698   if (!exception->IsJSObject()) return false;
1699 
1700   Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
1701   Handle<Object> start_pos = JSReceiver::GetDataProperty(
1702       Handle<JSObject>::cast(exception), start_pos_symbol);
1703   if (!start_pos->IsSmi()) return false;
1704   int start_pos_value = Handle<Smi>::cast(start_pos)->value();
1705 
1706   Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
1707   Handle<Object> end_pos = JSReceiver::GetDataProperty(
1708       Handle<JSObject>::cast(exception), end_pos_symbol);
1709   if (!end_pos->IsSmi()) return false;
1710   int end_pos_value = Handle<Smi>::cast(end_pos)->value();
1711 
1712   Handle<Name> script_symbol = factory()->error_script_symbol();
1713   Handle<Object> script = JSReceiver::GetDataProperty(
1714       Handle<JSObject>::cast(exception), script_symbol);
1715   if (!script->IsScript()) return false;
1716 
1717   Handle<Script> cast_script(Script::cast(*script), this);
1718   *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
1719   return true;
1720 }
1721 
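// Editorial note, not part of the original source: error_start_pos_symbol,
// error_end_pos_symbol and error_script_symbol are private symbols that V8 may
// attach to exception objects; when they are absent (e.g. for a thrown
// non-Error value), CreateMessage() below falls back to
// ComputeLocationFromStackTrace() and then ComputeLocation().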
1722 
1723 bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
1724                                             Handle<Object> exception) {
1725   if (!exception->IsJSObject()) return false;
1726   Handle<Name> key = factory()->stack_trace_symbol();
1727   Handle<Object> property =
1728       JSReceiver::GetDataProperty(Handle<JSObject>::cast(exception), key);
1729   if (!property->IsJSArray()) return false;
1730   Handle<JSArray> simple_stack_trace = Handle<JSArray>::cast(property);
1731 
1732   Handle<FrameArray> elements(FrameArray::cast(simple_stack_trace->elements()),
1733                               this);
1734 
1735   const int frame_count = elements->FrameCount();
1736   for (int i = 0; i < frame_count; i++) {
1737     if (elements->IsWasmFrame(i) || elements->IsAsmJsWasmFrame(i)) {
1738       Handle<WasmInstanceObject> instance(elements->WasmInstance(i), this);
1739       uint32_t func_index =
1740           static_cast<uint32_t>(elements->WasmFunctionIndex(i)->value());
1741       wasm::WasmCode* wasm_code = reinterpret_cast<wasm::WasmCode*>(
1742           elements->WasmCodeObject(i)->foreign_address());
1743       int code_offset = elements->Offset(i)->value();
1744       bool is_at_number_conversion =
1745           elements->IsAsmJsWasmFrame(i) &&
1746           elements->Flags(i)->value() & FrameArray::kAsmJsAtNumberConversion;
1747       int byte_offset =
1748           FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
1749               wasm_code, code_offset);
1750       int pos = WasmModuleObject::GetSourcePosition(
1751           handle(instance->module_object(), this), func_index, byte_offset,
1752           is_at_number_conversion);
1753       Handle<Script> script(instance->module_object()->script(), this);
1754 
1755       *target = MessageLocation(script, pos, pos + 1);
1756       return true;
1757     }
1758 
1759     Handle<JSFunction> fun = handle(elements->Function(i), this);
1760     if (!fun->shared()->IsSubjectToDebugging()) continue;
1761 
1762     Object* script = fun->shared()->script();
1763     if (script->IsScript() &&
1764         !(Script::cast(script)->source()->IsUndefined(this))) {
1765       AbstractCode* abstract_code = elements->Code(i);
1766       const int code_offset = elements->Offset(i)->value();
1767       const int pos = abstract_code->SourcePosition(code_offset);
1768 
1769       Handle<Script> casted_script(Script::cast(script), this);
1770       *target = MessageLocation(casted_script, pos, pos + 1);
1771       return true;
1772     }
1773   }
1774   return false;
1775 }
1776 
1777 
1778 Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
1779                                                MessageLocation* location) {
1780   Handle<FixedArray> stack_trace_object;
1781   if (capture_stack_trace_for_uncaught_exceptions_) {
1782     if (exception->IsJSError()) {
1783       // We fetch the stack trace that corresponds to this error object.
1784       // If the lookup fails, the exception is probably not a valid Error
1785       // object. In that case, we fall through and capture the stack trace
1786       // at this throw site.
1787       stack_trace_object =
1788           GetDetailedStackTrace(Handle<JSObject>::cast(exception));
1789     }
1790     if (stack_trace_object.is_null()) {
1791       // Not an error object; we capture the stack and location at the throw site.
1792       stack_trace_object = CaptureCurrentStackTrace(
1793           stack_trace_for_uncaught_exceptions_frame_limit_,
1794           stack_trace_for_uncaught_exceptions_options_);
1795     }
1796   }
1797   MessageLocation computed_location;
1798   if (location == nullptr &&
1799       (ComputeLocationFromException(&computed_location, exception) ||
1800        ComputeLocationFromStackTrace(&computed_location, exception) ||
1801        ComputeLocation(&computed_location))) {
1802     location = &computed_location;
1803   }
1804 
1805   return MessageHandler::MakeMessageObject(
1806       this, MessageTemplate::kUncaughtException, location, exception,
1807       stack_trace_object);
1808 }
1809 
1810 
1811 bool Isolate::IsJavaScriptHandlerOnTop(Object* exception) {
1812   DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
1813 
1814   // For uncatchable exceptions, the JavaScript handler cannot be on top.
1815   if (!is_catchable_by_javascript(exception)) return false;
1816 
1817   // Get the top-most JS_ENTRY handler; it cannot be on top if it doesn't exist.
1818   Address entry_handler = Isolate::handler(thread_local_top());
1819   if (entry_handler == kNullAddress) return false;
1820 
1821   // Get the address of the external handler so we can compare the address to
1822   // determine which one is closer to the top of the stack.
1823   Address external_handler = thread_local_top()->try_catch_handler_address();
1824   if (external_handler == kNullAddress) return true;
1825 
1826   // The exception has been externally caught if and only if there is an
1827   // external handler which is on top of the top-most JS_ENTRY handler.
1828   //
1829   // Note that finally clauses would re-throw an exception unless it's aborted
1830   // by jumps in control flow (like return, break, etc.), and we'll have another
1831   // chance to set a proper v8::TryCatch later.
1832   return (entry_handler < external_handler);
1833 }
1834 
1835 
1836 bool Isolate::IsExternalHandlerOnTop(Object* exception) {
1837   DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
1838 
1839   // Get the address of the external handler so we can compare the address to
1840   // determine which one is closer to the top of the stack.
1841   Address external_handler = thread_local_top()->try_catch_handler_address();
1842   if (external_handler == kNullAddress) return false;
1843 
1844   // For uncatchable exceptions, the external handler is always on top.
1845   if (!is_catchable_by_javascript(exception)) return true;
1846 
1847   // Get the top-most JS_ENTRY handler; it cannot be on top if it doesn't exist.
1848   Address entry_handler = Isolate::handler(thread_local_top());
1849   if (entry_handler == kNullAddress) return true;
1850 
1851   // The exception has been externally caught if and only if there is an
1852   // external handler which is on top of the top-most JS_ENTRY handler.
1853   //
1854   // Note that finally clauses would re-throw an exception unless it's aborted
1855   // by jumps in control flow (like return, break, etc.), and we'll have another
1856   // chance to set a proper v8::TryCatch later.
1857   return (entry_handler > external_handler);
1858 }
1859 
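// Editorial note, not part of the original source: the address comparisons in
// the two predicates above rely on the machine stack growing towards lower
// addresses on the supported platforms, so a numerically smaller handler
// address means that handler was set up more recently (i.e. it is closer to
// the top of the stack).
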
1860 void Isolate::ReportPendingMessagesImpl(bool report_externally) {
1861   Object* exception = pending_exception();
1862 
1863   // Clear the pending message object early to avoid endless recursion.
1864   Object* message_obj = thread_local_top_.pending_message_obj_;
1865   clear_pending_message();
1866 
1867   // For uncatchable exceptions we do nothing. If needed, the exception and the
1868   // message have already been propagated to v8::TryCatch.
1869   if (!is_catchable_by_javascript(exception)) return;
1870 
1871   // Determine whether the message needs to be reported to all message handlers
1872   // depending on whether an external v8::TryCatch or an internal JavaScript
1873   // handler is on top.
1874   bool should_report_exception;
1875   if (report_externally) {
1876     // Only report the exception if the external handler is verbose.
1877     should_report_exception = try_catch_handler()->is_verbose_;
1878   } else {
1879     // Report the exception if it isn't caught by JavaScript code.
1880     should_report_exception = !IsJavaScriptHandlerOnTop(exception);
1881   }
1882 
1883   // Actually report the pending message to all message handlers.
1884   if (!message_obj->IsTheHole(this) && should_report_exception) {
1885     HandleScope scope(this);
1886     Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
1887     Handle<Script> script(message->script(), this);
1888     int start_pos = message->start_position();
1889     int end_pos = message->end_position();
1890     MessageLocation location(script, start_pos, end_pos);
1891     MessageHandler::ReportMessage(this, &location, message);
1892   }
1893 }
1894 
1895 void Isolate::ReportPendingMessages() {
1896   DCHECK(AllowExceptions::IsAllowed(this));
1897 
1898   // The embedder might run script in response to an exception.
1899   AllowJavascriptExecutionDebugOnly allow_script(this);
1900 
1901   Object* exception = pending_exception();
1902 
1903   // Try to propagate the exception to an external v8::TryCatch handler. If
1904   // propagation was unsuccessful, then we will get another chance at reporting
1905   // the pending message if the exception is re-thrown.
1906   bool has_been_propagated = PropagatePendingExceptionToExternalTryCatch();
1907   if (!has_been_propagated) return;
1908 
1909   ReportPendingMessagesImpl(IsExternalHandlerOnTop(exception));
1910 }
1911 
1912 void Isolate::ReportPendingMessagesFromJavaScript() {
1913   DCHECK(AllowExceptions::IsAllowed(this));
1914 
1915   auto IsHandledByJavaScript = [=]() {
1916     // In this situation, the exception is always a non-terminating exception.
1917 
1918     // Get the top-most JS_ENTRY handler; it cannot be on top if it doesn't exist.
1919     Address entry_handler = Isolate::handler(thread_local_top());
1920     DCHECK_NE(entry_handler, kNullAddress);
1921     entry_handler =
1922         reinterpret_cast<StackHandler*>(entry_handler)->next()->address();
1923 
1924     // Get the address of the external handler so we can compare the address to
1925     // determine which one is closer to the top of the stack.
1926     Address external_handler = thread_local_top()->try_catch_handler_address();
1927     if (external_handler == kNullAddress) return true;
1928 
1929     return (entry_handler < external_handler);
1930   };
1931 
1932   auto IsHandledExternally = [=]() {
1933     Address external_handler = thread_local_top()->try_catch_handler_address();
1934     if (external_handler == kNullAddress) return false;
1935 
1936     // Get the top-most JS_ENTRY handler; it cannot be on top if it doesn't exist.
1937     Address entry_handler = Isolate::handler(thread_local_top());
1938     DCHECK_NE(entry_handler, kNullAddress);
1939     entry_handler =
1940         reinterpret_cast<StackHandler*>(entry_handler)->next()->address();
1941     return (entry_handler > external_handler);
1942   };
1943 
1944   auto PropagateToExternalHandler = [=]() {
1945     if (IsHandledByJavaScript()) {
1946       thread_local_top_.external_caught_exception_ = false;
1947       return false;
1948     }
1949 
1950     if (!IsHandledExternally()) {
1951       thread_local_top_.external_caught_exception_ = false;
1952       return true;
1953     }
1954 
1955     thread_local_top_.external_caught_exception_ = true;
1956     v8::TryCatch* handler = try_catch_handler();
1957     DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
1958            thread_local_top_.pending_message_obj_->IsTheHole(this));
1959     handler->can_continue_ = true;
1960     handler->has_terminated_ = false;
1961     handler->exception_ = pending_exception();
1962     // Propagate to the external try-catch only if we got an actual message.
1963     if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;
1964 
1965     handler->message_obj_ = thread_local_top_.pending_message_obj_;
1966     return true;
1967   };
1968 
1969   // Try to propagate to an external v8::TryCatch handler.
1970   if (!PropagateToExternalHandler()) return;
1971 
1972   ReportPendingMessagesImpl(true);
1973 }
1974 
1975 MessageLocation Isolate::GetMessageLocation() {
1976   DCHECK(has_pending_exception());
1977 
1978   if (thread_local_top_.pending_exception_ !=
1979           ReadOnlyRoots(heap()).termination_exception() &&
1980       !thread_local_top_.pending_message_obj_->IsTheHole(this)) {
1981     Handle<JSMessageObject> message_obj(
1982         JSMessageObject::cast(thread_local_top_.pending_message_obj_), this);
1983     Handle<Script> script(message_obj->script(), this);
1984     int start_pos = message_obj->start_position();
1985     int end_pos = message_obj->end_position();
1986     return MessageLocation(script, start_pos, end_pos);
1987   }
1988 
1989   return MessageLocation();
1990 }
1991 
1992 
1993 bool Isolate::OptionalRescheduleException(bool is_bottom_call) {
1994   DCHECK(has_pending_exception());
1995   PropagatePendingExceptionToExternalTryCatch();
1996 
1997   bool is_termination_exception =
1998       pending_exception() == ReadOnlyRoots(this).termination_exception();
1999 
2000   // Do not reschedule the exception if this is the bottom call.
2001   bool clear_exception = is_bottom_call;
2002 
2003   if (is_termination_exception) {
2004     if (is_bottom_call) {
2005       thread_local_top()->external_caught_exception_ = false;
2006       clear_pending_exception();
2007       return false;
2008     }
2009   } else if (thread_local_top()->external_caught_exception_) {
2010     // If the exception is externally caught, clear it if there are no
2011     // JavaScript frames on the way to the C++ frame that has the
2012     // external handler.
2013     DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
2014     Address external_handler_address =
2015         thread_local_top()->try_catch_handler_address();
2016     JavaScriptFrameIterator it(this);
2017     if (it.done() || (it.frame()->sp() > external_handler_address)) {
2018       clear_exception = true;
2019     }
2020   }
2021 
2022   // Clear the exception if needed.
2023   if (clear_exception) {
2024     thread_local_top()->external_caught_exception_ = false;
2025     clear_pending_exception();
2026     return false;
2027   }
2028 
2029   // Reschedule the exception.
2030   thread_local_top()->scheduled_exception_ = pending_exception();
2031   clear_pending_exception();
2032   return true;
2033 }
2034 
2035 void Isolate::PushPromise(Handle<JSObject> promise) {
2036   ThreadLocalTop* tltop = thread_local_top();
2037   PromiseOnStack* prev = tltop->promise_on_stack_;
2038   Handle<JSObject> global_promise = global_handles()->Create(*promise);
2039   tltop->promise_on_stack_ = new PromiseOnStack(global_promise, prev);
2040 }
2041 
2042 
2043 void Isolate::PopPromise() {
2044   ThreadLocalTop* tltop = thread_local_top();
2045   if (tltop->promise_on_stack_ == nullptr) return;
2046   PromiseOnStack* prev = tltop->promise_on_stack_->prev();
2047   Handle<Object> global_promise = tltop->promise_on_stack_->promise();
2048   delete tltop->promise_on_stack_;
2049   tltop->promise_on_stack_ = prev;
2050   global_handles()->Destroy(global_promise.location());
2051 }
2052 
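// Editorial note, not part of the original source: promise_on_stack_ is a
// singly-linked stack of global handles. PushPromise/PopPromise above are used
// by the promise/debug machinery to record which promise is currently being
// settled, so that GetPromiseOnStackOnThrow() below can attribute a thrown
// exception to it during catch prediction.
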
2053 namespace {
2054 bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
2055                                                 Handle<JSPromise> promise);
2056 
2057 bool PromiseHandlerCheck(Isolate* isolate, Handle<JSReceiver> handler,
2058                          Handle<JSReceiver> deferred_promise) {
2059   // Recurse to the forwarding Promise, if any. This may be due to
2060   //  - await reaction forwarding to the throwaway Promise, which has
2061   //    a dependency edge to the outer Promise.
2062   //  - PromiseIdResolveHandler forwarding to the output of .then
2063   //  - Promise.all/Promise.race forwarding to a throwaway Promise, which
2064   //    has a dependency edge to the generated outer Promise.
2065   // Otherwise, this is a real reject handler for the Promise.
2066   Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
2067   Handle<Object> forwarding_handler = JSReceiver::GetDataProperty(handler, key);
2068   if (forwarding_handler->IsUndefined(isolate)) {
2069     return true;
2070   }
2071 
2072   if (!deferred_promise->IsJSPromise()) {
2073     return true;
2074   }
2075 
2076   return InternalPromiseHasUserDefinedRejectHandler(
2077       isolate, Handle<JSPromise>::cast(deferred_promise));
2078 }
2079 
2080 bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
2081                                                 Handle<JSPromise> promise) {
2082   // If this promise was marked as being handled by a catch block
2083   // in an async function, then it has a user-defined reject handler.
2084   if (promise->handled_hint()) return true;
2085 
2086   // If this Promise is subsumed by another Promise (a Promise resolved
2087   // with another Promise, or an intermediate, hidden, throwaway Promise
2088   // within async/await), then recurse on the outer Promise.
2089   // In this case, the dependency is one possible way that the Promise
2090   // could be resolved, so it does not subsume the other following cases.
2091   Handle<Symbol> key = isolate->factory()->promise_handled_by_symbol();
2092   Handle<Object> outer_promise_obj = JSObject::GetDataProperty(promise, key);
2093   if (outer_promise_obj->IsJSPromise() &&
2094       InternalPromiseHasUserDefinedRejectHandler(
2095           isolate, Handle<JSPromise>::cast(outer_promise_obj))) {
2096     return true;
2097   }
2098 
2099   if (promise->status() == Promise::kPending) {
2100     for (Handle<Object> current(promise->reactions(), isolate);
2101          !current->IsSmi();) {
2102       Handle<PromiseReaction> reaction = Handle<PromiseReaction>::cast(current);
2103       Handle<HeapObject> promise_or_capability(
2104           reaction->promise_or_capability(), isolate);
2105       Handle<JSPromise> promise = Handle<JSPromise>::cast(
2106           promise_or_capability->IsJSPromise()
2107               ? promise_or_capability
2108               : handle(Handle<PromiseCapability>::cast(promise_or_capability)
2109                            ->promise(),
2110                        isolate));
2111       if (reaction->reject_handler()->IsUndefined(isolate)) {
2112         if (InternalPromiseHasUserDefinedRejectHandler(isolate, promise)) {
2113           return true;
2114         }
2115       } else {
2116         Handle<JSReceiver> current_handler(
2117             JSReceiver::cast(reaction->reject_handler()), isolate);
2118         if (PromiseHandlerCheck(isolate, current_handler, promise)) {
2119           return true;
2120         }
2121       }
2122       current = handle(reaction->next(), isolate);
2123     }
2124   }
2125 
2126   return false;
2127 }
2128 
2129 }  // namespace
2130 
2131 bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<Object> promise) {
2132   if (!promise->IsJSPromise()) return false;
2133   return InternalPromiseHasUserDefinedRejectHandler(
2134       this, Handle<JSPromise>::cast(promise));
2135 }
2136 
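// Editorial note, not part of the original source: intuitively, a promise has
// a "user-defined" reject handler if some reaction along its dependency chain
// was installed by user code (e.g. via .catch() or the second argument of
// .then()), as opposed to the internal forwarding handlers installed by
// async/await and Promise.all/Promise.race, which PromiseHandlerCheck() above
// filters out.
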
2137 Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
2138   Handle<Object> undefined = factory()->undefined_value();
2139   ThreadLocalTop* tltop = thread_local_top();
2140   if (tltop->promise_on_stack_ == nullptr) return undefined;
2141   // Find the top-most try-catch or try-finally handler.
2142   CatchType prediction = PredictExceptionCatcher();
2143   if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
2144     return undefined;
2145   }
2146   Handle<Object> retval = undefined;
2147   PromiseOnStack* promise_on_stack = tltop->promise_on_stack_;
2148   for (StackFrameIterator it(this); !it.done(); it.Advance()) {
2149     StackFrame* frame = it.frame();
2150     HandlerTable::CatchPrediction catch_prediction;
2151     if (frame->is_java_script()) {
2152       catch_prediction = PredictException(JavaScriptFrame::cast(frame));
2153     } else if (frame->type() == StackFrame::STUB) {
2154       Code* code = frame->LookupCode();
2155       if (!code->IsCode() || code->kind() != Code::BUILTIN ||
2156           !code->handler_table_offset() || !code->is_turbofanned()) {
2157         continue;
2158       }
2159       catch_prediction = code->GetBuiltinCatchPrediction();
2160     } else {
2161       continue;
2162     }
2163 
2164     switch (catch_prediction) {
2165       case HandlerTable::UNCAUGHT:
2166         continue;
2167       case HandlerTable::CAUGHT:
2168       case HandlerTable::DESUGARING:
2169         if (retval->IsJSPromise()) {
2170           // Caught the result of an inner async/await invocation.
2171           // Mark the inner promise as caught in the "synchronous case" so
2172           // that Debug::OnException will see it. In the synchronous case,
2173           // namely in the code in an async function before the first
2174           // await, the function which has this exception event has not yet
2175           // returned, so the generated Promise has not yet been marked
2176           // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
2177           Handle<JSPromise>::cast(retval)->set_handled_hint(true);
2178         }
2179         return retval;
2180       case HandlerTable::PROMISE:
2181         return promise_on_stack
2182                    ? Handle<Object>::cast(promise_on_stack->promise())
2183                    : undefined;
2184       case HandlerTable::ASYNC_AWAIT: {
2185         // If in the initial portion of async/await, continue the loop to pop up
2186         // successive async/await stack frames until an asynchronous one with
2187         // dependents is found, or a non-async stack frame is encountered, in
2188         // order to handle the synchronous async/await catch prediction case:
2189         // assume that async function calls are awaited.
2190         if (!promise_on_stack) return retval;
2191         retval = promise_on_stack->promise();
2192         if (PromiseHasUserDefinedRejectHandler(retval)) {
2193           return retval;
2194         }
2195         promise_on_stack = promise_on_stack->prev();
2196         continue;
2197       }
2198     }
2199   }
2200   return retval;
2201 }
2202 
2203 
2204 void Isolate::SetCaptureStackTraceForUncaughtExceptions(
2205       bool capture,
2206       int frame_limit,
2207       StackTrace::StackTraceOptions options) {
2208   capture_stack_trace_for_uncaught_exceptions_ = capture;
2209   stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
2210   stack_trace_for_uncaught_exceptions_options_ = options;
2211 }
2212 
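// Editorial note, not part of the original source: this is the internal
// counterpart of the public v8::Isolate API of the same name. A minimal
// embedder usage sketch, assuming the usual public headers:
//
//   isolate->SetCaptureStackTraceForUncaughtExceptions(
//       true, 10 /* frame_limit */, v8::StackTrace::kDetailed);
//
// With capturing enabled, CreateMessage() above attaches a detailed stack
// trace to the message object created for an uncaught exception.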
2213 
2214 void Isolate::SetAbortOnUncaughtExceptionCallback(
2215     v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
2216   abort_on_uncaught_exception_callback_ = callback;
2217 }
2218 
2219 bool Isolate::AreWasmThreadsEnabled(Handle<Context> context) {
2220   if (wasm_threads_enabled_callback()) {
2221     v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
2222     return wasm_threads_enabled_callback()(api_context);
2223   }
2224   return FLAG_experimental_wasm_threads;
2225 }
2226 
2227 Handle<Context> Isolate::GetIncumbentContext() {
2228   JavaScriptFrameIterator it(this);
2229 
2230   // 1st candidate: most-recently-entered author function's context
2231   // if it's newer than the last Context::BackupIncumbentScope entry.
2232   if (!it.done() &&
2233       static_cast<const void*>(it.frame()) >
2234           static_cast<const void*>(top_backup_incumbent_scope())) {
2235     Context* context = Context::cast(it.frame()->context());
2236     return Handle<Context>(context->native_context(), this);
2237   }
2238 
2239   // 2nd candidate: the last Context::BackupIncumbentScope's incumbent context, if any.
2240   if (top_backup_incumbent_scope()) {
2241     return Utils::OpenHandle(
2242         *top_backup_incumbent_scope()->backup_incumbent_context_);
2243   }
2244 
2245   // Last candidate: the entered context.
2246   // Given that no other author function is running, there must be
2247   // no cross-context function running, so the incumbent realm must match
2248   // the entry realm.
2249   v8::Local<v8::Context> entered_context =
2250       reinterpret_cast<v8::Isolate*>(this)->GetEnteredContext();
2251   return Utils::OpenHandle(*entered_context);
2252 }
2253 
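// Editorial note, not part of the original source: "incumbent realm" is the
// HTML-spec notion of the realm of the most recently entered author function.
// Embedders that call into V8 from outside any JavaScript frame can make the
// intended incumbent context explicit with v8::Context::BackupIncumbentScope,
// which is what top_backup_incumbent_scope() above consults.
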
2254 char* Isolate::ArchiveThread(char* to) {
2255   MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
2256           sizeof(ThreadLocalTop));
2257   InitializeThreadLocal();
2258   clear_pending_exception();
2259   clear_pending_message();
2260   clear_scheduled_exception();
2261   return to + sizeof(ThreadLocalTop);
2262 }
2263 
2264 
2265 char* Isolate::RestoreThread(char* from) {
2266   MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
2267           sizeof(ThreadLocalTop));
2268 // This might be just paranoia, but it seems to be needed in case a
2269 // thread_local_top_ is restored on a separate OS thread.
2270 #ifdef USE_SIMULATOR
2271   thread_local_top()->simulator_ = Simulator::current(this);
2272 #endif
2273   DCHECK(context() == nullptr || context()->IsContext());
2274   return from + sizeof(ThreadLocalTop);
2275 }
2276 
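// Editorial note, not part of the original source: ArchiveThread/RestoreThread
// above are driven by the ThreadManager when a v8::Locker hands the isolate to
// a different OS thread; the ThreadLocalTop state is copied out into a
// per-thread archive buffer and copied back when that thread re-acquires the
// isolate.
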
2277 Isolate::ThreadDataTable::ThreadDataTable() : table_() {}
2278 
2279 Isolate::ThreadDataTable::~ThreadDataTable() {}
2280 
2281 void Isolate::ReleaseSharedPtrs() {
2282   while (managed_ptr_destructors_head_) {
2283     ManagedPtrDestructor* l = managed_ptr_destructors_head_;
2284     ManagedPtrDestructor* n = nullptr;
2285     managed_ptr_destructors_head_ = nullptr;
2286     for (; l != nullptr; l = n) {
2287       l->destructor_(l->shared_ptr_ptr_);
2288       n = l->next_;
2289       delete l;
2290     }
2291   }
2292 }
2293 
2294 void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2295   DCHECK_NULL(destructor->prev_);
2296   DCHECK_NULL(destructor->next_);
2297   if (managed_ptr_destructors_head_) {
2298     managed_ptr_destructors_head_->prev_ = destructor;
2299   }
2300   destructor->next_ = managed_ptr_destructors_head_;
2301   managed_ptr_destructors_head_ = destructor;
2302 }
2303 
2304 void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
2305   if (destructor->prev_) {
2306     destructor->prev_->next_ = destructor->next_;
2307   } else {
2308     DCHECK_EQ(destructor, managed_ptr_destructors_head_);
2309     managed_ptr_destructors_head_ = destructor->next_;
2310   }
2311   if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
2312   destructor->prev_ = nullptr;
2313   destructor->next_ = nullptr;
2314 }
2315 
2316 Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
2317 #if defined(USE_SIMULATOR)
2318   delete simulator_;
2319 #endif
2320 }
2321 
2322 Isolate::PerIsolateThreadData* Isolate::ThreadDataTable::Lookup(
2323     ThreadId thread_id) {
2324   auto t = table_.find(thread_id);
2325   if (t == table_.end()) return nullptr;
2326   return t->second;
2327 }
2328 
2329 
2330 void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
2331   bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
2332   CHECK(inserted);
2333 }
2334 
2335 
2336 void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
2337   table_.erase(data->thread_id_);
2338   delete data;
2339 }
2340 
2341 void Isolate::ThreadDataTable::RemoveAllThreads() {
2342   for (auto& x : table_) {
2343     delete x.second;
2344   }
2345   table_.clear();
2346 }
2347 
2348 class VerboseAccountingAllocator : public AccountingAllocator {
2349  public:
2350   VerboseAccountingAllocator(Heap* heap, size_t allocation_sample_bytes,
2351                              size_t pool_sample_bytes)
2352       : heap_(heap),
2353         last_memory_usage_(0),
2354         last_pool_size_(0),
2355         nesting_deepth_(0),
2356         allocation_sample_bytes_(allocation_sample_bytes),
2357         pool_sample_bytes_(pool_sample_bytes) {}
2358 
2359   v8::internal::Segment* GetSegment(size_t size) override {
2360     v8::internal::Segment* memory = AccountingAllocator::GetSegment(size);
2361     if (memory) {
2362       size_t malloced_current = GetCurrentMemoryUsage();
2363       size_t pooled_current = GetCurrentPoolSize();
2364 
2365       if (last_memory_usage_ + allocation_sample_bytes_ < malloced_current ||
2366           last_pool_size_ + pool_sample_bytes_ < pooled_current) {
2367         PrintMemoryJSON(malloced_current, pooled_current);
2368         last_memory_usage_ = malloced_current;
2369         last_pool_size_ = pooled_current;
2370       }
2371     }
2372     return memory;
2373   }
2374 
2375   void ReturnSegment(v8::internal::Segment* memory) override {
2376     AccountingAllocator::ReturnSegment(memory);
2377     size_t malloced_current = GetCurrentMemoryUsage();
2378     size_t pooled_current = GetCurrentPoolSize();
2379 
2380     if (malloced_current + allocation_sample_bytes_ < last_memory_usage_ ||
2381         pooled_current + pool_sample_bytes_ < last_pool_size_) {
2382       PrintMemoryJSON(malloced_current, pooled_current);
2383       last_memory_usage_ = malloced_current;
2384       last_pool_size_ = pooled_current;
2385     }
2386   }
2387 
2388   void ZoneCreation(const Zone* zone) override {
2389     PrintZoneModificationSample(zone, "zonecreation");
2390     nesting_deepth_++;
2391   }
2392 
2393   void ZoneDestruction(const Zone* zone) override {
2394     nesting_deepth_--;
2395     PrintZoneModificationSample(zone, "zonedestruction");
2396   }
2397 
2398  private:
2399   void PrintZoneModificationSample(const Zone* zone, const char* type) {
2400     PrintF(
2401         "{"
2402         "\"type\": \"%s\", "
2403         "\"isolate\": \"%p\", "
2404         "\"time\": %f, "
2405         "\"ptr\": \"%p\", "
2406         "\"name\": \"%s\", "
2407         "\"size\": %" PRIuS
2408         ","
2409         "\"nesting\": %zu}\n",
2410         type, reinterpret_cast<void*>(heap_->isolate()),
2411         heap_->isolate()->time_millis_since_init(),
2412         reinterpret_cast<const void*>(zone), zone->name(),
2413         zone->allocation_size(), nesting_deepth_.load());
2414   }
2415 
2416   void PrintMemoryJSON(size_t malloced, size_t pooled) {
2417     // Note: Neither isolate nor heap is locked, so be careful with accesses
2418     // as the allocator is potentially used on a concurrent thread.
2419     double time = heap_->isolate()->time_millis_since_init();
2420     PrintF(
2421         "{"
2422         "\"type\": \"zone\", "
2423         "\"isolate\": \"%p\", "
2424         "\"time\": %f, "
2425         "\"allocated\": %" PRIuS
2426         ","
2427         "\"pooled\": %" PRIuS "}\n",
2428         reinterpret_cast<void*>(heap_->isolate()), time, malloced, pooled);
2429   }
2430 
2431   Heap* heap_;
2432   std::atomic<size_t> last_memory_usage_;
2433   std::atomic<size_t> last_pool_size_;
2434   std::atomic<size_t> nesting_deepth_;
2435   size_t allocation_sample_bytes_, pool_sample_bytes_;
2436 };
2437 
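// Editorial note, not part of the original source: with --trace-zone-stats the
// allocator above emits one JSON object per line on stdout. An illustrative
// sample (values are made up), matching the PrintMemoryJSON() format string:
//
//   {"type": "zone", "isolate": "0x55d1c8a0", "time": 1234.5, "allocated": 262144,"pooled": 131072}
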
2438 #ifdef DEBUG
2439 std::atomic<size_t> Isolate::non_disposed_isolates_;
2440 #endif  // DEBUG
2441 
2442 Isolate::Isolate()
2443     : embedder_data_(),
2444       entry_stack_(nullptr),
2445       stack_trace_nesting_level_(0),
2446       incomplete_message_(nullptr),
2447       bootstrapper_(nullptr),
2448       runtime_profiler_(nullptr),
2449       compilation_cache_(nullptr),
2450       logger_(nullptr),
2451       load_stub_cache_(nullptr),
2452       store_stub_cache_(nullptr),
2453       deoptimizer_data_(nullptr),
2454       deoptimizer_lazy_throw_(false),
2455       materialized_object_store_(nullptr),
2456       capture_stack_trace_for_uncaught_exceptions_(false),
2457       stack_trace_for_uncaught_exceptions_frame_limit_(0),
2458       stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
2459       context_slot_cache_(nullptr),
2460       descriptor_lookup_cache_(nullptr),
2461       handle_scope_implementer_(nullptr),
2462       unicode_cache_(nullptr),
2463       allocator_(FLAG_trace_zone_stats ? new VerboseAccountingAllocator(
2464                                              &heap_, 256 * KB, 128 * KB)
2465                                        : new AccountingAllocator()),
2466       inner_pointer_to_code_cache_(nullptr),
2467       global_handles_(nullptr),
2468       eternal_handles_(nullptr),
2469       thread_manager_(nullptr),
2470       builtins_(this),
2471       setup_delegate_(nullptr),
2472       regexp_stack_(nullptr),
2473       date_cache_(nullptr),
2474       // TODO(bmeurer) Initialized lazily because it depends on flags; can
2475       // be fixed once the default isolate cleanup is done.
2476       random_number_generator_(nullptr),
2477       fuzzer_rng_(nullptr),
2478       rail_mode_(PERFORMANCE_ANIMATION),
2479       atomics_wait_callback_(nullptr),
2480       atomics_wait_callback_data_(nullptr),
2481       promise_hook_(nullptr),
2482       host_import_module_dynamically_callback_(nullptr),
2483       host_initialize_import_meta_object_callback_(nullptr),
2484       load_start_time_ms_(0),
2485 #ifdef V8_INTL_SUPPORT
2486       language_singleton_regexp_matcher_(nullptr),
2487       language_tag_regexp_matcher_(nullptr),
2488       language_variant_regexp_matcher_(nullptr),
2489       default_locale_(""),
2490 #endif  // V8_INTL_SUPPORT
2491       serializer_enabled_(false),
2492       has_fatal_error_(false),
2493       initialized_from_snapshot_(false),
2494       is_tail_call_elimination_enabled_(true),
2495       is_isolate_in_background_(false),
2496       memory_savings_mode_active_(false),
2497       heap_profiler_(nullptr),
2498       code_event_dispatcher_(new CodeEventDispatcher()),
2499       function_entry_hook_(nullptr),
2500       deferred_handles_head_(nullptr),
2501       optimizing_compile_dispatcher_(nullptr),
2502       stress_deopt_count_(0),
2503       force_slow_path_(false),
2504       next_optimization_id_(0),
2505 #if V8_SFI_HAS_UNIQUE_ID
2506       next_unique_sfi_id_(0),
2507 #endif
2508       is_running_microtasks_(false),
2509       use_counter_callback_(nullptr),
2510       cancelable_task_manager_(new CancelableTaskManager()),
2511       abort_on_uncaught_exception_callback_(nullptr),
2512       total_regexp_code_generated_(0) {
2513   id_ = base::Relaxed_AtomicIncrement(&isolate_counter_, 1);
2514   TRACE_ISOLATE(constructor);
2515 
2516   memset(isolate_addresses_, 0,
2517       sizeof(isolate_addresses_[0]) * (kIsolateAddressCount + 1));
2518 
2519   heap_.isolate_ = this;
2520   stack_guard_.isolate_ = this;
2521 
2522   // ThreadManager is initialized early to support locking an isolate
2523   // before it is entered.
2524   thread_manager_ = new ThreadManager();
2525   thread_manager_->isolate_ = this;
2526 
2527 #ifdef DEBUG
2528   non_disposed_isolates_++;
2529 #endif  // DEBUG
2530 
2531   handle_scope_data_.Initialize();
2532 
2533 #define ISOLATE_INIT_EXECUTE(type, name, initial_value)                        \
2534   name##_ = (initial_value);
2535   ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
2536 #undef ISOLATE_INIT_EXECUTE
2537 
2538 #define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length)                         \
2539   memset(name##_, 0, sizeof(type) * length);
2540   ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
2541 #undef ISOLATE_INIT_ARRAY_EXECUTE
2542 
2543   InitializeLoggingAndCounters();
2544   debug_ = new Debug(this);
2545 
2546   tracing_cpu_profiler_.reset(new TracingCpuProfilerImpl(this));
2547 
2548   init_memcopy_functions(this);
2549 
2550   if (FLAG_embedded_builtins) {
2551 #ifdef V8_MULTI_SNAPSHOTS
2552   if (FLAG_untrusted_code_mitigations) {
2553     SetEmbeddedBlob(DefaultEmbeddedBlob(), DefaultEmbeddedBlobSize());
2554   } else {
2555     SetEmbeddedBlob(TrustedEmbeddedBlob(), TrustedEmbeddedBlobSize());
2556   }
2557 #else
2558   SetEmbeddedBlob(DefaultEmbeddedBlob(), DefaultEmbeddedBlobSize());
2559 #endif
2560   }
2561 }
2562 
2563 
2564 void Isolate::TearDown() {
2565   TRACE_ISOLATE(tear_down);
2566 
2567   tracing_cpu_profiler_.reset();
2568   if (FLAG_stress_sampling_allocation_profiler > 0) {
2569     heap_profiler()->StopSamplingHeapProfiler();
2570   }
2571 
2572   // Temporarily set this isolate as current so that various parts of
2573   // the isolate can access it in their destructors without having a
2574   // direct pointer. We don't use Enter/Exit here to avoid
2575   // initializing the thread data.
2576   PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
2577   DCHECK_EQ(base::Relaxed_Load(&isolate_key_created_), 1);
2578   Isolate* saved_isolate =
2579       reinterpret_cast<Isolate*>(base::Thread::GetThreadLocal(isolate_key_));
2580   SetIsolateThreadLocals(this, nullptr);
2581 
2582   Deinit();
2583 
2584   {
2585     base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
2586     thread_data_table_.RemoveAllThreads();
2587   }
2588 
2589 #ifdef DEBUG
2590   non_disposed_isolates_--;
2591 #endif  // DEBUG
2592 
2593   delete this;
2594 
2595   // Restore the previous current isolate.
2596   SetIsolateThreadLocals(saved_isolate, saved_data);
2597 }
2598 
2599 
2600 void Isolate::ClearSerializerData() {
2601   delete external_reference_map_;
2602   external_reference_map_ = nullptr;
2603 }
2604 
2605 
2606 void Isolate::Deinit() {
2607   TRACE_ISOLATE(deinit);
2608 
2609   debug()->Unload();
2610 
2611   if (concurrent_recompilation_enabled()) {
2612     optimizing_compile_dispatcher_->Stop();
2613     delete optimizing_compile_dispatcher_;
2614     optimizing_compile_dispatcher_ = nullptr;
2615   }
2616 
2617   wasm_engine()->DeleteCompileJobsOnIsolate(this);
2618 
2619   heap_.mark_compact_collector()->EnsureSweepingCompleted();
2620   heap_.memory_allocator()->unmapper()->EnsureUnmappingCompleted();
2621 
2622   DumpAndResetStats();
2623 
2624   if (FLAG_print_deopt_stress) {
2625     PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
2626   }
2627 
2628   // We must stop the logger before we tear down other components.
2629   sampler::Sampler* sampler = logger_->sampler();
2630   if (sampler && sampler->IsActive()) sampler->Stop();
2631 
2632   FreeThreadResources();
2633   logger_->StopProfilerThread();
2634 
2635   // We start with the heap tear down so that releasing managed objects does
2636   // not cause a GC.
2637   heap_.StartTearDown();
2638 
2639   ReleaseSharedPtrs();
2640 
2641   delete deoptimizer_data_;
2642   deoptimizer_data_ = nullptr;
2643   builtins_.TearDown();
2644   bootstrapper_->TearDown();
2645 
2646   if (runtime_profiler_ != nullptr) {
2647     delete runtime_profiler_;
2648     runtime_profiler_ = nullptr;
2649   }
2650 
2651   delete heap_profiler_;
2652   heap_profiler_ = nullptr;
2653 
2654   compiler_dispatcher_->AbortAll(BlockingBehavior::kBlock);
2655   delete compiler_dispatcher_;
2656   compiler_dispatcher_ = nullptr;
2657 
2658   // This stops cancelable tasks (i.e. concurrent marking tasks).
2659   cancelable_task_manager()->CancelAndWait();
2660 
2661   heap_.TearDown();
2662   logger_->TearDown();
2663 
2664   wasm_engine_.reset();
2665 
2666   if (FLAG_embedded_builtins) {
2667     if (DefaultEmbeddedBlob() == nullptr && embedded_blob() != nullptr) {
2668       // We own the embedded blob. Free it.
2669       uint8_t* data = const_cast<uint8_t*>(embedded_blob_);
2670       InstructionStream::FreeOffHeapInstructionStream(data,
2671                                                       embedded_blob_size_);
2672     }
2673   }
2674 
2675   delete interpreter_;
2676   interpreter_ = nullptr;
2677 
2678   delete ast_string_constants_;
2679   ast_string_constants_ = nullptr;
2680 
2681   code_event_dispatcher_.reset();
2682 
2683   delete root_index_map_;
2684   root_index_map_ = nullptr;
2685 
2686   ClearSerializerData();
2687 }
2688 
2689 
2690 void Isolate::SetIsolateThreadLocals(Isolate* isolate,
2691                                      PerIsolateThreadData* data) {
2692   base::Thread::SetThreadLocal(isolate_key_, isolate);
2693   base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
2694 }
2695 
2696 
2697 Isolate::~Isolate() {
2698   TRACE_ISOLATE(destructor);
2699 
2700   // The entry stack must be empty when we get here.
2701   DCHECK(entry_stack_ == nullptr || entry_stack_->previous_item == nullptr);
2702 
2703   delete entry_stack_;
2704   entry_stack_ = nullptr;
2705 
2706   delete unicode_cache_;
2707   unicode_cache_ = nullptr;
2708 
2709   delete date_cache_;
2710   date_cache_ = nullptr;
2711 
2712 #ifdef V8_INTL_SUPPORT
2713   delete language_singleton_regexp_matcher_;
2714   language_singleton_regexp_matcher_ = nullptr;
2715 
2716   delete language_tag_regexp_matcher_;
2717   language_tag_regexp_matcher_ = nullptr;
2718 
2719   delete language_variant_regexp_matcher_;
2720   language_variant_regexp_matcher_ = nullptr;
2721 #endif  // V8_INTL_SUPPORT
2722 
2723   delete regexp_stack_;
2724   regexp_stack_ = nullptr;
2725 
2726   delete descriptor_lookup_cache_;
2727   descriptor_lookup_cache_ = nullptr;
2728   delete context_slot_cache_;
2729   context_slot_cache_ = nullptr;
2730 
2731   delete load_stub_cache_;
2732   load_stub_cache_ = nullptr;
2733   delete store_stub_cache_;
2734   store_stub_cache_ = nullptr;
2735 
2736   delete materialized_object_store_;
2737   materialized_object_store_ = nullptr;
2738 
2739   delete logger_;
2740   logger_ = nullptr;
2741 
2742   delete handle_scope_implementer_;
2743   handle_scope_implementer_ = nullptr;
2744 
2745   delete code_tracer();
2746   set_code_tracer(nullptr);
2747 
2748   delete compilation_cache_;
2749   compilation_cache_ = nullptr;
2750   delete bootstrapper_;
2751   bootstrapper_ = nullptr;
2752   delete inner_pointer_to_code_cache_;
2753   inner_pointer_to_code_cache_ = nullptr;
2754 
2755   delete thread_manager_;
2756   thread_manager_ = nullptr;
2757 
2758   delete global_handles_;
2759   global_handles_ = nullptr;
2760   delete eternal_handles_;
2761   eternal_handles_ = nullptr;
2762 
2763   delete string_stream_debug_object_cache_;
2764   string_stream_debug_object_cache_ = nullptr;
2765 
2766   delete random_number_generator_;
2767   random_number_generator_ = nullptr;
2768 
2769   delete fuzzer_rng_;
2770   fuzzer_rng_ = nullptr;
2771 
2772   delete debug_;
2773   debug_ = nullptr;
2774 
2775   delete cancelable_task_manager_;
2776   cancelable_task_manager_ = nullptr;
2777 
2778   delete allocator_;
2779   allocator_ = nullptr;
2780 }
2781 
2782 void Isolate::InitializeThreadLocal() { thread_local_top_.Initialize(this); }
2783 
2784 void Isolate::SetTerminationOnExternalTryCatch() {
2785   if (try_catch_handler() == nullptr) return;
2786   try_catch_handler()->can_continue_ = false;
2787   try_catch_handler()->has_terminated_ = true;
2788   try_catch_handler()->exception_ = ReadOnlyRoots(heap()).null_value();
2789 }
2790 
2791 bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
2792   Object* exception = pending_exception();
2793 
2794   if (IsJavaScriptHandlerOnTop(exception)) {
2795     thread_local_top_.external_caught_exception_ = false;
2796     return false;
2797   }
2798 
2799   if (!IsExternalHandlerOnTop(exception)) {
2800     thread_local_top_.external_caught_exception_ = false;
2801     return true;
2802   }
2803 
2804   thread_local_top_.external_caught_exception_ = true;
2805   if (!is_catchable_by_javascript(exception)) {
2806     SetTerminationOnExternalTryCatch();
2807   } else {
2808     v8::TryCatch* handler = try_catch_handler();
2809     DCHECK(thread_local_top_.pending_message_obj_->IsJSMessageObject() ||
2810            thread_local_top_.pending_message_obj_->IsTheHole(this));
2811     handler->can_continue_ = true;
2812     handler->has_terminated_ = false;
2813     handler->exception_ = pending_exception();
2814     // Propagate to the external try-catch only if we got an actual message.
2815     if (thread_local_top_.pending_message_obj_->IsTheHole(this)) return true;
2816 
2817     handler->message_obj_ = thread_local_top_.pending_message_obj_;
2818   }
2819   return true;
2820 }
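
// The three outcomes above correspond to what an embedder observes through
// v8::TryCatch: if a JavaScript handler is on top, the exception never reaches
// the external handler (return false); otherwise the TryCatch is populated
// (exception, plus the message if one exists) and the exception counts as
// propagated (return true). Illustrative embedder-side sketch only:
//
//   v8::TryCatch try_catch(v8_isolate);
//   v8::MaybeLocal<v8::Value> result = script->Run(context);
//   if (result.IsEmpty() && try_catch.HasCaught()) {
//     // try_catch.Exception() was filled in by the logic above.
//   }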
2821 
2822 bool Isolate::InitializeCounters() {
2823   if (async_counters_) return false;
2824   async_counters_ = std::make_shared<Counters>(this);
2825   return true;
2826 }
2827 
2828 void Isolate::InitializeLoggingAndCounters() {
2829   if (logger_ == nullptr) {
2830     logger_ = new Logger(this);
2831   }
2832   InitializeCounters();
2833 }
2834 
2835 namespace {
2836 void PrintBuiltinSizes(Isolate* isolate) {
2837   Builtins* builtins = isolate->builtins();
2838   for (int i = 0; i < Builtins::builtin_count; i++) {
2839     const char* name = builtins->name(i);
2840     const char* kind = Builtins::KindNameOf(i);
2841     Code* code = builtins->builtin(i);
2842     PrintF(stdout, "%s Builtin, %s, %d\n", kind, name, code->InstructionSize());
2843   }
2844 }
2845 
2846 void CreateOffHeapTrampolines(Isolate* isolate) {
2847   DCHECK(isolate->serializer_enabled());
2848   DCHECK_NOT_NULL(isolate->embedded_blob());
2849   DCHECK_NE(0, isolate->embedded_blob_size());
2850 
2851   HandleScope scope(isolate);
2852   Builtins* builtins = isolate->builtins();
2853 
2854   EmbeddedData d = EmbeddedData::FromBlob();
2855 
2856   CodeSpaceMemoryModificationScope code_allocation(isolate->heap());
2857   for (int i = 0; i < Builtins::builtin_count; i++) {
2858     if (!Builtins::IsIsolateIndependent(i)) continue;
2859 
2860     Address instruction_start = d.InstructionStartOfBuiltin(i);
2861     Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
2862         builtins->builtin_handle(i), instruction_start);
2863 
2864     // Note that references to the old, on-heap code objects may still exist on
2865     // the heap. This is fine for the sake of serialization, as serialization
2866     // will replace all of them with a builtin reference which is later
2867     // deserialized to point to the object within the builtins table.
2868     //
2869     // From this point onwards, some builtin code objects may be unreachable and
2870     // thus collected by the GC.
2871     builtins->set_builtin(i, *trampoline);
2872 
2873     if (isolate->logger()->is_listening_to_code_events() ||
2874         isolate->is_profiling()) {
2875       isolate->logger()->LogCodeObject(*trampoline);
2876     }
2877   }
2878 }
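
// In short: each isolate-independent builtin's on-heap Code object is swapped
// for a small trampoline whose entry point is the corresponding
// instruction_start inside the embedded blob, so the snapshot ends up
// referencing the off-heap copy rather than a full on-heap code object.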
2879 
2880 void PrintEmbeddedBuiltinCandidates(Isolate* isolate) {
2881   CHECK(FLAG_print_embedded_builtin_candidates);
2882   bool found_a_candidate = false;
2883   for (int i = 0; i < Builtins::builtin_count; i++) {
2884     if (Builtins::IsIsolateIndependent(i)) continue;
2885     Code* builtin = isolate->heap()->builtin(i);
2886     if (!builtin->IsIsolateIndependent(isolate)) continue;
2887     if (!found_a_candidate) PrintF("Found embedded builtin candidates:\n");
2888     found_a_candidate = true;
2889     PrintF("  %s\n", Builtins::name(i));
2890   }
2891 }
2892 }  // namespace
2893 
2894 void Isolate::PrepareEmbeddedBlobForSerialization() {
2895   // When preparing the embedded blob, ensure it doesn't exist yet.
2896   DCHECK_NULL(embedded_blob());
2897   DCHECK_NULL(DefaultEmbeddedBlob());
2898   DCHECK(serializer_enabled());
2899 
2900   // The isolate takes ownership of this pointer into an executable mmap'd
2901   // area. We muck around with const-casts because the standard use-case in
2902   // shipping builds is for embedded_blob_ to point into a read-only
2903   // .text-embedded section.
2904   uint8_t* data;
2905   uint32_t size;
2906   InstructionStream::CreateOffHeapInstructionStream(this, &data, &size);
2907   SetEmbeddedBlob(const_cast<const uint8_t*>(data), size);
2908   CreateOffHeapTrampolines(this);
2909 }
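
// Rough lifecycle of a self-built embedded blob (a summary of the code above,
// not additional behaviour):
//   1. CreateOffHeapInstructionStream() copies the isolate-independent
//      builtins into a freshly mmap'd executable region.
//   2. SetEmbeddedBlob() publishes that region as this isolate's blob.
//   3. CreateOffHeapTrampolines() redirects the builtins table into it.
// The region is freed again in Deinit() when the isolate owns the blob, i.e.
// when DefaultEmbeddedBlob() is null but embedded_blob() is set.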
2910 
2911 bool Isolate::Init(StartupDeserializer* des) {
2912   TRACE_ISOLATE(init);
2913 
2914   base::ElapsedTimer timer;
2915   if (des == nullptr && FLAG_profile_deserialization) timer.Start();
2916 
2917   time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();
2918 
2919   stress_deopt_count_ = FLAG_deopt_every_n_times;
2920   force_slow_path_ = FLAG_force_slow_path;
2921 
2922   has_fatal_error_ = false;
2923 
2924   if (function_entry_hook() != nullptr) {
2925     // When function entry hooking is in effect, we have to create the code
2926     // stubs from scratch to get entry hooks, rather than loading the previously
2927     // generated stubs from disk.
2928     // If this assert fires, the initialization path has regressed.
2929     DCHECK_NULL(des);
2930   }
2931 
2932   // The initialization process does not handle memory exhaustion.
2933   AlwaysAllocateScope always_allocate(this);
2934 
2935   // Safe after setting Heap::isolate_ and initializing the StackGuard.
2936   heap_.SetStackLimits();
2937 
2938 #define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
2939   isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
2940       reinterpret_cast<Address>(hacker_name##_address());
2941   FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
2942 #undef ASSIGN_ELEMENT
2943 
2944   compilation_cache_ = new CompilationCache(this);
2945   context_slot_cache_ = new ContextSlotCache();
2946   descriptor_lookup_cache_ = new DescriptorLookupCache();
2947   unicode_cache_ = new UnicodeCache();
2948   inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
2949   global_handles_ = new GlobalHandles(this);
2950   eternal_handles_ = new EternalHandles();
2951   bootstrapper_ = new Bootstrapper(this);
2952   handle_scope_implementer_ = new HandleScopeImplementer(this);
2953   load_stub_cache_ = new StubCache(this);
2954   store_stub_cache_ = new StubCache(this);
2955   materialized_object_store_ = new MaterializedObjectStore(this);
2956   regexp_stack_ = new RegExpStack();
2957   regexp_stack_->isolate_ = this;
2958   date_cache_ = new DateCache();
2959   heap_profiler_ = new HeapProfiler(heap());
2960   interpreter_ = new interpreter::Interpreter(this);
2961   compiler_dispatcher_ =
2962       new CompilerDispatcher(this, V8::GetCurrentPlatform(), FLAG_stack_size);
2963 
2964   // Enable logging before setting up the heap
2965   logger_->SetUp(this);
2966 
2967   { // NOLINT
2968     // Ensure that the thread has a valid stack guard.  The v8::Locker object
2969     // will ensure this too, but we don't have to use lockers if we are only
2970     // using one thread.
2971     ExecutionAccess lock(this);
2972     stack_guard_.InitThread(lock);
2973   }
2974 
2975   // SetUp the object heap.
2976   DCHECK(!heap_.HasBeenSetUp());
2977   heap_.SetUp();
2978 
2979   // Set up the wasm engine.
2980   if (wasm_engine_ == nullptr) {
2981     wasm_engine_ = wasm::WasmEngine::GetWasmEngine();
2982     wasm::WasmCodeManager::InstallSamplingGCCallback(this);
2983   }
2984 
2985   deoptimizer_data_ = new DeoptimizerData(heap());
2986 
2987   const bool create_heap_objects = (des == nullptr);
2988   if (setup_delegate_ == nullptr) {
2989     setup_delegate_ = new SetupIsolateDelegate(create_heap_objects);
2990   }
2991 
2992   if (!setup_delegate_->SetupHeap(&heap_)) {
2993     V8::FatalProcessOutOfMemory(this, "heap object creation");
2994     return false;
2995   }
2996 
2997   if (create_heap_objects) {
2998     // Terminate the partial snapshot cache so we can iterate.
2999     partial_snapshot_cache_.push_back(ReadOnlyRoots(this).undefined_value());
3000   }
3001 
3002   InitializeThreadLocal();
3003 
3004   bootstrapper_->Initialize(create_heap_objects);
3005 
3006   if (FLAG_embedded_builtins) {
3007     if (create_heap_objects && serializer_enabled()) {
3008       builtins_constants_table_builder_ =
3009           new BuiltinsConstantsTableBuilder(this);
3010     }
3011   }
3012   setup_delegate_->SetupBuiltins(this);
3013   if (FLAG_embedded_builtins) {
3014     if (create_heap_objects && serializer_enabled()) {
3015       builtins_constants_table_builder_->Finalize();
3016       delete builtins_constants_table_builder_;
3017       builtins_constants_table_builder_ = nullptr;
3018     }
3019   }
3020 
3021   if (create_heap_objects) heap_.CreateFixedStubs();
3022 
3023   if (FLAG_log_internal_timer_events) {
3024     set_event_logger(Logger::DefaultEventLoggerSentinel);
3025   }
3026 
3027   if (FLAG_trace_turbo || FLAG_trace_turbo_graph || FLAG_turbo_profiling) {
3028     PrintF("Concurrent recompilation has been disabled for tracing.\n");
3029   } else if (OptimizingCompileDispatcher::Enabled()) {
3030     optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
3031   }
3032 
3033   // Initialize the runtime profiler before deserialization, because garbage
3034   // collections may occur, clearing/updating ICs.
3035   runtime_profiler_ = new RuntimeProfiler(this);
3036 
3037   // If we are deserializing, read the state into the now-empty heap.
3038   {
3039     AlwaysAllocateScope always_allocate(this);
3040     CodeSpaceMemoryModificationScope modification_scope(&heap_);
3041 
3042     if (!create_heap_objects) des->DeserializeInto(this);
3043     load_stub_cache_->Initialize();
3044     store_stub_cache_->Initialize();
3045     setup_delegate_->SetupInterpreter(interpreter_);
3046 
3047     heap_.NotifyDeserializationComplete();
3048   }
3049   delete setup_delegate_;
3050   setup_delegate_ = nullptr;
3051 
3052   if (FLAG_print_builtin_size) PrintBuiltinSizes(this);
3053   if (FLAG_print_embedded_builtin_candidates) {
3054     PrintEmbeddedBuiltinCandidates(this);
3055   }
3056 
3057   // Finish initialization of ThreadLocal after deserialization is done.
3058   clear_pending_exception();
3059   clear_pending_message();
3060   clear_scheduled_exception();
3061 
3062   // Deserializing may put strange things in the root array's copy of the
3063   // stack guard.
3064   heap_.SetStackLimits();
3065 
3066   // Quiet the heap NaN if needed on target platform.
3067   if (!create_heap_objects)
3068     Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());
3069 
3070   if (FLAG_trace_turbo) {
3071     // Create an empty file.
3072     std::ofstream(GetTurboCfgFileName().c_str(), std::ios_base::trunc);
3073   }
3074 
3075   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, embedder_data_)),
3076            Internals::kIsolateEmbedderDataOffset);
3077   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.roots_)),
3078            Internals::kIsolateRootsOffset);
3079   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_)),
3080            Internals::kExternalMemoryOffset);
3081   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.external_memory_limit_)),
3082            Internals::kExternalMemoryLimitOffset);
3083   CHECK_EQ(static_cast<int>(
3084                OFFSET_OF(Isolate, heap_.external_memory_at_last_mark_compact_)),
3085            Internals::kExternalMemoryAtLastMarkCompactOffset);
3086   CHECK_EQ(
3087       static_cast<int>(OFFSET_OF(Isolate, heap_.external_reference_table_)),
3088       Internals::kIsolateRootsOffset +
3089           Heap::kRootsExternalReferenceTableOffset);
3090   CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, heap_.builtins_)),
3091            Internals::kIsolateRootsOffset + Heap::kRootsBuiltinsOffset);
3092 
3093   {
3094     HandleScope scope(this);
3095     ast_string_constants_ = new AstStringConstants(this, heap()->HashSeed());
3096   }
3097 
3098   initialized_from_snapshot_ = (des != nullptr);
3099 
3100   if (!FLAG_inline_new) heap_.DisableInlineAllocation();
3101 
3102   if (FLAG_stress_sampling_allocation_profiler > 0) {
3103     uint64_t sample_interval = FLAG_stress_sampling_allocation_profiler;
3104     int stack_depth = 128;
3105     v8::HeapProfiler::SamplingFlags sampling_flags =
3106         v8::HeapProfiler::SamplingFlags::kSamplingForceGC;
3107     heap_profiler()->StartSamplingHeapProfiler(sample_interval, stack_depth,
3108                                                sampling_flags);
3109   }
3110 
3111   if (des == nullptr && FLAG_profile_deserialization) {
3112     double ms = timer.Elapsed().InMillisecondsF();
3113     PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
3114   }
3115 
3116   return true;
3117 }
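
// Init() in a nutshell (derived from the code above): allocate caches and
// handle machinery, enable logging, initialize the stack guard, set up the
// heap, attach the wasm engine and deoptimizer data, create builtins (or
// prepare their constants table when building a snapshot), deserialize the
// startup snapshot if one was provided, then finalize the stub caches, the
// interpreter dispatch table and the stack limits.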
3118 
3119 
3120 void Isolate::Enter() {
3121   Isolate* current_isolate = nullptr;
3122   PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
3123   if (current_data != nullptr) {
3124     current_isolate = current_data->isolate_;
3125     DCHECK_NOT_NULL(current_isolate);
3126     if (current_isolate == this) {
3127       DCHECK(Current() == this);
3128       DCHECK_NOT_NULL(entry_stack_);
3129       DCHECK(entry_stack_->previous_thread_data == nullptr ||
3130              entry_stack_->previous_thread_data->thread_id().Equals(
3131                  ThreadId::Current()));
3132       // Same thread re-enters the isolate, no need to re-init anything.
3133       entry_stack_->entry_count++;
3134       return;
3135     }
3136   }
3137 
3138   PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
3139   DCHECK_NOT_NULL(data);
3140   DCHECK(data->isolate_ == this);
3141 
3142   EntryStackItem* item = new EntryStackItem(current_data,
3143                                             current_isolate,
3144                                             entry_stack_);
3145   entry_stack_ = item;
3146 
3147   SetIsolateThreadLocals(this, data);
3148 
3149   // In case it's the first time some thread enters the isolate.
3150   set_thread_id(data->thread_id());
3151 }
3152 
3153 
3154 void Isolate::Exit() {
3155   DCHECK_NOT_NULL(entry_stack_);
3156   DCHECK(entry_stack_->previous_thread_data == nullptr ||
3157          entry_stack_->previous_thread_data->thread_id().Equals(
3158              ThreadId::Current()));
3159 
3160   if (--entry_stack_->entry_count > 0) return;
3161 
3162   DCHECK_NOT_NULL(CurrentPerIsolateThreadData());
3163   DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);
3164 
3165   // Pop the stack.
3166   EntryStackItem* item = entry_stack_;
3167   entry_stack_ = item->previous_item;
3168 
3169   PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
3170   Isolate* previous_isolate = item->previous_isolate;
3171 
3172   delete item;
3173 
3174   // Reinit the current thread for the isolate it was running before this one.
3175   SetIsolateThreadLocals(previous_isolate, previous_thread_data);
3176 }
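
// Enter()/Exit() are reference counted per thread via entry_count, so nested
// entries on the same thread are cheap. Illustrative only; embedders normally
// use v8::Isolate::Scope, which performs these calls under the hood:
//
//   isolate->Enter();  // pushes an EntryStackItem, entry_count == 1
//   isolate->Enter();  // same thread re-enters, entry_count == 2
//   isolate->Exit();   // back to 1, thread locals untouched
//   isolate->Exit();   // pops the item, previous isolate/thread data restored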
3177 
3178 
3179 void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
3180   deferred->next_ = deferred_handles_head_;
3181   if (deferred_handles_head_ != nullptr) {
3182     deferred_handles_head_->previous_ = deferred;
3183   }
3184   deferred_handles_head_ = deferred;
3185 }
3186 
3187 
3188 void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
3189 #ifdef DEBUG
3190   // In debug mode assert that the linked list is well-formed.
3191   DeferredHandles* deferred_iterator = deferred;
3192   while (deferred_iterator->previous_ != nullptr) {
3193     deferred_iterator = deferred_iterator->previous_;
3194   }
3195   DCHECK(deferred_handles_head_ == deferred_iterator);
3196 #endif
3197   if (deferred_handles_head_ == deferred) {
3198     deferred_handles_head_ = deferred_handles_head_->next_;
3199   }
3200   if (deferred->next_ != nullptr) {
3201     deferred->next_->previous_ = deferred->previous_;
3202   }
3203   if (deferred->previous_ != nullptr) {
3204     deferred->previous_->next_ = deferred->next_;
3205   }
3206 }
3207 
3208 void Isolate::DumpAndResetStats() {
3209   if (turbo_statistics() != nullptr) {
3210     DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);
3211     StdoutStream os;
3212     if (FLAG_turbo_stats) {
3213       AsPrintableStatistics ps = {*turbo_statistics(), false};
3214       os << ps << std::endl;
3215     }
3216     if (FLAG_turbo_stats_nvp) {
3217       AsPrintableStatistics ps = {*turbo_statistics(), true};
3218       os << ps << std::endl;
3219     }
3220     delete turbo_statistics_;
3221     turbo_statistics_ = nullptr;
3222   }
3223   // TODO(7424): There is no public API for the {WasmEngine} yet. So for now we
3224   // just dump and reset the engine's statistics together with the Isolate.
3225   if (FLAG_turbo_stats_wasm) {
3226     wasm_engine()->DumpAndResetTurboStatistics();
3227   }
3228   if (V8_UNLIKELY(FLAG_runtime_stats ==
3229                   v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
3230     counters()->runtime_call_stats()->Print();
3231     counters()->runtime_call_stats()->Reset();
3232   }
3233 }
3234 
3235 void Isolate::AbortConcurrentOptimization(BlockingBehavior behavior) {
3236   if (concurrent_recompilation_enabled()) {
3237     DisallowHeapAllocation no_recursive_gc;
3238     optimizing_compile_dispatcher()->Flush(behavior);
3239   }
3240 }
3241 
3242 CompilationStatistics* Isolate::GetTurboStatistics() {
3243   if (turbo_statistics() == nullptr)
3244     set_turbo_statistics(new CompilationStatistics());
3245   return turbo_statistics();
3246 }
3247 
3248 
3249 CodeTracer* Isolate::GetCodeTracer() {
3250   if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
3251   return code_tracer();
3252 }
3253 
3254 bool Isolate::use_optimizer() {
3255   return FLAG_opt && !serializer_enabled_ && CpuFeatures::SupportsOptimizer() &&
3256          !is_precise_count_code_coverage() && !is_block_count_code_coverage();
3257 }
3258 
3259 bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
3260   return NeedsSourcePositionsForProfiling() || FLAG_detailed_line_info;
3261 }
3262 
3263 bool Isolate::NeedsSourcePositionsForProfiling() const {
3264   return FLAG_trace_deopt || FLAG_trace_turbo || FLAG_trace_turbo_graph ||
3265          FLAG_turbo_profiling || FLAG_perf_prof || is_profiling() ||
3266          debug_->is_active() || logger_->is_logging() || FLAG_trace_maps;
3267 }
3268 
3269 void Isolate::SetFeedbackVectorsForProfilingTools(Object* value) {
3270   DCHECK(value->IsUndefined(this) || value->IsArrayList());
3271   heap()->set_feedback_vectors_for_profiling_tools(value);
3272 }
3273 
3274 void Isolate::MaybeInitializeVectorListFromHeap() {
3275   if (!heap()->feedback_vectors_for_profiling_tools()->IsUndefined(this)) {
3276     // Already initialized, return early.
3277     DCHECK(heap()->feedback_vectors_for_profiling_tools()->IsArrayList());
3278     return;
3279   }
3280 
3281   // Collect existing feedback vectors.
3282   std::vector<Handle<FeedbackVector>> vectors;
3283 
3284   {
3285     HeapIterator heap_iterator(heap());
3286     while (HeapObject* current_obj = heap_iterator.next()) {
3287       if (!current_obj->IsFeedbackVector()) continue;
3288 
3289       FeedbackVector* vector = FeedbackVector::cast(current_obj);
3290       SharedFunctionInfo* shared = vector->shared_function_info();
3291 
3292       // No need to preserve the feedback vector for non-user-visible functions.
3293       if (!shared->IsSubjectToDebugging()) continue;
3294 
3295       vectors.emplace_back(vector, this);
3296     }
3297   }
3298 
3299   // Add collected feedback vectors to the root list lest we lose them to GC.
3300   Handle<ArrayList> list =
3301       ArrayList::New(this, static_cast<int>(vectors.size()));
3302   for (const auto& vector : vectors) list = ArrayList::Add(this, list, vector);
3303   SetFeedbackVectorsForProfilingTools(*list);
3304 }
3305 
3306 bool Isolate::IsArrayOrObjectOrStringPrototype(Object* object) {
3307   Object* context = heap()->native_contexts_list();
3308   while (!context->IsUndefined(this)) {
3309     Context* current_context = Context::cast(context);
3310     if (current_context->initial_object_prototype() == object ||
3311         current_context->initial_array_prototype() == object ||
3312         current_context->initial_string_prototype() == object) {
3313       return true;
3314     }
3315     context = current_context->next_context_link();
3316   }
3317   return false;
3318 }
3319 
3320 bool Isolate::IsInAnyContext(Object* object, uint32_t index) {
3321   DisallowHeapAllocation no_gc;
3322   Object* context = heap()->native_contexts_list();
3323   while (!context->IsUndefined(this)) {
3324     Context* current_context = Context::cast(context);
3325     if (current_context->get(index) == object) {
3326       return true;
3327     }
3328     context = current_context->next_context_link();
3329   }
3330   return false;
3331 }
3332 
3333 bool Isolate::IsNoElementsProtectorIntact(Context* context) {
3334   PropertyCell* no_elements_cell = heap()->no_elements_protector();
3335   bool cell_reports_intact =
3336       no_elements_cell->value()->IsSmi() &&
3337       Smi::ToInt(no_elements_cell->value()) == kProtectorValid;
3338 
3339 #ifdef DEBUG
3340   Context* native_context = context->native_context();
3341 
3342   Map* root_array_map =
3343       native_context->GetInitialJSArrayMap(GetInitialFastElementsKind());
3344   JSObject* initial_array_proto = JSObject::cast(
3345       native_context->get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
3346   JSObject* initial_object_proto = JSObject::cast(
3347       native_context->get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));
3348   JSObject* initial_string_proto = JSObject::cast(
3349       native_context->get(Context::INITIAL_STRING_PROTOTYPE_INDEX));
3350 
3351   if (root_array_map == nullptr ||
3352       initial_array_proto == initial_object_proto) {
3353     // We are in the bootstrapping process, and the entire check sequence
3354     // shouldn't be performed.
3355     return cell_reports_intact;
3356   }
3357 
3358   // Check that the array prototype hasn't been altered WRT empty elements.
3359   if (root_array_map->prototype() != initial_array_proto) {
3360     DCHECK_EQ(false, cell_reports_intact);
3361     return cell_reports_intact;
3362   }
3363 
3364   FixedArrayBase* elements = initial_array_proto->elements();
3365   ReadOnlyRoots roots(heap());
3366   if (elements != roots.empty_fixed_array() &&
3367       elements != roots.empty_slow_element_dictionary()) {
3368     DCHECK_EQ(false, cell_reports_intact);
3369     return cell_reports_intact;
3370   }
3371 
3372   // Check that the Object.prototype hasn't been altered WRT empty elements.
3373   elements = initial_object_proto->elements();
3374   if (elements != roots.empty_fixed_array() &&
3375       elements != roots.empty_slow_element_dictionary()) {
3376     DCHECK_EQ(false, cell_reports_intact);
3377     return cell_reports_intact;
3378   }
3379 
3380   // Check that the Array.prototype has the Object.prototype as its
3381   // [[Prototype]] and that the Object.prototype has a null [[Prototype]].
3382   PrototypeIterator iter(this, initial_array_proto);
3383   if (iter.IsAtEnd() || iter.GetCurrent() != initial_object_proto) {
3384     DCHECK_EQ(false, cell_reports_intact);
3385     DCHECK(!has_pending_exception());
3386     return cell_reports_intact;
3387   }
3388   iter.Advance();
3389   if (!iter.IsAtEnd()) {
3390     DCHECK_EQ(false, cell_reports_intact);
3391     DCHECK(!has_pending_exception());
3392     return cell_reports_intact;
3393   }
3394   DCHECK(!has_pending_exception());
3395 
3396   // Check that the String.prototype hasn't been altered WRT empty elements.
3397   elements = initial_string_proto->elements();
3398   if (elements != roots.empty_fixed_array() &&
3399       elements != roots.empty_slow_element_dictionary()) {
3400     DCHECK_EQ(false, cell_reports_intact);
3401     return cell_reports_intact;
3402   }
3403 
3404   // Check that the String.prototype has the Object.prototype
3405   // as its [[Prototype]] still.
3406   if (initial_string_proto->map()->prototype() != initial_object_proto) {
3407     DCHECK_EQ(false, cell_reports_intact);
3408     return cell_reports_intact;
3409   }
3410 #endif
3411 
3412   return cell_reports_intact;
3413 }
3414 
3415 bool Isolate::IsNoElementsProtectorIntact() {
3416   return Isolate::IsNoElementsProtectorIntact(context());
3417 }
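
// Background for the protector checks in this file: a "protector" is a Cell or
// PropertyCell holding the Smi kProtectorValid while some invariant (e.g.
// "Array.prototype has no elements") still holds. Fast paths only read the
// cell; the Invalidate* functions below flip it to kProtectorInvalid once the
// invariant is broken, and it is not switched back for the lifetime of the
// isolate.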
3418 
3419 bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
3420   Cell* is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
3421   bool is_is_concat_spreadable_set =
3422       Smi::ToInt(is_concat_spreadable_cell->value()) == kProtectorInvalid;
3423 #ifdef DEBUG
3424   Map* root_array_map =
3425       raw_native_context()->GetInitialJSArrayMap(GetInitialFastElementsKind());
3426   if (root_array_map == nullptr) {
3427     // Ignore the value of is_concat_spreadable during bootstrap.
3428     return !is_is_concat_spreadable_set;
3429   }
3430   Handle<Object> array_prototype(array_function()->prototype(), this);
3431   Handle<Symbol> key = factory()->is_concat_spreadable_symbol();
3432   Handle<Object> value;
3433   LookupIterator it(this, array_prototype, key);
3434   if (it.IsFound() && !JSReceiver::GetDataProperty(&it)->IsUndefined(this)) {
3435     // TODO(cbruni): Currently we do not revert if we unset the
3436     // @@isConcatSpreadable property on Array.prototype or Object.prototype,
3437     // hence the reverse implication doesn't hold.
3438     DCHECK(is_is_concat_spreadable_set);
3439     return false;
3440   }
3441 #endif  // DEBUG
3442 
3443   return !is_is_concat_spreadable_set;
3444 }
3445 
3446 bool Isolate::IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver) {
3447   if (!IsIsConcatSpreadableLookupChainIntact()) return false;
3448   return !receiver->HasProxyInPrototype(this);
3449 }
3450 
3451 bool Isolate::IsPromiseHookProtectorIntact() {
3452   PropertyCell* promise_hook_cell = heap()->promise_hook_protector();
3453   bool is_promise_hook_protector_intact =
3454       Smi::ToInt(promise_hook_cell->value()) == kProtectorValid;
3455   DCHECK_IMPLIES(is_promise_hook_protector_intact,
3456                  !promise_hook_or_async_event_delegate_);
3457   return is_promise_hook_protector_intact;
3458 }
3459 
3460 bool Isolate::IsPromiseResolveLookupChainIntact() {
3461   Cell* promise_resolve_cell = heap()->promise_resolve_protector();
3462   bool is_promise_resolve_protector_intact =
3463       Smi::ToInt(promise_resolve_cell->value()) == kProtectorValid;
3464   return is_promise_resolve_protector_intact;
3465 }
3466 
3467 bool Isolate::IsPromiseThenLookupChainIntact() {
3468   PropertyCell* promise_then_cell = heap()->promise_then_protector();
3469   bool is_promise_then_protector_intact =
3470       Smi::ToInt(promise_then_cell->value()) == kProtectorValid;
3471   return is_promise_then_protector_intact;
3472 }
3473 
3474 bool Isolate::IsPromiseThenLookupChainIntact(Handle<JSReceiver> receiver) {
3475   DisallowHeapAllocation no_gc;
3476   if (!receiver->IsJSPromise()) return false;
3477   if (!IsInAnyContext(receiver->map()->prototype(),
3478                       Context::PROMISE_PROTOTYPE_INDEX)) {
3479     return false;
3480   }
3481   return IsPromiseThenLookupChainIntact();
3482 }
3483 
3484 void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
3485   DisallowHeapAllocation no_gc;
3486   if (!object->map()->is_prototype_map()) return;
3487   if (!IsNoElementsProtectorIntact()) return;
3488   if (!IsArrayOrObjectOrStringPrototype(*object)) return;
3489   PropertyCell::SetValueWithInvalidation(
3490       this, factory()->no_elements_protector(),
3491       handle(Smi::FromInt(kProtectorInvalid), this));
3492 }
3493 
3494 void Isolate::InvalidateIsConcatSpreadableProtector() {
3495   DCHECK(factory()->is_concat_spreadable_protector()->value()->IsSmi());
3496   DCHECK(IsIsConcatSpreadableLookupChainIntact());
3497   factory()->is_concat_spreadable_protector()->set_value(
3498       Smi::FromInt(kProtectorInvalid));
3499   DCHECK(!IsIsConcatSpreadableLookupChainIntact());
3500 }
3501 
3502 void Isolate::InvalidateArrayConstructorProtector() {
3503   DCHECK(factory()->array_constructor_protector()->value()->IsSmi());
3504   DCHECK(IsArrayConstructorIntact());
3505   factory()->array_constructor_protector()->set_value(
3506       Smi::FromInt(kProtectorInvalid));
3507   DCHECK(!IsArrayConstructorIntact());
3508 }
3509 
3510 void Isolate::InvalidateArraySpeciesProtector() {
3511   DCHECK(factory()->array_species_protector()->value()->IsSmi());
3512   DCHECK(IsArraySpeciesLookupChainIntact());
3513   PropertyCell::SetValueWithInvalidation(
3514       this, factory()->array_species_protector(),
3515       handle(Smi::FromInt(kProtectorInvalid), this));
3516   DCHECK(!IsArraySpeciesLookupChainIntact());
3517 }
3518 
3519 void Isolate::InvalidateTypedArraySpeciesProtector() {
3520   DCHECK(factory()->typed_array_species_protector()->value()->IsSmi());
3521   DCHECK(IsTypedArraySpeciesLookupChainIntact());
3522   PropertyCell::SetValueWithInvalidation(
3523       this, factory()->typed_array_species_protector(),
3524       handle(Smi::FromInt(kProtectorInvalid), this));
3525   DCHECK(!IsTypedArraySpeciesLookupChainIntact());
3526 }
3527 
3528 void Isolate::InvalidatePromiseSpeciesProtector() {
3529   DCHECK(factory()->promise_species_protector()->value()->IsSmi());
3530   DCHECK(IsPromiseSpeciesLookupChainIntact());
3531   PropertyCell::SetValueWithInvalidation(
3532       this, factory()->promise_species_protector(),
3533       handle(Smi::FromInt(kProtectorInvalid), this));
3534   DCHECK(!IsPromiseSpeciesLookupChainIntact());
3535 }
3536 
3537 void Isolate::InvalidateStringLengthOverflowProtector() {
3538   DCHECK(factory()->string_length_protector()->value()->IsSmi());
3539   DCHECK(IsStringLengthOverflowIntact());
3540   factory()->string_length_protector()->set_value(
3541       Smi::FromInt(kProtectorInvalid));
3542   DCHECK(!IsStringLengthOverflowIntact());
3543 }
3544 
3545 void Isolate::InvalidateArrayIteratorProtector() {
3546   DCHECK(factory()->array_iterator_protector()->value()->IsSmi());
3547   DCHECK(IsArrayIteratorLookupChainIntact());
3548   PropertyCell::SetValueWithInvalidation(
3549       this, factory()->array_iterator_protector(),
3550       handle(Smi::FromInt(kProtectorInvalid), this));
3551   DCHECK(!IsArrayIteratorLookupChainIntact());
3552 }
3553 
3554 void Isolate::InvalidateArrayBufferNeuteringProtector() {
3555   DCHECK(factory()->array_buffer_neutering_protector()->value()->IsSmi());
3556   DCHECK(IsArrayBufferNeuteringIntact());
3557   PropertyCell::SetValueWithInvalidation(
3558       this, factory()->array_buffer_neutering_protector(),
3559       handle(Smi::FromInt(kProtectorInvalid), this));
3560   DCHECK(!IsArrayBufferNeuteringIntact());
3561 }
3562 
3563 void Isolate::InvalidatePromiseHookProtector() {
3564   DCHECK(factory()->promise_hook_protector()->value()->IsSmi());
3565   DCHECK(IsPromiseHookProtectorIntact());
3566   PropertyCell::SetValueWithInvalidation(
3567       this, factory()->promise_hook_protector(),
3568       handle(Smi::FromInt(kProtectorInvalid), this));
3569   DCHECK(!IsPromiseHookProtectorIntact());
3570 }
3571 
3572 void Isolate::InvalidatePromiseResolveProtector() {
3573   DCHECK(factory()->promise_resolve_protector()->value()->IsSmi());
3574   DCHECK(IsPromiseResolveLookupChainIntact());
3575   factory()->promise_resolve_protector()->set_value(
3576       Smi::FromInt(kProtectorInvalid));
3577   DCHECK(!IsPromiseResolveLookupChainIntact());
3578 }
3579 
3580 void Isolate::InvalidatePromiseThenProtector() {
3581   DCHECK(factory()->promise_then_protector()->value()->IsSmi());
3582   DCHECK(IsPromiseThenLookupChainIntact());
3583   PropertyCell::SetValueWithInvalidation(
3584       this, factory()->promise_then_protector(),
3585       handle(Smi::FromInt(kProtectorInvalid), this));
3586   DCHECK(!IsPromiseThenLookupChainIntact());
3587 }
3588 
3589 bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
3590   DisallowHeapAllocation no_gc;
3591   return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
3592 }
3593 
3594 static base::RandomNumberGenerator* ensure_rng_exists(
3595     base::RandomNumberGenerator** rng, int seed) {
3596   if (*rng == nullptr) {
3597     if (seed != 0) {
3598       *rng = new base::RandomNumberGenerator(seed);
3599     } else {
3600       *rng = new base::RandomNumberGenerator();
3601     }
3602   }
3603   return *rng;
3604 }
3605 
3606 base::RandomNumberGenerator* Isolate::random_number_generator() {
3607   return ensure_rng_exists(&random_number_generator_, FLAG_random_seed);
3608 }
3609 
3610 base::RandomNumberGenerator* Isolate::fuzzer_rng() {
3611   if (fuzzer_rng_ == nullptr) {
3612     int64_t seed = FLAG_fuzzer_random_seed;
3613     if (seed == 0) {
3614       seed = random_number_generator()->initial_seed();
3615     }
3616 
3617     fuzzer_rng_ = new base::RandomNumberGenerator(seed);
3618   }
3619 
3620   return fuzzer_rng_;
3621 }
3622 
3623 int Isolate::GenerateIdentityHash(uint32_t mask) {
3624   int hash;
3625   int attempts = 0;
3626   do {
3627     hash = random_number_generator()->NextInt() & mask;
3628   } while (hash == 0 && attempts++ < 30);
3629   return hash != 0 ? hash : 1;
3630 }
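
// Callers typically pass an all-ones bit mask, so the result is a nonzero hash
// that fits in that many bits. Minimal sketch of the intended use (the mask
// value here is illustrative only):
//
//   int hash = isolate->GenerateIdentityHash(Smi::kMaxValue);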
3631 
3632 Code* Isolate::FindCodeObject(Address a) {
3633   return heap()->GcSafeFindCodeForInnerPointer(a);
3634 }
3635 
3636 
3637 #ifdef DEBUG
3638 #define ISOLATE_FIELD_OFFSET(type, name, ignored)                       \
3639 const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
3640 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
3641 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
3642 #undef ISOLATE_FIELD_OFFSET
3643 #endif
3644 
3645 Handle<Symbol> Isolate::SymbolFor(Heap::RootListIndex dictionary_index,
3646                                   Handle<String> name, bool private_symbol) {
3647   Handle<String> key = factory()->InternalizeString(name);
3648   Handle<NameDictionary> dictionary =
3649       Handle<NameDictionary>::cast(heap()->root_handle(dictionary_index));
3650   int entry = dictionary->FindEntry(this, key);
3651   Handle<Symbol> symbol;
3652   if (entry == NameDictionary::kNotFound) {
3653     symbol =
3654         private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
3655     symbol->set_name(*key);
3656     dictionary = NameDictionary::Add(this, dictionary, key, symbol,
3657                                      PropertyDetails::Empty(), &entry);
3658     switch (dictionary_index) {
3659       case Heap::kPublicSymbolTableRootIndex:
3660         symbol->set_is_public(true);
3661         heap()->set_public_symbol_table(*dictionary);
3662         break;
3663       case Heap::kApiSymbolTableRootIndex:
3664         heap()->set_api_symbol_table(*dictionary);
3665         break;
3666       case Heap::kApiPrivateSymbolTableRootIndex:
3667         heap()->set_api_private_symbol_table(*dictionary);
3668         break;
3669       default:
3670         UNREACHABLE();
3671     }
3672   } else {
3673     symbol = Handle<Symbol>(Symbol::cast(dictionary->ValueAt(entry)), this);
3674   }
3675   return symbol;
3676 }
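
// SymbolFor() backs the per-isolate symbol registries: the public table is,
// roughly, what JavaScript's Symbol.for()/Symbol.keyFor() observe, while the
// API tables serve the v8::Symbol::ForApi()/v8::Private::ForApi() style
// registries. Repeated lookups with the same name yield the identical Symbol:
//
//   Symbol.for("app.key") === Symbol.for("app.key")  // true (illustrative JS)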
3677 
3678 void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
3679   auto pos = std::find(before_call_entered_callbacks_.begin(),
3680                        before_call_entered_callbacks_.end(), callback);
3681   if (pos != before_call_entered_callbacks_.end()) return;
3682   before_call_entered_callbacks_.push_back(callback);
3683 }
3684 
3685 void Isolate::RemoveBeforeCallEnteredCallback(
3686     BeforeCallEnteredCallback callback) {
3687   auto pos = std::find(before_call_entered_callbacks_.begin(),
3688                        before_call_entered_callbacks_.end(), callback);
3689   if (pos == before_call_entered_callbacks_.end()) return;
3690   before_call_entered_callbacks_.erase(pos);
3691 }
3692 
3693 void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
3694   auto pos = std::find(call_completed_callbacks_.begin(),
3695                        call_completed_callbacks_.end(), callback);
3696   if (pos != call_completed_callbacks_.end()) return;
3697   call_completed_callbacks_.push_back(callback);
3698 }
3699 
3700 void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
3701   auto pos = std::find(call_completed_callbacks_.begin(),
3702                        call_completed_callbacks_.end(), callback);
3703   if (pos == call_completed_callbacks_.end()) return;
3704   call_completed_callbacks_.erase(pos);
3705 }
3706 
3707 void Isolate::AddMicrotasksCompletedCallback(
3708     MicrotasksCompletedCallback callback) {
3709   auto pos = std::find(microtasks_completed_callbacks_.begin(),
3710                        microtasks_completed_callbacks_.end(), callback);
3711   if (pos != microtasks_completed_callbacks_.end()) return;
3712   microtasks_completed_callbacks_.push_back(callback);
3713 }
3714 
3715 void Isolate::RemoveMicrotasksCompletedCallback(
3716     MicrotasksCompletedCallback callback) {
3717   auto pos = std::find(microtasks_completed_callbacks_.begin(),
3718                        microtasks_completed_callbacks_.end(), callback);
3719   if (pos == microtasks_completed_callbacks_.end()) return;
3720   microtasks_completed_callbacks_.erase(pos);
3721 }
3722 
3723 void Isolate::FireCallCompletedCallback() {
3724   if (!handle_scope_implementer()->CallDepthIsZero()) return;
3725 
3726   bool run_microtasks =
3727       pending_microtask_count() &&
3728       !handle_scope_implementer()->HasMicrotasksSuppressions() &&
3729       handle_scope_implementer()->microtasks_policy() ==
3730           v8::MicrotasksPolicy::kAuto;
3731 
3732   if (run_microtasks) RunMicrotasks();
3733 
3734   if (call_completed_callbacks_.empty()) return;
3735   // Fire callbacks.  Increase call depth to prevent recursive callbacks.
3736   v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
3737   v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
3738   std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
3739   for (auto& callback : callbacks) {
3740     callback(reinterpret_cast<v8::Isolate*>(this));
3741   }
3742 }
3743 
3744 void Isolate::PromiseHookStateUpdated() {
3745   bool is_active = promise_hook_ || async_event_delegate_;
3746   if (is_active && IsPromiseHookProtectorIntact()) {
3747     HandleScope scope(this);
3748     InvalidatePromiseHookProtector();
3749   }
3750   promise_hook_or_async_event_delegate_ = is_active;
3751 }
3752 
3753 namespace {
3754 
3755 MaybeHandle<JSPromise> NewRejectedPromise(Isolate* isolate,
3756                                           v8::Local<v8::Context> api_context,
3757                                           Handle<Object> exception) {
3758   v8::Local<v8::Promise::Resolver> resolver;
3759   ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
3760       isolate, resolver, v8::Promise::Resolver::New(api_context),
3761       MaybeHandle<JSPromise>());
3762 
3763   RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
3764       isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
3765       MaybeHandle<JSPromise>());
3766 
3767   v8::Local<v8::Promise> promise = resolver->GetPromise();
3768   return v8::Utils::OpenHandle(*promise);
3769 }
3770 
3771 }  // namespace
3772 
3773 MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
3774     Handle<Script> referrer, Handle<Object> specifier) {
3775   v8::Local<v8::Context> api_context =
3776       v8::Utils::ToLocal(Handle<Context>(native_context()));
3777 
3778   if (host_import_module_dynamically_callback_ == nullptr) {
3779     Handle<Object> exception =
3780         factory()->NewError(error_function(), MessageTemplate::kUnsupported);
3781     return NewRejectedPromise(this, api_context, exception);
3782   }
3783 
3784   Handle<String> specifier_str;
3785   MaybeHandle<String> maybe_specifier = Object::ToString(this, specifier);
3786   if (!maybe_specifier.ToHandle(&specifier_str)) {
3787     Handle<Object> exception(pending_exception(), this);
3788     clear_pending_exception();
3789 
3790     return NewRejectedPromise(this, api_context, exception);
3791   }
3792   DCHECK(!has_pending_exception());
3793 
3794   v8::Local<v8::Promise> promise;
3795   ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
3796       this, promise,
3797       host_import_module_dynamically_callback_(
3798           api_context, v8::Utils::ScriptOrModuleToLocal(referrer),
3799           v8::Utils::ToLocal(specifier_str)),
3800       MaybeHandle<JSPromise>());
3801   return v8::Utils::OpenHandle(*promise);
3802 }
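
// This is the engine side of dynamic import(): the embedder registers a
// HostImportModuleDynamicallyCallback via the setter below. If no callback is
// installed, or the specifier cannot be converted to a string, the returned
// promise is rejected rather than throwing synchronously. Illustrative JS-side
// view, assuming an embedder that wired up module loading:
//
//   import("./module.js").then(ns => use(ns)).catch(e => report(e));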
3803 
3804 void Isolate::SetHostImportModuleDynamicallyCallback(
3805     HostImportModuleDynamicallyCallback callback) {
3806   host_import_module_dynamically_callback_ = callback;
3807 }
3808 
3809 Handle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
3810     Handle<Module> module) {
3811   Handle<Object> host_meta(module->import_meta(), this);
3812   if (host_meta->IsTheHole(this)) {
3813     host_meta = factory()->NewJSObjectWithNullProto();
3814     if (host_initialize_import_meta_object_callback_ != nullptr) {
3815       v8::Local<v8::Context> api_context =
3816           v8::Utils::ToLocal(Handle<Context>(native_context()));
3817       host_initialize_import_meta_object_callback_(
3818           api_context, Utils::ToLocal(module),
3819           v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(host_meta)));
3820     }
3821     module->set_import_meta(*host_meta);
3822   }
3823   return Handle<JSObject>::cast(host_meta);
3824 }
3825 
3826 void Isolate::SetHostInitializeImportMetaObjectCallback(
3827     HostInitializeImportMetaObjectCallback callback) {
3828   host_initialize_import_meta_object_callback_ = callback;
3829 }
3830 
3831 void Isolate::SetAtomicsWaitCallback(v8::Isolate::AtomicsWaitCallback callback,
3832                                      void* data) {
3833   atomics_wait_callback_ = callback;
3834   atomics_wait_callback_data_ = data;
3835 }
3836 
3837 void Isolate::RunAtomicsWaitCallback(v8::Isolate::AtomicsWaitEvent event,
3838                                      Handle<JSArrayBuffer> array_buffer,
3839                                      size_t offset_in_bytes, int32_t value,
3840                                      double timeout_in_ms,
3841                                      AtomicsWaitWakeHandle* stop_handle) {
3842   DCHECK(array_buffer->is_shared());
3843   if (atomics_wait_callback_ == nullptr) return;
3844   HandleScope handle_scope(this);
3845   atomics_wait_callback_(
3846       event, v8::Utils::ToLocalShared(array_buffer), offset_in_bytes, value,
3847       timeout_in_ms,
3848       reinterpret_cast<v8::Isolate::AtomicsWaitWakeHandle*>(stop_handle),
3849       atomics_wait_callback_data_);
3850 }
3851 
3852 void Isolate::SetPromiseHook(PromiseHook hook) {
3853   promise_hook_ = hook;
3854   PromiseHookStateUpdated();
3855 }
3856 
3857 void Isolate::RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
3858                              Handle<Object> parent) {
3859   RunPromiseHookForAsyncEventDelegate(type, promise);
3860   if (promise_hook_ == nullptr) return;
3861   promise_hook_(type, v8::Utils::PromiseToLocal(promise),
3862                 v8::Utils::ToLocal(parent));
3863 }
3864 
3865 void Isolate::RunPromiseHookForAsyncEventDelegate(PromiseHookType type,
3866                                                   Handle<JSPromise> promise) {
3867   if (!async_event_delegate_) return;
3868   if (type == PromiseHookType::kResolve) return;
3869 
3870   if (type == PromiseHookType::kBefore) {
3871     if (!promise->async_task_id()) return;
3872     async_event_delegate_->AsyncEventOccurred(debug::kDebugWillHandle,
3873                                               promise->async_task_id(), false);
3874   } else if (type == PromiseHookType::kAfter) {
3875     if (!promise->async_task_id()) return;
3876     async_event_delegate_->AsyncEventOccurred(debug::kDebugDidHandle,
3877                                               promise->async_task_id(), false);
3878   } else {
3879     DCHECK(type == PromiseHookType::kInit);
3880     debug::DebugAsyncActionType type = debug::kDebugPromiseThen;
3881     bool last_frame_was_promise_builtin = false;
3882     JavaScriptFrameIterator it(this);
3883     while (!it.done()) {
3884       std::vector<Handle<SharedFunctionInfo>> infos;
3885       it.frame()->GetFunctions(&infos);
3886       for (size_t i = 1; i <= infos.size(); ++i) {
3887         Handle<SharedFunctionInfo> info = infos[infos.size() - i];
3888         if (info->IsUserJavaScript()) {
3889           // We should not report PromiseThen and PromiseCatch which are called
3890           // indirectly, e.g. Promise.all calls Promise.then internally.
3891           if (last_frame_was_promise_builtin) {
3892             if (!promise->async_task_id()) {
3893               promise->set_async_task_id(++async_task_count_);
3894             }
3895             async_event_delegate_->AsyncEventOccurred(
3896                 type, promise->async_task_id(), debug()->IsBlackboxed(info));
3897           }
3898           return;
3899         }
3900         last_frame_was_promise_builtin = false;
3901         if (info->HasBuiltinId()) {
3902           if (info->builtin_id() == Builtins::kPromisePrototypeThen) {
3903             type = debug::kDebugPromiseThen;
3904             last_frame_was_promise_builtin = true;
3905           } else if (info->builtin_id() == Builtins::kPromisePrototypeCatch) {
3906             type = debug::kDebugPromiseCatch;
3907             last_frame_was_promise_builtin = true;
3908           } else if (info->builtin_id() == Builtins::kPromisePrototypeFinally) {
3909             type = debug::kDebugPromiseFinally;
3910             last_frame_was_promise_builtin = true;
3911           }
3912         }
3913       }
3914       it.Advance();
3915     }
3916   }
3917 }
3918 
3919 void Isolate::OnAsyncFunctionStateChanged(Handle<JSPromise> promise,
3920                                           debug::DebugAsyncActionType event) {
3921   if (!async_event_delegate_) return;
3922   if (!promise->async_task_id()) {
3923     promise->set_async_task_id(++async_task_count_);
3924   }
3925   async_event_delegate_->AsyncEventOccurred(event, promise->async_task_id(),
3926                                             false);
3927 }
3928 
3929 void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
3930   promise_reject_callback_ = callback;
3931 }
3932 
3933 void Isolate::ReportPromiseReject(Handle<JSPromise> promise,
3934                                   Handle<Object> value,
3935                                   v8::PromiseRejectEvent event) {
3936   if (promise_reject_callback_ == nullptr) return;
3937   Handle<FixedArray> stack_trace;
3938   if (event != v8::kPromiseHandlerAddedAfterReject && value->IsJSObject()) {
3939     stack_trace = GetDetailedStackTrace(Handle<JSObject>::cast(value));
3940   }
3941   promise_reject_callback_(v8::PromiseRejectMessage(
3942       v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value),
3943       v8::Utils::StackTraceToLocal(stack_trace)));
3944 }
3945 
3946 void Isolate::EnqueueMicrotask(Handle<Microtask> microtask) {
3947   Handle<FixedArray> queue(heap()->microtask_queue(), this);
3948   int num_tasks = pending_microtask_count();
3949   DCHECK_LE(num_tasks, queue->length());
3950   if (num_tasks == queue->length()) {
3951     queue = factory()->CopyFixedArrayAndGrow(queue, std::max(num_tasks, 8));
3952     heap()->set_microtask_queue(*queue);
3953   }
3954   DCHECK_LE(8, queue->length());
3955   DCHECK_LT(num_tasks, queue->length());
3956   DCHECK(queue->get(num_tasks)->IsUndefined(this));
3957   queue->set(num_tasks, *microtask);
3958   set_pending_microtask_count(num_tasks + 1);
3959 }
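
// The microtask queue is a plain FixedArray grown geometrically: when full it
// is copied into an array larger by max(num_tasks, 8) slots, i.e. it roughly
// doubles once it holds at least 8 tasks. pending_microtask_count() is the
// number of occupied slots; the remaining slots stay undefined until used.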
3960 
3961 
3962 void Isolate::RunMicrotasks() {
3963   // Increase call depth to prevent recursive callbacks.
3964   v8::Isolate::SuppressMicrotaskExecutionScope suppress(
3965       reinterpret_cast<v8::Isolate*>(this));
3966   if (pending_microtask_count()) {
3967     is_running_microtasks_ = true;
3968     TRACE_EVENT0("v8.execute", "RunMicrotasks");
3969     TRACE_EVENT_CALL_STATS_SCOPED(this, "v8", "V8.RunMicrotasks");
3970 
3971     HandleScope scope(this);
3972     MaybeHandle<Object> maybe_exception;
3973     MaybeHandle<Object> maybe_result = Execution::RunMicrotasks(
3974         this, Execution::MessageHandling::kReport, &maybe_exception);
3975     // If execution is terminating, bail out, clean up, and propagate to
3976     // TryCatch scope.
3977     if (maybe_result.is_null() && maybe_exception.is_null()) {
3978       heap()->set_microtask_queue(ReadOnlyRoots(heap()).empty_fixed_array());
3979       set_pending_microtask_count(0);
3980       handle_scope_implementer()->LeaveMicrotaskContext();
3981       SetTerminationOnExternalTryCatch();
3982     }
3983     CHECK_EQ(0, pending_microtask_count());
3984     CHECK_EQ(0, heap()->microtask_queue()->length());
3985     is_running_microtasks_ = false;
3986   }
3987   FireMicrotasksCompletedCallback();
3988 }
3989 
3990 void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
3991   DCHECK(!use_counter_callback_);
3992   use_counter_callback_ = callback;
3993 }
3994 
3995 
3996 void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
3997   // The counter callback may cause the embedder to call into V8, which is not
3998   // generally possible during GC.
3999   if (heap_.gc_state() == Heap::NOT_IN_GC) {
4000     if (use_counter_callback_) {
4001       HandleScope handle_scope(this);
4002       use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
4003     }
4004   } else {
4005     heap_.IncrementDeferredCount(feature);
4006   }
4007 }
4008 
4009 std::string Isolate::GetTurboCfgFileName() {
4010   if (FLAG_trace_turbo_cfg_file == nullptr) {
4011     std::ostringstream os;
4012     os << "turbo-" << base::OS::GetCurrentProcessId() << "-" << id() << ".cfg";
4013     return os.str();
4014   } else {
4015     return FLAG_trace_turbo_cfg_file;
4016   }
4017 }
4018 
4019 // Heap::detached_contexts tracks detached contexts as pairs
4020 // (number of GCs since the context was detached, the context).
4021 void Isolate::AddDetachedContext(Handle<Context> context) {
4022   HandleScope scope(this);
4023   Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4024   detached_contexts = WeakArrayList::AddToEnd(
4025       this, detached_contexts, MaybeObjectHandle(Smi::kZero, this));
4026   detached_contexts = WeakArrayList::AddToEnd(this, detached_contexts,
4027                                               MaybeObjectHandle::Weak(context));
4028   heap()->set_detached_contexts(*detached_contexts);
4029 }
4030 
4031 
4032 void Isolate::CheckDetachedContextsAfterGC() {
4033   HandleScope scope(this);
4034   Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4035   int length = detached_contexts->length();
4036   if (length == 0) return;
4037   int new_length = 0;
4038   for (int i = 0; i < length; i += 2) {
4039     int mark_sweeps = Smi::ToInt(detached_contexts->Get(i)->ToSmi());
4040     MaybeObject* context = detached_contexts->Get(i + 1);
4041     DCHECK(context->IsWeakHeapObject() || context->IsClearedWeakHeapObject());
4042     if (!context->IsClearedWeakHeapObject()) {
4043       detached_contexts->Set(
4044           new_length, MaybeObject::FromSmi(Smi::FromInt(mark_sweeps + 1)));
4045       detached_contexts->Set(new_length + 1, context);
4046       new_length += 2;
4047     }
4048   }
4049   detached_contexts->set_length(new_length);
4050   while (new_length < length) {
4051     detached_contexts->Set(new_length, MaybeObject::FromSmi(Smi::kZero));
4052     ++new_length;
4053   }
4054 
4055   if (FLAG_trace_detached_contexts) {
4056     PrintF("%d detached contexts are collected out of %d\n",
4057            length - new_length, length);
4058     for (int i = 0; i < new_length; i += 2) {
4059       int mark_sweeps = Smi::ToInt(detached_contexts->Get(i)->ToSmi());
4060       MaybeObject* context = detached_contexts->Get(i + 1);
4061       DCHECK(context->IsWeakHeapObject() || context->IsClearedWeakHeapObject());
4062       if (mark_sweeps > 3) {
4063         PrintF("detached context %p\n survived %d GCs (leak?)\n",
4064                static_cast<void*>(context), mark_sweeps);
4065       }
4066     }
4067   }
4068 }
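
// The loop above compacts the (mark_sweeps, context) pairs in place: entries
// whose weak context reference was cleared by GC are dropped, survivors get
// their GC counter incremented, and the tail of the list is reset to Smi zero
// so stale slots do not keep objects alive. A detached context surviving more
// than 3 GCs is flagged as a likely leak when --trace-detached-contexts is on.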
4069 
4070 double Isolate::LoadStartTimeMs() {
4071   base::LockGuard<base::Mutex> guard(&rail_mutex_);
4072   return load_start_time_ms_;
4073 }
4074 
4075 void Isolate::SetRAILMode(RAILMode rail_mode) {
4076   RAILMode old_rail_mode = rail_mode_.Value();
4077   if (old_rail_mode != PERFORMANCE_LOAD && rail_mode == PERFORMANCE_LOAD) {
4078     base::LockGuard<base::Mutex> guard(&rail_mutex_);
4079     load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
4080   }
4081   rail_mode_.SetValue(rail_mode);
4082   if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
4083     heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
4084         heap());
4085   }
4086   if (FLAG_trace_rail) {
4087     PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
4088   }
4089 }
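
// Illustrative sketch (not in the original source): an embedder such as a
// browser would typically drive this through the public API, signalling the
// start and end of a page load. The surrounding embedder code is assumed.
//
//   // Navigation starts: record the load start time inside V8.
//   isolate->SetRAILMode(v8::PERFORMANCE_LOAD);
//   ...
//   // Load finished: leaving PERFORMANCE_LOAD lets V8 schedule incremental
//   // marking work again (see above).
//   isolate->SetRAILMode(v8::PERFORMANCE_ANIMATION);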

void Isolate::IsolateInBackgroundNotification() {
  is_isolate_in_background_ = true;
  heap()->ActivateMemoryReducerIfNeeded();
}

void Isolate::IsolateInForegroundNotification() {
  is_isolate_in_background_ = false;
}

void Isolate::PrintWithTimestamp(const char* format, ...) {
  base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
                  static_cast<void*>(this), time_millis_since_init());
  va_list arguments;
  va_start(arguments, format);
  base::OS::VPrint(format, arguments);
  va_end(arguments);
}

void Isolate::SetIdle(bool is_idle) {
  if (!is_profiling()) return;
  StateTag state = current_vm_state();
  DCHECK(state == EXTERNAL || state == IDLE);
  if (js_entry_sp() != kNullAddress) return;
  if (is_idle) {
    set_current_vm_state(IDLE);
  } else if (state == IDLE) {
    set_current_vm_state(EXTERNAL);
  }
}

bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
  StackGuard* stack_guard = isolate_->stack_guard();
#ifdef USE_SIMULATOR
  // The simulator uses a separate JS stack.
  Address jssp_address = Simulator::current(isolate_)->get_sp();
  uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
  if (jssp - gap < stack_guard->real_jslimit()) return true;
#endif  // USE_SIMULATOR
  return GetCurrentStackPosition() - gap < stack_guard->real_climit();
}
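
// Usage sketch (informational, not part of this file): callers that may
// recurse deeply typically guard themselves like this before proceeding. The
// gap constant and the return type are assumptions for the example.
//
//   StackLimitCheck check(isolate);
//   if (check.JsHasOverflowed(kSomeExtraSlackInBytes)) {
//     isolate->StackOverflow();  // Throws a RangeError on the isolate.
//     return MaybeHandle<Object>();
//   }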

SaveContext::SaveContext(Isolate* isolate)
    : isolate_(isolate), prev_(isolate->save_context()) {
  if (isolate->context() != nullptr) {
    context_ = Handle<Context>(isolate->context(), isolate);
  }
  isolate->set_save_context(this);

  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
}

SaveContext::~SaveContext() {
  isolate_->set_context(context_.is_null() ? nullptr : *context_);
  isolate_->set_save_context(prev_);
}
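
// Usage sketch (not original code): SaveContext is an RAII helper, so callers
// switch contexts for a limited region and rely on the destructor to restore
// the previous one. |other_context| is a made-up name for the example.
//
//   {
//     SaveContext save(isolate);
//     isolate->set_context(*other_context);
//     // ... run code that needs |other_context| ...
//   }  // Original context and save_context chain restored here.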

bool SaveContext::IsBelowFrame(StandardFrame* frame) {
  return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
}

#ifdef DEBUG
AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
    : isolate_(isolate), context_(isolate->context(), isolate) {}
#endif  // DEBUG

bool InterruptsScope::Intercept(StackGuard::InterruptFlag flag) {
  InterruptsScope* last_postpone_scope = nullptr;
  for (InterruptsScope* current = this; current; current = current->prev_) {
    // We only consider scopes related to the passed flag.
    if (!(current->intercept_mask_ & flag)) continue;
    if (current->mode_ == kRunInterrupts) {
      // If the innermost scope is a kRunInterrupts scope, the interrupt must
      // not be intercepted.
      break;
    } else {
      DCHECK_EQ(current->mode_, kPostponeInterrupts);
      last_postpone_scope = current;
    }
  }
  // If there is no postpone scope for the passed flag, we should not intercept.
  if (!last_postpone_scope) return false;
  last_postpone_scope->intercepted_flags_ |= flag;
  return true;
}
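
// Usage sketch (not part of the original file): the postponing side of this
// mechanism is normally used via the PostponeInterruptsScope RAII class, for
// example while compiling, so that termination and similar interrupts are
// deferred rather than handled mid-operation.
//
//   {
//     PostponeInterruptsScope postpone(isolate);
//     // Interrupts covered by the scope's intercept mask are recorded here
//     // and handled after the scope is left.
//   }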

#undef TRACE_ISOLATE

}  // namespace internal
}  // namespace v8