// Copyright 2012 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "src/execution/isolate.h" #include #include #include #include #include #include #include #include #include #include "include/v8-template.h" #include "src/api/api-inl.h" #include "src/ast/ast-value-factory.h" #include "src/ast/scopes.h" #include "src/base/hashmap.h" #include "src/base/logging.h" #include "src/base/platform/mutex.h" #include "src/base/platform/platform.h" #include "src/base/sys-info.h" #include "src/base/utils/random-number-generator.h" #include "src/baseline/baseline-batch-compiler.h" #include "src/bigint/bigint.h" #include "src/builtins/builtins-promise.h" #include "src/builtins/constants-table-builder.h" #include "src/codegen/assembler-inl.h" #include "src/codegen/compilation-cache.h" #include "src/codegen/flush-instruction-cache.h" #include "src/common/assert-scope.h" #include "src/common/ptr-compr-inl.h" #include "src/compiler-dispatcher/lazy-compile-dispatcher.h" #include "src/compiler-dispatcher/optimizing-compile-dispatcher.h" #include "src/date/date.h" #include "src/debug/debug-frames.h" #if V8_ENABLE_WEBASSEMBLY #include "src/debug/debug-wasm-objects.h" #endif // V8_ENABLE_WEBASSEMBLY #include "src/debug/debug.h" #include "src/deoptimizer/deoptimizer.h" #include "src/deoptimizer/materialized-object-store.h" #include "src/diagnostics/basic-block-profiler.h" #include "src/diagnostics/compilation-statistics.h" #include "src/execution/frames-inl.h" #include "src/execution/frames.h" #include "src/execution/isolate-inl.h" #include "src/execution/local-isolate.h" #include "src/execution/messages.h" #include "src/execution/microtask-queue.h" #include "src/execution/protectors-inl.h" #include "src/execution/simulator.h" #include "src/execution/tiering-manager.h" #include "src/execution/v8threads.h" #include "src/execution/vm-state-inl.h" #include "src/handles/global-handles-inl.h" #include "src/handles/persistent-handles.h" #include "src/heap/heap-inl.h" #include "src/heap/heap.h" #include "src/heap/local-heap.h" #include "src/heap/parked-scope.h" #include "src/heap/read-only-heap.h" #include "src/heap/safepoint.h" #include "src/ic/stub-cache.h" #include "src/init/bootstrapper.h" #include "src/init/setup-isolate.h" #include "src/init/v8.h" #include "src/interpreter/interpreter.h" #include "src/libsampler/sampler.h" #include "src/logging/counters.h" #include "src/logging/log.h" #include "src/logging/metrics.h" #include "src/logging/runtime-call-stats-scope.h" #include "src/numbers/hash-seed-inl.h" #include "src/objects/backing-store.h" #include "src/objects/call-site-info-inl.h" #include "src/objects/elements.h" #include "src/objects/feedback-vector.h" #include "src/objects/hash-table-inl.h" #include "src/objects/js-array-buffer-inl.h" #include "src/objects/js-array-inl.h" #include "src/objects/js-generator-inl.h" #include "src/objects/js-weak-refs-inl.h" #include "src/objects/managed-inl.h" #include "src/objects/module-inl.h" #include "src/objects/promise-inl.h" #include "src/objects/prototype.h" #include "src/objects/slots.h" #include "src/objects/smi.h" #include "src/objects/source-text-module-inl.h" #include "src/objects/visitors.h" #include "src/profiler/heap-profiler.h" #include "src/profiler/tracing-cpu-profiler.h" #include "src/regexp/regexp-stack.h" #include "src/snapshot/embedded/embedded-data-inl.h" #include "src/snapshot/embedded/embedded-file-writer-interface.h" #include "src/snapshot/read-only-deserializer.h" #include "src/snapshot/shared-heap-deserializer.h" #include "src/snapshot/startup-deserializer.h" #include "src/strings/string-builder-inl.h" #include "src/strings/string-stream.h" #include "src/tasks/cancelable-task.h" #include "src/tracing/tracing-category-observer.h" #include "src/utils/address-map.h" #include "src/utils/ostreams.h" #include "src/utils/version.h" #include "src/zone/accounting-allocator.h" #include "src/zone/type-stats.h" #ifdef V8_INTL_SUPPORT #include "src/objects/intl-objects.h" #include "unicode/locid.h" #include "unicode/uobject.h" #endif // V8_INTL_SUPPORT #if V8_ENABLE_MAGLEV #include "src/maglev/maglev-concurrent-dispatcher.h" #endif // V8_ENABLE_MAGLEV #if V8_ENABLE_WEBASSEMBLY #include "src/trap-handler/trap-handler.h" #include "src/wasm/wasm-code-manager.h" #include "src/wasm/wasm-engine.h" #include "src/wasm/wasm-module.h" #include "src/wasm/wasm-objects.h" #endif // V8_ENABLE_WEBASSEMBLY #if defined(V8_OS_WIN64) #include "src/diagnostics/unwinding-info-win64.h" #endif // V8_OS_WIN64 #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING #include "src/base/platform/wrappers.h" #include "src/heap/conservative-stack-visitor.h" #endif #if USE_SIMULATOR #include "src/execution/simulator-base.h" #endif extern "C" const uint8_t* v8_Default_embedded_blob_code_; extern "C" uint32_t v8_Default_embedded_blob_code_size_; extern "C" const uint8_t* v8_Default_embedded_blob_data_; extern "C" uint32_t v8_Default_embedded_blob_data_size_; namespace v8 { namespace internal { #ifdef DEBUG #define TRACE_ISOLATE(tag) \ do { \ if (FLAG_trace_isolates) { \ PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast(this), \ id()); \ } \ } while (false) #else #define TRACE_ISOLATE(tag) #endif const uint8_t* DefaultEmbeddedBlobCode() { return v8_Default_embedded_blob_code_; } uint32_t DefaultEmbeddedBlobCodeSize() { return v8_Default_embedded_blob_code_size_; } const uint8_t* DefaultEmbeddedBlobData() { return v8_Default_embedded_blob_data_; } uint32_t DefaultEmbeddedBlobDataSize() { return v8_Default_embedded_blob_data_size_; } namespace { // These variables provide access to the current embedded blob without requiring // an isolate instance. This is needed e.g. by Code::InstructionStart, which may // not have access to an isolate but still needs to access the embedded blob. // The variables are initialized by each isolate in Init(). Writes and reads are // relaxed since we can guarantee that the current thread has initialized these // variables before accessing them. Different threads may race, but this is fine // since they all attempt to set the same values of the blob pointer and size. std::atomic current_embedded_blob_code_(nullptr); std::atomic current_embedded_blob_code_size_(0); std::atomic current_embedded_blob_data_(nullptr); std::atomic current_embedded_blob_data_size_(0); // The various workflows around embedded snapshots are fairly complex. We need // to support plain old snapshot builds, nosnap builds, and the requirements of // subtly different serialization tests. There's two related knobs to twiddle: // // - The default embedded blob may be overridden by setting the sticky embedded // blob. This is set automatically whenever we create a new embedded blob. // // - Lifecycle management can be either manual or set to refcounting. // // A few situations to demonstrate their use: // // - A plain old snapshot build neither overrides the default blob nor // refcounts. // // - mksnapshot sets the sticky blob and manually frees the embedded // blob once done. // // - Most serializer tests do the same. // // - Nosnapshot builds set the sticky blob and enable refcounting. // This mutex protects access to the following variables: // - sticky_embedded_blob_code_ // - sticky_embedded_blob_code_size_ // - sticky_embedded_blob_data_ // - sticky_embedded_blob_data_size_ // - enable_embedded_blob_refcounting_ // - current_embedded_blob_refs_ base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER; const uint8_t* sticky_embedded_blob_code_ = nullptr; uint32_t sticky_embedded_blob_code_size_ = 0; const uint8_t* sticky_embedded_blob_data_ = nullptr; uint32_t sticky_embedded_blob_data_size_ = 0; bool enable_embedded_blob_refcounting_ = true; int current_embedded_blob_refs_ = 0; const uint8_t* StickyEmbeddedBlobCode() { return sticky_embedded_blob_code_; } uint32_t StickyEmbeddedBlobCodeSize() { return sticky_embedded_blob_code_size_; } const uint8_t* StickyEmbeddedBlobData() { return sticky_embedded_blob_data_; } uint32_t StickyEmbeddedBlobDataSize() { return sticky_embedded_blob_data_size_; } void SetStickyEmbeddedBlob(const uint8_t* code, uint32_t code_size, const uint8_t* data, uint32_t data_size) { sticky_embedded_blob_code_ = code; sticky_embedded_blob_code_size_ = code_size; sticky_embedded_blob_data_ = data; sticky_embedded_blob_data_size_ = data_size; } } // namespace void DisableEmbeddedBlobRefcounting() { base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer()); enable_embedded_blob_refcounting_ = false; } void FreeCurrentEmbeddedBlob() { CHECK(!enable_embedded_blob_refcounting_); base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer()); if (StickyEmbeddedBlobCode() == nullptr) return; CHECK_EQ(StickyEmbeddedBlobCode(), Isolate::CurrentEmbeddedBlobCode()); CHECK_EQ(StickyEmbeddedBlobData(), Isolate::CurrentEmbeddedBlobData()); OffHeapInstructionStream::FreeOffHeapOffHeapInstructionStream( const_cast(Isolate::CurrentEmbeddedBlobCode()), Isolate::CurrentEmbeddedBlobCodeSize(), const_cast(Isolate::CurrentEmbeddedBlobData()), Isolate::CurrentEmbeddedBlobDataSize()); current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed); current_embedded_blob_code_size_.store(0, std::memory_order_relaxed); current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed); current_embedded_blob_data_size_.store(0, std::memory_order_relaxed); sticky_embedded_blob_code_ = nullptr; sticky_embedded_blob_code_size_ = 0; sticky_embedded_blob_data_ = nullptr; sticky_embedded_blob_data_size_ = 0; } // static bool Isolate::CurrentEmbeddedBlobIsBinaryEmbedded() { // In some situations, we must be able to rely on the embedded blob being // immortal immovable. This is the case if the blob is binary-embedded. // See blob lifecycle controls above for descriptions of when the current // embedded blob may change (e.g. in tests or mksnapshot). If the blob is // binary-embedded, it is immortal immovable. const uint8_t* code = current_embedded_blob_code_.load(std::memory_order_relaxed); if (code == nullptr) return false; return code == DefaultEmbeddedBlobCode(); } void Isolate::SetEmbeddedBlob(const uint8_t* code, uint32_t code_size, const uint8_t* data, uint32_t data_size) { CHECK_NOT_NULL(code); CHECK_NOT_NULL(data); embedded_blob_code_ = code; embedded_blob_code_size_ = code_size; embedded_blob_data_ = data; embedded_blob_data_size_ = data_size; current_embedded_blob_code_.store(code, std::memory_order_relaxed); current_embedded_blob_code_size_.store(code_size, std::memory_order_relaxed); current_embedded_blob_data_.store(data, std::memory_order_relaxed); current_embedded_blob_data_size_.store(data_size, std::memory_order_relaxed); #ifdef DEBUG // Verify that the contents of the embedded blob are unchanged from // serialization-time, just to ensure the compiler isn't messing with us. EmbeddedData d = EmbeddedData::FromBlob(); if (d.EmbeddedBlobDataHash() != d.CreateEmbeddedBlobDataHash()) { FATAL( "Embedded blob data section checksum verification failed. This " "indicates that the embedded blob has been modified since compilation " "time."); } if (FLAG_text_is_readable) { if (d.EmbeddedBlobCodeHash() != d.CreateEmbeddedBlobCodeHash()) { FATAL( "Embedded blob code section checksum verification failed. This " "indicates that the embedded blob has been modified since " "compilation time. A common cause is a debugging breakpoint set " "within builtin code."); } } #endif // DEBUG } void Isolate::ClearEmbeddedBlob() { CHECK(enable_embedded_blob_refcounting_); CHECK_EQ(embedded_blob_code_, CurrentEmbeddedBlobCode()); CHECK_EQ(embedded_blob_code_, StickyEmbeddedBlobCode()); CHECK_EQ(embedded_blob_data_, CurrentEmbeddedBlobData()); CHECK_EQ(embedded_blob_data_, StickyEmbeddedBlobData()); embedded_blob_code_ = nullptr; embedded_blob_code_size_ = 0; embedded_blob_data_ = nullptr; embedded_blob_data_size_ = 0; current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed); current_embedded_blob_code_size_.store(0, std::memory_order_relaxed); current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed); current_embedded_blob_data_size_.store(0, std::memory_order_relaxed); sticky_embedded_blob_code_ = nullptr; sticky_embedded_blob_code_size_ = 0; sticky_embedded_blob_data_ = nullptr; sticky_embedded_blob_data_size_ = 0; } const uint8_t* Isolate::embedded_blob_code() const { return embedded_blob_code_; } uint32_t Isolate::embedded_blob_code_size() const { return embedded_blob_code_size_; } const uint8_t* Isolate::embedded_blob_data() const { return embedded_blob_data_; } uint32_t Isolate::embedded_blob_data_size() const { return embedded_blob_data_size_; } // static const uint8_t* Isolate::CurrentEmbeddedBlobCode() { return current_embedded_blob_code_.load(std::memory_order_relaxed); } // static uint32_t Isolate::CurrentEmbeddedBlobCodeSize() { return current_embedded_blob_code_size_.load(std::memory_order_relaxed); } // static const uint8_t* Isolate::CurrentEmbeddedBlobData() { return current_embedded_blob_data_.load(std::memory_order_relaxed); } // static uint32_t Isolate::CurrentEmbeddedBlobDataSize() { return current_embedded_blob_data_size_.load(std::memory_order_relaxed); } // static base::AddressRegion Isolate::GetShortBuiltinsCallRegion() { // Update calculations below if the assert fails. STATIC_ASSERT(kMaxPCRelativeCodeRangeInMB <= 4096); if (kMaxPCRelativeCodeRangeInMB == 0) { // Return empty region if pc-relative calls/jumps are not supported. return base::AddressRegion(kNullAddress, 0); } constexpr size_t max_size = std::numeric_limits::max(); if (uint64_t{kMaxPCRelativeCodeRangeInMB} * MB > max_size) { // The whole addressable space is reachable with pc-relative calls/jumps. return base::AddressRegion(kNullAddress, max_size); } constexpr size_t radius = kMaxPCRelativeCodeRangeInMB * MB; DCHECK_LT(CurrentEmbeddedBlobCodeSize(), radius); Address embedded_blob_code_start = reinterpret_cast
(CurrentEmbeddedBlobCode()); if (embedded_blob_code_start == kNullAddress) { // Return empty region if there's no embedded blob. return base::AddressRegion(kNullAddress, 0); } Address embedded_blob_code_end = embedded_blob_code_start + CurrentEmbeddedBlobCodeSize(); Address region_start = (embedded_blob_code_end > radius) ? (embedded_blob_code_end - radius) : 0; Address region_end = embedded_blob_code_start + radius; if (region_end < embedded_blob_code_start) { region_end = static_cast
(-1); } return base::AddressRegion(region_start, region_end - region_start); } size_t Isolate::HashIsolateForEmbeddedBlob() { DCHECK(builtins_.is_initialized()); DCHECK(Builtins::AllBuiltinsAreIsolateIndependent()); DisallowGarbageCollection no_gc; static constexpr size_t kSeed = 0; size_t hash = kSeed; // Hash data sections of builtin code objects. for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast; ++builtin) { Code code = FromCodeT(builtins()->code(builtin)); DCHECK(Internals::HasHeapObjectTag(code.ptr())); uint8_t* const code_ptr = reinterpret_cast(code.ptr() - kHeapObjectTag); // These static asserts ensure we don't miss relevant fields. We don't hash // pointer compression base, instruction/metadata size value and flags since // they change when creating the off-heap trampolines. Other data fields // must remain the same. #ifdef V8_EXTERNAL_CODE_SPACE STATIC_ASSERT(Code::kMainCageBaseUpper32BitsOffset == Code::kDataStart); STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kMainCageBaseUpper32BitsOffsetEnd + 1); #else STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kDataStart); #endif // V8_EXTERNAL_CODE_SPACE STATIC_ASSERT(Code::kMetadataSizeOffset == Code::kInstructionSizeOffsetEnd + 1); STATIC_ASSERT(Code::kFlagsOffset == Code::kMetadataSizeOffsetEnd + 1); STATIC_ASSERT(Code::kBuiltinIndexOffset == Code::kFlagsOffsetEnd + 1); static constexpr int kStartOffset = Code::kBuiltinIndexOffset; for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) { hash = base::hash_combine(hash, size_t{code_ptr[j]}); } } // The builtins constants table is also tightly tied to embedded builtins. hash = base::hash_combine( hash, static_cast(heap_.builtins_constants_table().length())); return hash; } base::Thread::LocalStorageKey Isolate::isolate_key_; base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_; std::atomic Isolate::isolate_key_created_{false}; namespace { // A global counter for all generated Isolates, might overflow. std::atomic isolate_counter{0}; } // namespace Isolate::PerIsolateThreadData* Isolate::FindOrAllocatePerThreadDataForThisThread() { ThreadId thread_id = ThreadId::Current(); PerIsolateThreadData* per_thread = nullptr; { base::MutexGuard lock_guard(&thread_data_table_mutex_); per_thread = thread_data_table_.Lookup(thread_id); if (per_thread == nullptr) { if (FLAG_adjust_os_scheduling_parameters) { base::OS::AdjustSchedulingParams(); } per_thread = new PerIsolateThreadData(this, thread_id); thread_data_table_.Insert(per_thread); } DCHECK(thread_data_table_.Lookup(thread_id) == per_thread); } return per_thread; } void Isolate::DiscardPerThreadDataForThisThread() { ThreadId thread_id = ThreadId::TryGetCurrent(); if (thread_id.IsValid()) { DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed), thread_id); base::MutexGuard lock_guard(&thread_data_table_mutex_); PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id); if (per_thread) { DCHECK(!per_thread->thread_state_); thread_data_table_.Remove(per_thread); } } } Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() { ThreadId thread_id = ThreadId::Current(); return FindPerThreadDataForThread(thread_id); } Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread( ThreadId thread_id) { PerIsolateThreadData* per_thread = nullptr; { base::MutexGuard lock_guard(&thread_data_table_mutex_); per_thread = thread_data_table_.Lookup(thread_id); } return per_thread; } void Isolate::InitializeOncePerProcess() { isolate_key_ = base::Thread::CreateThreadLocalKey(); bool expected = false; CHECK(isolate_key_created_.compare_exchange_strong( expected, true, std::memory_order_relaxed)); per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey(); Heap::InitializeOncePerProcess(); } void Isolate::DisposeOncePerProcess() { base::Thread::DeleteThreadLocalKey(isolate_key_); bool expected = true; CHECK(isolate_key_created_.compare_exchange_strong( expected, false, std::memory_order_relaxed)); base::Thread::DeleteThreadLocalKey(per_isolate_thread_data_key_); } Address Isolate::get_address_from_id(IsolateAddressId id) { return isolate_addresses_[id]; } char* Isolate::Iterate(RootVisitor* v, char* thread_storage) { ThreadLocalTop* thread = reinterpret_cast(thread_storage); Iterate(v, thread); return thread_storage + sizeof(ThreadLocalTop); } void Isolate::IterateThread(ThreadVisitor* v, char* t) { ThreadLocalTop* thread = reinterpret_cast(t); v->VisitThread(this, thread); } void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) { // Visit the roots from the top for a given thread. v->VisitRootPointer(Root::kStackRoots, nullptr, FullObjectSlot(&thread->pending_exception_)); v->VisitRootPointer(Root::kStackRoots, nullptr, FullObjectSlot(&thread->pending_message_)); v->VisitRootPointer(Root::kStackRoots, nullptr, FullObjectSlot(&thread->context_)); v->VisitRootPointer(Root::kStackRoots, nullptr, FullObjectSlot(&thread->scheduled_exception_)); for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr; block = block->next_) { // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too). v->VisitRootPointer( Root::kStackRoots, nullptr, FullObjectSlot(reinterpret_cast
(&(block->exception_)))); v->VisitRootPointer( Root::kStackRoots, nullptr, FullObjectSlot(reinterpret_cast
(&(block->message_obj_)))); } #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING ConservativeStackVisitor stack_visitor(this, v); thread_local_top()->stack_.IteratePointers(&stack_visitor); #endif // Iterate over pointers on native execution stack. #if V8_ENABLE_WEBASSEMBLY wasm::WasmCodeRefScope wasm_code_ref_scope; if (FLAG_experimental_wasm_stack_switching) { wasm::StackMemory* current = wasm_stacks_; DCHECK_NOT_NULL(current); do { if (current->IsActive()) { // The active stack's jump buffer does not match the current state, use // the thread info below instead. current = current->next(); continue; } for (StackFrameIterator it(this, current); !it.done(); it.Advance()) { it.frame()->Iterate(v); } current = current->next(); } while (current != wasm_stacks_); } #endif // V8_ENABLE_WEBASSEMBLY for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) { it.frame()->Iterate(v); } } void Isolate::Iterate(RootVisitor* v) { ThreadLocalTop* current_t = thread_local_top(); Iterate(v, current_t); } void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) { thread_local_top()->try_catch_handler_ = that; } void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) { DCHECK(thread_local_top()->try_catch_handler_ == that); thread_local_top()->try_catch_handler_ = that->next_; } Handle Isolate::StackTraceString() { if (stack_trace_nesting_level_ == 0) { stack_trace_nesting_level_++; HeapStringAllocator allocator; StringStream::ClearMentionedObjectCache(this); StringStream accumulator(&allocator); incomplete_message_ = &accumulator; PrintStack(&accumulator); Handle stack_trace = accumulator.ToString(this); incomplete_message_ = nullptr; stack_trace_nesting_level_ = 0; return stack_trace; } else if (stack_trace_nesting_level_ == 1) { stack_trace_nesting_level_++; base::OS::PrintError( "\n\nAttempt to print stack while printing stack (double fault)\n"); base::OS::PrintError( "If you are lucky you may find a partial stack dump on stdout.\n\n"); incomplete_message_->OutputToStdOut(); return factory()->empty_string(); } else { base::OS::Abort(); } } void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3, void* ptr4) { StackTraceFailureMessage message(this, StackTraceFailureMessage::kIncludeStackTrace, ptr1, ptr2, ptr3, ptr4); message.Print(); base::OS::Abort(); } void Isolate::PushParamsAndDie(void* ptr1, void* ptr2, void* ptr3, void* ptr4, void* ptr5, void* ptr6) { StackTraceFailureMessage message( this, StackTraceFailureMessage::kDontIncludeStackTrace, ptr1, ptr2, ptr3, ptr4, ptr5, ptr6); message.Print(); base::OS::Abort(); } void StackTraceFailureMessage::Print() volatile { // Print the details of this failure message object, including its own address // to force stack allocation. base::OS::PrintError( "Stacktrace:\n ptr1=%p\n ptr2=%p\n ptr3=%p\n ptr4=%p\n " "ptr5=%p\n ptr6=%p\n failure_message_object=%p\n%s", ptr1_, ptr2_, ptr3_, ptr4_, ptr5_, ptr6_, this, &js_stack_trace_[0]); } StackTraceFailureMessage::StackTraceFailureMessage( Isolate* isolate, StackTraceFailureMessage::StackTraceMode mode, void* ptr1, void* ptr2, void* ptr3, void* ptr4, void* ptr5, void* ptr6) { isolate_ = isolate; ptr1_ = ptr1; ptr2_ = ptr2; ptr3_ = ptr3; ptr4_ = ptr4; ptr5_ = ptr5; ptr6_ = ptr6; // Write a stracktrace into the {js_stack_trace_} buffer. const size_t buffer_length = arraysize(js_stack_trace_); memset(&js_stack_trace_, 0, buffer_length); memset(&code_objects_, 0, sizeof(code_objects_)); if (mode == kIncludeStackTrace) { FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1); StringStream accumulator(&fixed, StringStream::kPrintObjectConcise); isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose); // Keeping a reference to the last code objects to increase likelyhood that // they get included in the minidump. const size_t code_objects_length = arraysize(code_objects_); size_t i = 0; StackFrameIterator it(isolate); for (; !it.done() && i < code_objects_length; it.Advance()) { code_objects_[i++] = reinterpret_cast(it.frame()->unchecked_code().ptr()); } } } bool NoExtension(const v8::FunctionCallbackInfo&) { return false; } namespace { class CallSiteBuilder { public: CallSiteBuilder(Isolate* isolate, FrameSkipMode mode, int limit, Handle caller) : isolate_(isolate), mode_(mode), limit_(limit), caller_(caller), skip_next_frame_(mode != SKIP_NONE) { DCHECK_IMPLIES(mode_ == SKIP_UNTIL_SEEN, caller_->IsJSFunction()); // Modern web applications are usually built with multiple layers of // framework and library code, and stack depth tends to be more than // a dozen frames, so we over-allocate a bit here to avoid growing // the elements array in the common case. elements_ = isolate->factory()->NewFixedArray(std::min(64, limit)); } bool Visit(FrameSummary const& summary) { if (Full()) return false; #if V8_ENABLE_WEBASSEMBLY if (summary.IsWasm()) { AppendWasmFrame(summary.AsWasm()); return true; } #endif // V8_ENABLE_WEBASSEMBLY AppendJavaScriptFrame(summary.AsJavaScript()); return true; } void AppendAsyncFrame(Handle generator_object) { Handle function(generator_object->function(), isolate_); if (!IsVisibleInStackTrace(function)) return; int flags = CallSiteInfo::kIsAsync; if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict; Handle receiver(generator_object->receiver(), isolate_); Handle code(function->shared().GetBytecodeArray(isolate_), isolate_); // The stored bytecode offset is relative to a different base than what // is used in the source position table, hence the subtraction. int offset = Smi::ToInt(generator_object->input_or_debug_pos()) - (BytecodeArray::kHeaderSize - kHeapObjectTag); Handle parameters = isolate_->factory()->empty_fixed_array(); if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) { parameters = isolate_->factory()->CopyFixedArrayUpTo( handle(generator_object->parameters_and_registers(), isolate_), function->shared() .internal_formal_parameter_count_without_receiver()); } AppendFrame(receiver, function, code, offset, flags, parameters); } void AppendPromiseCombinatorFrame(Handle element_function, Handle combinator) { if (!IsVisibleInStackTrace(combinator)) return; int flags = CallSiteInfo::kIsAsync | CallSiteInfo::kIsSourcePositionComputed; Handle receiver(combinator->native_context().promise_function(), isolate_); // TODO(v8:11880): avoid roundtrips between cdc and code. Handle code(FromCodeT(combinator->code()), isolate_); // TODO(mmarchini) save Promises list from the Promise combinator Handle parameters = isolate_->factory()->empty_fixed_array(); // We store the offset of the promise into the element function's // hash field for element callbacks. int promise_index = Smi::ToInt(Smi::cast(element_function->GetIdentityHash())) - 1; AppendFrame(receiver, combinator, code, promise_index, flags, parameters); } void AppendJavaScriptFrame( FrameSummary::JavaScriptFrameSummary const& summary) { // Filter out internal frames that we do not want to show. if (!IsVisibleInStackTrace(summary.function())) return; int flags = 0; Handle function = summary.function(); if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict; if (summary.is_constructor()) flags |= CallSiteInfo::kIsConstructor; AppendFrame(summary.receiver(), function, summary.abstract_code(), summary.code_offset(), flags, summary.parameters()); } #if V8_ENABLE_WEBASSEMBLY void AppendWasmFrame(FrameSummary::WasmFrameSummary const& summary) { if (summary.code()->kind() != wasm::WasmCode::kWasmFunction) return; Handle instance = summary.wasm_instance(); int flags = CallSiteInfo::kIsWasm; if (instance->module_object().is_asm_js()) { flags |= CallSiteInfo::kIsAsmJsWasm; if (summary.at_to_number_conversion()) { flags |= CallSiteInfo::kIsAsmJsAtNumberConversion; } } auto code = Managed::Allocate( isolate_, 0, summary.code(), instance->module_object().shared_native_module()); AppendFrame(instance, handle(Smi::FromInt(summary.function_index()), isolate_), code, summary.code_offset(), flags, isolate_->factory()->empty_fixed_array()); } #endif // V8_ENABLE_WEBASSEMBLY bool Full() { return index_ >= limit_; } Handle Build() { return FixedArray::ShrinkOrEmpty(isolate_, elements_, index_); } private: // Poison stack frames below the first strict mode frame. // The stack trace API should not expose receivers and function // objects on frames deeper than the top-most one with a strict mode // function. bool IsStrictFrame(Handle function) { if (!encountered_strict_function_) { encountered_strict_function_ = is_strict(function->shared().language_mode()); } return encountered_strict_function_; } // Determines whether the given stack frame should be displayed in a stack // trace. bool IsVisibleInStackTrace(Handle function) { return ShouldIncludeFrame(function) && IsNotHidden(function); } // This mechanism excludes a number of uninteresting frames from the stack // trace. This can be be the first frame (which will be a builtin-exit frame // for the error constructor builtin) or every frame until encountering a // user-specified function. bool ShouldIncludeFrame(Handle function) { switch (mode_) { case SKIP_NONE: return true; case SKIP_FIRST: if (!skip_next_frame_) return true; skip_next_frame_ = false; return false; case SKIP_UNTIL_SEEN: if (skip_next_frame_ && (*function == *caller_)) { skip_next_frame_ = false; return false; } return !skip_next_frame_; } UNREACHABLE(); } bool IsNotHidden(Handle function) { // TODO(szuend): Remove this check once the flag is enabled // by default. if (!FLAG_experimental_stack_trace_frames && function->shared().IsApiFunction()) { return false; } // Functions defined not in user scripts are not visible unless directly // exposed, in which case the native flag is set. // The --builtins-in-stack-traces command line flag allows including // internal call sites in the stack trace for debugging purposes. if (!FLAG_builtins_in_stack_traces && !function->shared().IsUserJavaScript()) { return function->shared().native() || function->shared().IsApiFunction(); } return true; } void AppendFrame(Handle receiver_or_instance, Handle function, Handle code, int offset, int flags, Handle parameters) { if (receiver_or_instance->IsTheHole(isolate_)) { // TODO(jgruber): Fix all cases in which frames give us a hole value // (e.g. the receiver in RegExp constructor frames). receiver_or_instance = isolate_->factory()->undefined_value(); } auto info = isolate_->factory()->NewCallSiteInfo( receiver_or_instance, function, code, offset, flags, parameters); elements_ = FixedArray::SetAndGrow(isolate_, elements_, index_++, info); } Isolate* isolate_; const FrameSkipMode mode_; int index_ = 0; const int limit_; const Handle caller_; bool skip_next_frame_; bool encountered_strict_function_ = false; Handle elements_; }; bool GetStackTraceLimit(Isolate* isolate, int* result) { if (FLAG_correctness_fuzzer_suppressions) return false; Handle error = isolate->error_function(); Handle key = isolate->factory()->stackTraceLimit_string(); Handle stack_trace_limit = JSReceiver::GetDataProperty(isolate, error, key); if (!stack_trace_limit->IsNumber()) return false; // Ensure that limit is not negative. *result = std::max(FastD2IChecked(stack_trace_limit->Number()), 0); if (*result != FLAG_stack_trace_limit) { isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit); } return true; } bool IsBuiltinFunction(Isolate* isolate, HeapObject object, Builtin builtin) { if (!object.IsJSFunction()) return false; JSFunction const function = JSFunction::cast(object); return function.code() == isolate->builtins()->code(builtin); } void CaptureAsyncStackTrace(Isolate* isolate, Handle promise, CallSiteBuilder* builder) { while (!builder->Full()) { // Check that the {promise} is not settled. if (promise->status() != Promise::kPending) return; // Check that we have exactly one PromiseReaction on the {promise}. if (!promise->reactions().IsPromiseReaction()) return; Handle reaction( PromiseReaction::cast(promise->reactions()), isolate); if (!reaction->next().IsSmi()) return; // Check if the {reaction} has one of the known async function or // async generator continuations as its fulfill handler. if (IsBuiltinFunction(isolate, reaction->fulfill_handler(), Builtin::kAsyncFunctionAwaitResolveClosure) || IsBuiltinFunction(isolate, reaction->fulfill_handler(), Builtin::kAsyncGeneratorAwaitResolveClosure) || IsBuiltinFunction(isolate, reaction->fulfill_handler(), Builtin::kAsyncGeneratorYieldResolveClosure)) { // Now peek into the handlers' AwaitContext to get to // the JSGeneratorObject for the async function. Handle context( JSFunction::cast(reaction->fulfill_handler()).context(), isolate); Handle generator_object( JSGeneratorObject::cast(context->extension()), isolate); CHECK(generator_object->is_suspended()); // Append async frame corresponding to the {generator_object}. builder->AppendAsyncFrame(generator_object); // Try to continue from here. if (generator_object->IsJSAsyncFunctionObject()) { Handle async_function_object = Handle::cast(generator_object); promise = handle(async_function_object->promise(), isolate); } else { Handle async_generator_object = Handle::cast(generator_object); if (async_generator_object->queue().IsUndefined(isolate)) return; Handle async_generator_request( AsyncGeneratorRequest::cast(async_generator_object->queue()), isolate); promise = handle(JSPromise::cast(async_generator_request->promise()), isolate); } } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(), Builtin::kPromiseAllResolveElementClosure)) { Handle function(JSFunction::cast(reaction->fulfill_handler()), isolate); Handle context(function->context(), isolate); Handle combinator(context->native_context().promise_all(), isolate); builder->AppendPromiseCombinatorFrame(function, combinator); // Now peak into the Promise.all() resolve element context to // find the promise capability that's being resolved when all // the concurrent promises resolve. int const index = PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot; Handle capability( PromiseCapability::cast(context->get(index)), isolate); if (!capability->promise().IsJSPromise()) return; promise = handle(JSPromise::cast(capability->promise()), isolate); } else if (IsBuiltinFunction( isolate, reaction->fulfill_handler(), Builtin::kPromiseAllSettledResolveElementClosure)) { Handle function(JSFunction::cast(reaction->fulfill_handler()), isolate); Handle context(function->context(), isolate); Handle combinator( context->native_context().promise_all_settled(), isolate); builder->AppendPromiseCombinatorFrame(function, combinator); // Now peak into the Promise.allSettled() resolve element context to // find the promise capability that's being resolved when all // the concurrent promises resolve. int const index = PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot; Handle capability( PromiseCapability::cast(context->get(index)), isolate); if (!capability->promise().IsJSPromise()) return; promise = handle(JSPromise::cast(capability->promise()), isolate); } else if (IsBuiltinFunction(isolate, reaction->reject_handler(), Builtin::kPromiseAnyRejectElementClosure)) { Handle function(JSFunction::cast(reaction->reject_handler()), isolate); Handle context(function->context(), isolate); Handle combinator(context->native_context().promise_any(), isolate); builder->AppendPromiseCombinatorFrame(function, combinator); // Now peak into the Promise.any() reject element context to // find the promise capability that's being resolved when any of // the concurrent promises resolve. int const index = PromiseBuiltins::kPromiseAnyRejectElementCapabilitySlot; Handle capability( PromiseCapability::cast(context->get(index)), isolate); if (!capability->promise().IsJSPromise()) return; promise = handle(JSPromise::cast(capability->promise()), isolate); } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(), Builtin::kPromiseCapabilityDefaultResolve)) { Handle function(JSFunction::cast(reaction->fulfill_handler()), isolate); Handle context(function->context(), isolate); promise = handle(JSPromise::cast(context->get(PromiseBuiltins::kPromiseSlot)), isolate); } else { // We have some generic promise chain here, so try to // continue with the chained promise on the reaction // (only works for native promise chains). Handle promise_or_capability( reaction->promise_or_capability(), isolate); if (promise_or_capability->IsJSPromise()) { promise = Handle::cast(promise_or_capability); } else if (promise_or_capability->IsPromiseCapability()) { Handle capability = Handle::cast(promise_or_capability); if (!capability->promise().IsJSPromise()) return; promise = handle(JSPromise::cast(capability->promise()), isolate); } else { // Otherwise the {promise_or_capability} must be undefined here. CHECK(promise_or_capability->IsUndefined(isolate)); return; } } } } void CaptureAsyncStackTrace(Isolate* isolate, CallSiteBuilder* builder) { Handle current_microtask = isolate->factory()->current_microtask(); if (current_microtask->IsPromiseReactionJobTask()) { Handle promise_reaction_job_task = Handle::cast(current_microtask); // Check if the {reaction} has one of the known async function or // async generator continuations as its fulfill handler. if (IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncFunctionAwaitResolveClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncGeneratorAwaitResolveClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncGeneratorYieldResolveClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncFunctionAwaitRejectClosure) || IsBuiltinFunction(isolate, promise_reaction_job_task->handler(), Builtin::kAsyncGeneratorAwaitRejectClosure)) { // Now peek into the handlers' AwaitContext to get to // the JSGeneratorObject for the async function. Handle context( JSFunction::cast(promise_reaction_job_task->handler()).context(), isolate); Handle generator_object( JSGeneratorObject::cast(context->extension()), isolate); if (generator_object->is_executing()) { if (generator_object->IsJSAsyncFunctionObject()) { Handle async_function_object = Handle::cast(generator_object); Handle promise(async_function_object->promise(), isolate); CaptureAsyncStackTrace(isolate, promise, builder); } else { Handle async_generator_object = Handle::cast(generator_object); Handle queue(async_generator_object->queue(), isolate); if (!queue->IsUndefined(isolate)) { Handle async_generator_request = Handle::cast(queue); Handle promise( JSPromise::cast(async_generator_request->promise()), isolate); CaptureAsyncStackTrace(isolate, promise, builder); } } } } else { // The {promise_reaction_job_task} doesn't belong to an await (or // yield inside an async generator), but we might still be able to // find an async frame if we follow along the chain of promises on // the {promise_reaction_job_task}. Handle promise_or_capability( promise_reaction_job_task->promise_or_capability(), isolate); if (promise_or_capability->IsJSPromise()) { Handle promise = Handle::cast(promise_or_capability); CaptureAsyncStackTrace(isolate, promise, builder); } } } } template void VisitStack(Isolate* isolate, Visitor* visitor, StackTrace::StackTraceOptions options = StackTrace::kDetailed) { DisallowJavascriptExecution no_js(isolate); for (StackFrameIterator it(isolate); !it.done(); it.Advance()) { StackFrame* frame = it.frame(); switch (frame->type()) { case StackFrame::BUILTIN_EXIT: case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION: case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: case StackFrame::OPTIMIZED: case StackFrame::INTERPRETED: case StackFrame::BASELINE: case StackFrame::BUILTIN: #if V8_ENABLE_WEBASSEMBLY case StackFrame::WASM: #endif // V8_ENABLE_WEBASSEMBLY { // A standard frame may include many summarized frames (due to // inlining). std::vector summaries; CommonFrame::cast(frame)->Summarize(&summaries); for (auto rit = summaries.rbegin(); rit != summaries.rend(); ++rit) { FrameSummary& summary = *rit; // Skip frames from other origins when asked to do so. if (!(options & StackTrace::kExposeFramesAcrossSecurityOrigins) && !summary.native_context()->HasSameSecurityTokenAs( isolate->context())) { continue; } if (!visitor->Visit(summary)) return; } break; } default: break; } } } Handle CaptureSimpleStackTrace(Isolate* isolate, int limit, FrameSkipMode mode, Handle caller) { TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__, "maxFrameCount", limit); #if V8_ENABLE_WEBASSEMBLY wasm::WasmCodeRefScope code_ref_scope; #endif // V8_ENABLE_WEBASSEMBLY CallSiteBuilder builder(isolate, mode, limit, caller); VisitStack(isolate, &builder); // If --async-stack-traces are enabled and the "current microtask" is a // PromiseReactionJobTask, we try to enrich the stack trace with async // frames. if (FLAG_async_stack_traces) { CaptureAsyncStackTrace(isolate, &builder); } Handle stack_trace = builder.Build(); TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__, "frameCount", stack_trace->length()); return stack_trace; } } // namespace MaybeHandle Isolate::CaptureAndSetErrorStack( Handle error_object, FrameSkipMode mode, Handle caller) { TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__); Handle error_stack = factory()->undefined_value(); // Capture the "simple stack trace" for the error.stack property, // which can be disabled by setting Error.stackTraceLimit to a non // number value or simply deleting the property. If the inspector // is active, and requests more stack frames than the JavaScript // program itself, we collect up to the maximum. int stack_trace_limit = 0; if (GetStackTraceLimit(this, &stack_trace_limit)) { int limit = stack_trace_limit; if (capture_stack_trace_for_uncaught_exceptions_ && !(stack_trace_for_uncaught_exceptions_options_ & StackTrace::kExposeFramesAcrossSecurityOrigins)) { // Collect up to the maximum of what the JavaScript program and // the inspector want. There's a special case here where the API // can ask the stack traces to also include cross-origin frames, // in which case we collect a separate trace below. Note that // the inspector doesn't use this option, so we could as well // just deprecate this in the future. if (limit < stack_trace_for_uncaught_exceptions_frame_limit_) { limit = stack_trace_for_uncaught_exceptions_frame_limit_; } } error_stack = CaptureSimpleStackTrace(this, limit, mode, caller); } // Next is the inspector part: Depending on whether we got a "simple // stack trace" above and whether that's usable (meaning the API // didn't request to include cross-origin frames), we remember the // cap for the stack trace (either a positive limit indicating that // the Error.stackTraceLimit value was below what was requested via // the API, or a negative limit to indicate the opposite), or we // collect a "detailed stack trace" eagerly and stash that away. if (capture_stack_trace_for_uncaught_exceptions_) { Handle limit_or_stack_frame_infos; if (error_stack->IsUndefined(this) || (stack_trace_for_uncaught_exceptions_options_ & StackTrace::kExposeFramesAcrossSecurityOrigins)) { limit_or_stack_frame_infos = CaptureDetailedStackTrace( stack_trace_for_uncaught_exceptions_frame_limit_, stack_trace_for_uncaught_exceptions_options_); } else { int limit = stack_trace_limit > stack_trace_for_uncaught_exceptions_frame_limit_ ? -stack_trace_for_uncaught_exceptions_frame_limit_ : stack_trace_limit; limit_or_stack_frame_infos = handle(Smi::FromInt(limit), this); } error_stack = factory()->NewErrorStackData(error_stack, limit_or_stack_frame_infos); } RETURN_ON_EXCEPTION( this, JSObject::SetProperty(this, error_object, factory()->error_stack_symbol(), error_stack, StoreOrigin::kMaybeKeyed, Just(ShouldThrow::kThrowOnError)), JSObject); return error_object; } Handle Isolate::GetDetailedStackTrace( Handle error_object) { Handle error_stack = JSReceiver::GetDataProperty( this, error_object, factory()->error_stack_symbol()); if (!error_stack->IsErrorStackData()) { return Handle(); } Handle error_stack_data = Handle::cast(error_stack); ErrorStackData::EnsureStackFrameInfos(this, error_stack_data); if (!error_stack_data->limit_or_stack_frame_infos().IsFixedArray()) { return Handle(); } return handle( FixedArray::cast(error_stack_data->limit_or_stack_frame_infos()), this); } Handle Isolate::GetSimpleStackTrace( Handle error_object) { Handle error_stack = JSReceiver::GetDataProperty( this, error_object, factory()->error_stack_symbol()); if (error_stack->IsFixedArray()) { return Handle::cast(error_stack); } if (!error_stack->IsErrorStackData()) { return factory()->empty_fixed_array(); } Handle error_stack_data = Handle::cast(error_stack); if (!error_stack_data->HasCallSiteInfos()) { return factory()->empty_fixed_array(); } return handle(error_stack_data->call_site_infos(), this); } Address Isolate::GetAbstractPC(int* line, int* column) { JavaScriptFrameIterator it(this); if (it.done()) { *line = -1; *column = -1; return kNullAddress; } JavaScriptFrame* frame = it.frame(); DCHECK(!frame->is_builtin()); Handle shared = handle(frame->function().shared(), this); SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared); int position = frame->position(); Object maybe_script = frame->function().shared().script(); if (maybe_script.IsScript()) { Handle