// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

#include <queue>
#include <set>

#include "include/v8-debug.h"
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/base/atomicops.h"
#include "src/builtins.h"
#include "src/cancelable-task.h"
#include "src/contexts.h"
#include "src/date.h"
#include "src/execution.h"
#include "src/frames.h"
#include "src/futex-emulation.h"
#include "src/global-handles.h"
#include "src/handles.h"
#include "src/hashmap.h"
#include "src/heap/heap.h"
#include "src/messages.h"
#include "src/optimizing-compile-dispatcher.h"
#include "src/regexp/regexp-stack.h"
#include "src/runtime/runtime.h"
#include "src/runtime-profiler.h"
#include "src/zone.h"

namespace v8 {

namespace base {
class RandomNumberGenerator;
}

namespace internal {

class BasicBlockProfiler;
class Bootstrapper;
class CallInterfaceDescriptorData;
class CodeGenerator;
class CodeRange;
class CodeStubDescriptor;
class CodeTracer;
class CompilationCache;
class CompilationStatistics;
class ContextSlotCache;
class Counters;
class CpuFeatures;
class CpuProfiler;
class DeoptimizerData;
class Deserializer;
class EmptyStatement;
class ExternalCallbackScope;
class ExternalReferenceTable;
class Factory;
class FunctionInfoListener;
class HandleScopeImplementer;
class HeapProfiler;
class HStatistics;
class HTracer;
class InlineRuntimeFunctionsTable;
class InnerPointerToCodeCache;
class Logger;
class MaterializedObjectStore;
class CodeAgingHelper;
class RegExpStack;
class SaveContext;
class StatsTable;
class StringTracker;
class StubCache;
class SweeperThread;
class ThreadManager;
class ThreadState;
class ThreadVisitor;  // Defined in v8threads.h
class UnicodeCache;
template <StateTag Tag> class VMState;

// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we cannot include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();


class Debug;
class PromiseOnStack;
class Redirection;
class Simulator;

namespace interpreter {
class Interpreter;
}

// Static indirection table for handles to constants. If a Result
// represents a constant, the data contains an index into this table
// of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;

#define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate) \
  do {                                                 \
    Isolate* __isolate__ = (isolate);                  \
    if (__isolate__->has_scheduled_exception()) {      \
      return __isolate__->PromoteScheduledException(); \
    }                                                  \
  } while (false)

// Macros for MaybeHandle.
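//
// A minimal usage sketch (a hypothetical runtime function, not code from this
// header), assuming the usual RUNTIME_FUNCTION/HandleScope conventions:
//
//   RUNTIME_FUNCTION(Runtime_ExampleOp) {
//     HandleScope scope(isolate);
//     RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
//     Handle<Object> result;
//     ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
//                                        DoSomethingThatMayThrow(isolate));
//     return *result;
//   }
//
// Runtime_ExampleOp and DoSomethingThatMayThrow are made-up names used only
// to illustrate how the macros below combine MaybeHandle results with the
// pending/scheduled exception checks.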

#define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
  do {                                                      \
    Isolate* __isolate__ = (isolate);                       \
    if (__isolate__->has_scheduled_exception()) {           \
      __isolate__->PromoteScheduledException();             \
      return value;                                         \
    }                                                       \
  } while (false)

#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())

#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
  do {                                                              \
    if (!(call).ToHandle(&dst)) {                                   \
      DCHECK((isolate)->has_pending_exception());                   \
      return value;                                                 \
    }                                                               \
  } while (false)

#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call) \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(                            \
      isolate, dst, call, isolate->heap()->exception())

#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T) \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())

#define THROW_NEW_ERROR(isolate, call, T)               \
  do {                                                  \
    return isolate->Throw<T>(isolate->factory()->call); \
  } while (false)

#define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call) \
  do {                                                \
    return isolate->Throw(*isolate->factory()->call); \
  } while (false)

#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
  do {                                                  \
    if ((call).is_null()) {                             \
      DCHECK((isolate)->has_pending_exception());       \
      return value;                                     \
    }                                                   \
  } while (false)

#define RETURN_FAILURE_ON_EXCEPTION(isolate, call) \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())

#define RETURN_ON_EXCEPTION(isolate, call, T) \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())


#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(CFunction, c_function)                              \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(PendingHandlerContext, pending_handler_context)     \
  C(PendingHandlerCode, pending_handler_code)           \
  C(PendingHandlerOffset, pending_handler_offset)       \
  C(PendingHandlerFP, pending_handler_fp)               \
  C(PendingHandlerSP, pending_handler_sp)               \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)


// Platform-independent, reliable thread identifier.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }

  ThreadId& operator=(const ThreadId& other) {
    base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
    return *this;
  }

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return base::NoBarrier_Load(&id_) != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::GetCurrentThreadId).
  int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  static const int kInvalidId = -1;

  explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }

  static int AllocateThreadId();

  static int GetCurrentThreadId();

  base::Atomic32 id_;

  static base::Atomic32 highest_thread_id_;

  friend class Isolate;
};


#define FIELD_ACCESSOR(type, name)                \
  inline void set_##name(type v) { name##_ = v; } \
  inline type name() const { return name##_; }


class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack. If such an
  // address is needed, use try_catch_handler_address.
  FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack. When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer. When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  Address try_catch_handler_address() {
    return reinterpret_cast<Address>(
        v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
  }

  void Free();

  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  ThreadId thread_id_;
  Object* pending_exception_;

  // Communication channel between Isolate::FindHandler and the CEntryStub.
  Context* pending_handler_context_;
  Code* pending_handler_code_;
  intptr_t pending_handler_offset_;
  Address pending_handler_fp_;
  Address pending_handler_sp_;

  // Communication channel between Isolate::Throw and message consumers.
  bool rethrowing_message_;
  Object* pending_message_obj_;

  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception. We may want to
  // unify them later.
  Object* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;     // try-blocks are chained through the stack
  Address c_function_;  // C function that was called at c entry.

  // Throwing an exception may cause a Promise rejection. For this purpose
  // we keep track of a stack of nested promises and the corresponding
  // try-catch handlers.
  PromiseOnStack* promise_on_stack_;

#ifdef USE_SIMULATOR
  Simulator* simulator_;
#endif

  Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
  // the external callback we're currently in
  ExternalCallbackScope* external_callback_scope_;
  StateTag current_vm_state_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

 private:
  void InitializeInternal();

  v8::TryCatch* try_catch_handler_;
};


#if USE_SIMULATOR

#define ISOLATE_INIT_SIMULATOR_LIST(V)  \
  V(bool, simulator_initialized, false) \
  V(HashMap*, simulator_i_cache, NULL)  \
  V(Redirection*, simulator_redirection, NULL)
#else

#define ISOLATE_INIT_SIMULATOR_LIST(V)

#endif


#ifdef DEBUG

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)               \
  V(CommentStatistic, paged_space_comments_statistics, \
    CommentStatistic::kMaxComments + 1)                \
  V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* SerializerDeserializer state. */                                          \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  V(uint32_t, private_random_seed, 2)                                          \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

typedef List<HeapObject*> DebugObjectCache;

#define ISOLATE_INIT_LIST(V)                                                    \
  /* Assembler state. */                                                        \
  V(FatalErrorCallback, exception_behavior, NULL)                               \
  V(LogEventCallback, event_logger, NULL)                                       \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)      \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(int, next_serial_number, 0)                                                 \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)   \
  /* Part of the state of liveedit. */                                          \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                 \
  /* State for Relocatable. */                                                  \
  V(Relocatable*, relocatable_top, NULL)                                        \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                  \
  V(Object*, string_stream_current_security_token, NULL)                        \
  V(ExternalReferenceTable*, external_reference_table, NULL)                    \
  V(HashMap*, external_reference_map, NULL)                                     \
  V(HashMap*, root_index_map, NULL)                                             \
  V(int, pending_microtask_count, 0)                                            \
  V(bool, autorun_microtasks, true)                                             \
  V(HStatistics*, hstatistics, NULL)                                            \
  V(CompilationStatistics*, turbo_statistics, NULL)                             \
  V(HTracer*, htracer, NULL)                                                    \
  V(CodeTracer*, code_tracer, NULL)                                             \
  V(bool, fp_stubs_generated, false)                                            \
  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                             \
  V(PromiseRejectCallback, promise_reject_callback, NULL)                       \
  V(const v8::StartupData*, snapshot_blob, NULL)                                \
  ISOLATE_INIT_SIMULATOR_LIST(V)

#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
  inline void set_##name(type v) { thread_local_top_.name##_ = v; }  \
  inline type name() const { return thread_local_top_.name##_; }

#define THREAD_LOCAL_TOP_ADDRESS(type, name) \
  type* name##_address() { return &thread_local_top_.name##_; }


class Isolate {
  // These forward declarations are required to make the friend declarations in
  // PerIsolateThreadData work on some older versions of gcc.
  class ThreadDataTable;
  class EntryStackItem;
 public:
  ~Isolate();

  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
#if USE_SIMULATOR
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    ~PerIsolateThreadData();
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }

    FIELD_ACCESSOR(uintptr_t, stack_limit)
    FIELD_ACCESSOR(ThreadState*, thread_state)

#if USE_SIMULATOR
    FIELD_ACCESSOR(Simulator*, simulator)
#endif

    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if USE_SIMULATOR
    Simulator* simulator_;
#endif

    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };


  enum AddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
    FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
#undef DECLARE_ENUM
    kIsolateAddressCount
  };

  static void InitializeOncePerProcess();

  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
  // not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    return reinterpret_cast<PerIsolateThreadData*>(
        base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
  }

  // Returns the isolate inside which the current thread is running.
  INLINE(static Isolate* Current()) {
    DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
    Isolate* isolate = reinterpret_cast<Isolate*>(
        base::Thread::GetExistingThreadLocal(isolate_key_));
    DCHECK(isolate != NULL);
    return isolate;
  }

  // Like Current, but skips the check that |isolate_key_| was initialized.
  // Callers have to ensure that themselves.
  // DO NOT USE. The only remaining callsite will be deleted soon.
  INLINE(static Isolate* UnsafeCurrent()) {
    return reinterpret_cast<Isolate*>(
        base::Thread::GetThreadLocal(isolate_key_));
  }

  // Usually called by Init(), but can be called early e.g. to allow
  // testing components that require logging but not the whole
  // isolate.
  //
  // Safe to call more than once.
  void InitializeLoggingAndCounters();

  bool Init(Deserializer* des);

  // True if at least one thread Enter'ed this isolate.
  bool IsInUse() { return entry_stack_ != NULL; }

  // Destroys the non-default isolates.
  // Sets the default isolate into "has_been_disposed" state rather than
  // destroying it, for legacy API reasons.
  void TearDown();

  static void GlobalTearDown();

  void ClearSerializerData();

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThisThread();

  // Find the PerThread for given (isolate, thread) combination.
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);

  // Discard the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, no-op.
  void DiscardPerThreadDataForThisThread();

  // Returns the key used to store the pointer to the current isolate.
  // Used internally for V8 threads that do not execute JavaScript but still
  // are part of the domain of an isolate (like the context switcher).
  static base::Thread::LocalStorageKey isolate_key() {
    return isolate_key_;
  }

  // Returns the key used to store process-wide thread IDs.
  static base::Thread::LocalStorageKey thread_id_key() {
    return thread_id_key_;
  }

  static base::Thread::LocalStorageKey per_isolate_thread_data_key();

  // Mutex for serializing access to break control structures.
  base::RecursiveMutex* break_access() { return &break_access_; }

  Address get_address_from_id(AddressId id);

  // Access to top context (where the current function object was created).
  Context* context() { return thread_local_top_.context_; }
  inline void set_context(Context* context);
  Context** context_address() { return &thread_local_top_.context_; }

  THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)

  // Access to current thread id.
  THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)

  // Interface to pending exception.
  inline Object* pending_exception();
  inline void set_pending_exception(Object* exception_obj);
  inline void clear_pending_exception();

  THREAD_LOCAL_TOP_ADDRESS(Object*, pending_exception)

  inline bool has_pending_exception();

  THREAD_LOCAL_TOP_ADDRESS(Context*, pending_handler_context)
  THREAD_LOCAL_TOP_ADDRESS(Code*, pending_handler_code)
  THREAD_LOCAL_TOP_ADDRESS(intptr_t, pending_handler_offset)
  THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_fp)
  THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_sp)

  THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)

  v8::TryCatch* try_catch_handler() {
    return thread_local_top_.try_catch_handler();
  }
  bool* external_caught_exception_address() {
    return &thread_local_top_.external_caught_exception_;
  }

  THREAD_LOCAL_TOP_ADDRESS(Object*, scheduled_exception)

  inline void clear_pending_message();
  Address pending_message_obj_address() {
    return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
  }

  inline Object* scheduled_exception();
  inline bool has_scheduled_exception();
  inline void clear_scheduled_exception();

  bool IsJavaScriptHandlerOnTop(Object* exception);
  bool IsExternalHandlerOnTop(Object* exception);

  inline bool is_catchable_by_javascript(Object* exception);

  // JS execution stack (see frames.h).
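  //
  // Informal sketch (an assumption, based on the iterators declared in
  // frames.h): code that wants to walk this stack typically does
  //
  //   StackFrameIterator it(isolate);
  //   for (; !it.done(); it.Advance()) {
  //     StackFrame* frame = it.frame();
  //     // inspect frame->type(), frame->fp(), ...
  //   }
  //
  // and the raw anchors exposed below (c_entry_fp, handler, js_entry_sp) are
  // what such iterators and the CEntryStub bottom out on.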
  static Address c_entry_fp(ThreadLocalTop* thread) {
    return thread->c_entry_fp_;
  }
  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
  Address c_function() { return thread_local_top_.c_function_; }

  inline Address* c_entry_fp_address() {
    return &thread_local_top_.c_entry_fp_;
  }
  inline Address* handler_address() { return &thread_local_top_.handler_; }
  inline Address* c_function_address() {
    return &thread_local_top_.c_function_;
  }

  // Bottom JS entry.
  Address js_entry_sp() {
    return thread_local_top_.js_entry_sp_;
  }
  inline Address* js_entry_sp_address() {
    return &thread_local_top_.js_entry_sp_;
  }

  // Returns the global object of the current context. It could be
  // a builtin object, or a JS global object.
  inline Handle<JSGlobalObject> global_object();

  // Returns the global proxy object of the current context.
  JSObject* global_proxy() {
    return context()->global_proxy();
  }

  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
  void FreeThreadResources() { thread_local_top_.Free(); }

  // This method is called by the api after operations that may throw
  // exceptions. If an exception was thrown and not handled by an external
  // handler, the exception is scheduled to be rethrown when we return to
  // running JavaScript code. If an exception is scheduled, true is returned.
  bool OptionalRescheduleException(bool is_bottom_call);

  // Push and pop a promise and the current try-catch handler.
  void PushPromise(Handle<JSObject> promise, Handle<JSFunction> function);
  void PopPromise();
  Handle<Object> GetPromiseOnStackOnThrow();

  class ExceptionScope {
   public:
    // Scope currently can only be used for regular exceptions,
    // not termination exception.
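    //
    // Usage sketch (an assumption about a typical caller, not code from this
    // header): the scope shelves the currently pending exception on entry and
    // restores it on exit, e.g.
    //
    //   {
    //     Isolate::ExceptionScope scope(isolate);
    //     ...  // run code that may set and handle its own pending exception
    //   }      // the original pending exception is pending again here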
    inline explicit ExceptionScope(Isolate* isolate);
    inline ~ExceptionScope();

   private:
    Isolate* isolate_;
    Handle<Object> pending_exception_;
  };

  void SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options);

  void SetAbortOnUncaughtExceptionCallback(
      v8::Isolate::AbortOnUncaughtExceptionCallback callback);

  enum PrintStackMode { kPrintStackConcise, kPrintStackVerbose };
  void PrintCurrentStackTrace(FILE* out);
  void PrintStack(StringStream* accumulator,
                  PrintStackMode mode = kPrintStackVerbose);
  void PrintStack(FILE* out, PrintStackMode mode = kPrintStackVerbose);
  Handle<String> StackTraceString();
  NO_INLINE(void PushStackTraceAndDie(unsigned int magic, void* ptr1,
                                      void* ptr2, unsigned int magic2));
  Handle<JSArray> CaptureCurrentStackTrace(
      int frame_limit,
      StackTrace::StackTraceOptions options);
  Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
                                         Handle<Object> caller);
  MaybeHandle<JSObject> CaptureAndSetDetailedStackTrace(
      Handle<JSObject> error_object);
  MaybeHandle<JSObject> CaptureAndSetSimpleStackTrace(
      Handle<JSObject> error_object, Handle<Object> caller);
  Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
  Handle<JSArray> GetDetailedFromSimpleStackTrace(
      Handle<JSObject> error_object);

  // Returns whether the given context may access the given global object. If
  // the result is false, the pending exception is guaranteed to be
  // set.
  bool MayAccess(Handle<Context> accessing_context, Handle<JSObject> receiver);

  bool IsInternallyUsedPropertyName(Handle<Object> name);

  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
  void ReportFailedAccessCheck(Handle<JSObject> receiver);

  // Exception throwing support. The caller should use the result
  // of Throw() as its return value.
  Object* Throw(Object* exception, MessageLocation* location = NULL);
  Object* ThrowIllegalOperation();

  template <typename T>
  MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
                                       MessageLocation* location = NULL) {
    Throw(*exception, location);
    return MaybeHandle<T>();
  }

  // Re-throw an exception. This involves no error reporting since error
  // reporting was handled when the exception was thrown originally.
  Object* ReThrow(Object* exception);

  // Find the correct handler for the current pending exception. This also
  // clears and returns the current pending exception.
  Object* UnwindAndFindHandler();

  // Tries to predict whether an exception will be caught. Note that this can
  // only produce an estimate, because it is undecidable whether a finally
  // clause will consume or re-throw an exception. We conservatively assume any
  // finally clause will behave as if the exception were consumed.
  enum CatchType { NOT_CAUGHT, CAUGHT_BY_JAVASCRIPT, CAUGHT_BY_EXTERNAL };
  CatchType PredictExceptionCatcher();

  void ScheduleThrow(Object* exception);
  // Re-set pending message, script and positions reported to the TryCatch
  // back to the TLS for re-use when rethrowing.
  void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
  // Un-schedule an exception that was caught by a TryCatch handler.
  void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
  void ReportPendingMessages();
  // Return the pending message location if there is one, otherwise an
  // unfilled structure.
  MessageLocation GetMessageLocation();

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
  Object* PromoteScheduledException();

  // Attempts to compute the current source location, storing the
  // result in the target out parameter.
  bool ComputeLocation(MessageLocation* target);
  bool ComputeLocationFromException(MessageLocation* target,
                                    Handle<Object> exception);
  bool ComputeLocationFromStackTrace(MessageLocation* target,
                                     Handle<Object> exception);

  Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
                                        MessageLocation* location);

  // Out of resource exception helpers.
  Object* StackOverflow();
  Object* TerminateExecution();
  void CancelTerminateExecution();

  void RequestInterrupt(InterruptCallback callback, void* data);
  void InvokeApiInterruptCallbacks();

  // Administration
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v, char* t);

  // Returns the current native context.
  Handle<Context> native_context();

  // Returns the native context of the calling JavaScript code. That
  // is, the native context of the top-most JavaScript frame.
  Handle<Context> GetCallingNativeContext();

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const char* const kStackOverflowMessage;

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;        // See StringSearchBase.

  // Accessors.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                \
  inline type name() const {                                     \
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
    return name##_;                                              \
  }                                                              \
  inline void set_##name(type value) {                           \
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
    name##_ = value;                                             \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                \
  inline type* name() {                                          \
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
    return &(name##_)[0];                                        \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
  inline Handle<type> name();                            \
  inline bool is_##name(type* value);
  NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
#undef NATIVE_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    DCHECK(counters_ != NULL);
    return counters_;
  }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    DCHECK(logger_ != NULL);
    return logger_;
  }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table();
  StubCache* stub_cache() { return stub_cache_; }
  CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
  MaterializedObjectStore* materialized_object_store() {
    return materialized_object_store_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  HandleScopeData* handle_scope_data() { return &handle_scope_data_; }

  HandleScopeImplementer* handle_scope_implementer() {
    DCHECK(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* runtime_zone() { return &runtime_zone_; }
  Zone* interface_descriptor_zone() { return &interface_descriptor_zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    return inner_pointer_to_code_cache_;
  }

  GlobalHandles* global_handles() { return global_handles_; }

  EternalHandles* eternal_handles() { return eternal_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  Builtins* builtins() { return &builtins_; }

  void NotifyExtensionInstalled() {
    has_installed_extensions_ = true;
  }

  bool has_installed_extensions() { return has_installed_extensions_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  Debug* debug() { return debug_; }

  CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
  HeapProfiler* heap_profiler() const { return heap_profiler_; }

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }
#endif

  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  static const int kJSRegexpStaticOffsetsVectorSize = 128;

  THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)

  THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)

  void SetData(uint32_t slot, void* data) {
    DCHECK(slot < Internals::kNumIsolateDataSlots);
    embedder_data_[slot] = data;
  }
  void* GetData(uint32_t slot) {
    DCHECK(slot < Internals::kNumIsolateDataSlots);
    return embedder_data_[slot];
  }

  bool serializer_enabled() const { return serializer_enabled_; }
  bool snapshot_available() const {
    return snapshot_blob_ != NULL && snapshot_blob_->raw_size != 0;
  }

  bool IsDead() { return has_fatal_error_; }
  void SignalFatalError() { has_fatal_error_ = true; }

  bool use_crankshaft() const;

  bool initialized_from_snapshot() { return initialized_from_snapshot_; }

  double time_millis_since_init() {
    return heap_.MonotonicallyIncreasingTimeInMs() - time_millis_at_init_;
  }

  DateCache* date_cache() {
    return date_cache_;
  }

  void set_date_cache(DateCache* date_cache) {
    if (date_cache != date_cache_) {
      delete date_cache_;
    }
    date_cache_ = date_cache;
  }

  Map* get_initial_js_array_map(ElementsKind kind,
                                Strength strength = Strength::WEAK);

  static const int kArrayProtectorValid = 1;
  static const int kArrayProtectorInvalid = 0;

  bool IsFastArrayConstructorPrototypeChainIntact();

  // On intent to set an element in object, make sure that appropriate
  // notifications occur if the set is on the elements of the array or
  // object prototype. Also ensure that changes to prototype chain between
  // Array and Object fire notifications.
  void UpdateArrayProtectorOnSetElement(Handle<JSObject> object);
  void UpdateArrayProtectorOnSetLength(Handle<JSObject> object) {
    UpdateArrayProtectorOnSetElement(object);
  }
  void UpdateArrayProtectorOnSetPrototype(Handle<JSObject> object) {
    UpdateArrayProtectorOnSetElement(object);
  }
  void UpdateArrayProtectorOnNormalizeElements(Handle<JSObject> object) {
    UpdateArrayProtectorOnSetElement(object);
  }

  // Returns true if array is the initial array prototype in any native
  // context.
  bool IsAnyInitialArrayPrototype(Handle<JSArray> array);

  CallInterfaceDescriptorData* call_descriptor_data(int index);

  void IterateDeferredHandles(ObjectVisitor* visitor);
  void LinkDeferredHandles(DeferredHandles* deferred_handles);
  void UnlinkDeferredHandles(DeferredHandles* deferred_handles);

#ifdef DEBUG
  bool IsDeferredHandle(Object** location);
#endif  // DEBUG

  bool concurrent_recompilation_enabled() {
    // Thread is only available with flag enabled.
    DCHECK(optimizing_compile_dispatcher_ == NULL ||
           FLAG_concurrent_recompilation);
    return optimizing_compile_dispatcher_ != NULL;
  }

  bool concurrent_osr_enabled() const {
    // Thread is only available with flag enabled.
    DCHECK(optimizing_compile_dispatcher_ == NULL ||
           FLAG_concurrent_recompilation);
    return optimizing_compile_dispatcher_ != NULL && FLAG_concurrent_osr;
  }

  OptimizingCompileDispatcher* optimizing_compile_dispatcher() {
    return optimizing_compile_dispatcher_;
  }

  int id() const { return static_cast<int>(id_); }

  HStatistics* GetHStatistics();
  CompilationStatistics* GetTurboStatistics();
  HTracer* GetHTracer();
  CodeTracer* GetCodeTracer();

  void DumpAndResetCompilationStats();

  FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
  void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
    function_entry_hook_ = function_entry_hook;
  }

  void* stress_deopt_count_address() { return &stress_deopt_count_; }

  void* virtual_handler_register_address() {
    return &virtual_handler_register_;
  }

  void* virtual_slot_register_address() { return &virtual_slot_register_; }

  base::RandomNumberGenerator* random_number_generator();

  // Given an address occupied by a live code object, return that object.
  Object* FindCodeObject(Address a);

  int NextOptimizationId() {
    int id = next_optimization_id_++;
    if (!Smi::IsValid(next_optimization_id_)) {
      next_optimization_id_ = 0;
    }
    return id;
  }

  void IncrementJsCallsFromApiCounter() { ++js_calls_from_api_counter_; }

  unsigned int js_calls_from_api_counter() {
    return js_calls_from_api_counter_;
  }

  // Get (and lazily initialize) the registry for per-isolate symbols.
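  // (Informal note, an assumption rather than a guarantee documented here:
  // this registry is what backs JavaScript's Symbol.for()/Symbol.keyFor(),
  // so that e.g. Symbol.for("x") returns the same symbol on every call
  // within this isolate.)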
  Handle<JSObject> GetSymbolRegistry();

  void AddCallCompletedCallback(CallCompletedCallback callback);
  void RemoveCallCompletedCallback(CallCompletedCallback callback);
  void FireCallCompletedCallback();

  void SetPromiseRejectCallback(PromiseRejectCallback callback);
  void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
                           v8::PromiseRejectEvent event);

  void EnqueueMicrotask(Handle<Object> microtask);
  void RunMicrotasks();

  void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
  void CountUsage(v8::Isolate::UseCounterFeature feature);

  BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
  BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }

  std::string GetTurboCfgFileName();

#if TRACE_MAPS
  int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
#endif


  void AddDetachedContext(Handle<Context> context);
  void CheckDetachedContextsAfterGC();

  List<Object*>* partial_snapshot_cache() { return &partial_snapshot_cache_; }

  void set_array_buffer_allocator(v8::ArrayBuffer::Allocator* allocator) {
    array_buffer_allocator_ = allocator;
  }
  v8::ArrayBuffer::Allocator* array_buffer_allocator() const {
    return array_buffer_allocator_;
  }

  FutexWaitListNode* futex_wait_list_node() { return &futex_wait_list_node_; }

  CancelableTaskManager* cancelable_task_manager() {
    return cancelable_task_manager_;
  }

  interpreter::Interpreter* interpreter() const { return interpreter_; }

 protected:
  explicit Isolate(bool enable_serializer);

 private:
  friend struct GlobalState;
  friend struct InitializeGlobalState;
  Handle<JSObject> SetUpSubregistry(Handle<JSObject> registry, Handle<Map> map,
                                    const char* name);

  // These fields are accessed through the API, offsets must be kept in sync
  // with v8::internal::Internals (in include/v8.h) constants. This is also
  // verified in Isolate::Init() using runtime checks.
  void* embedder_data_[Internals::kNumIsolateDataSlots];
  Heap heap_;

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(PerIsolateThreadData* data);
    void RemoveAllThreads(Isolate* isolate);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack synchronously with threads Enter'ing and Exit'ing
  // the Isolate. The top of the stack points to a thread which is currently
  // running the Isolate. When the stack is empty, the Isolate is considered
  // not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item pushed to the stack.
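  //
  // For example (illustration only): if a thread calls Enter() twice and then
  // Exit() once, no new EntryStackItem is pushed for the second Enter();
  // entry_count goes 1 -> 2 -> 1, and the isolate still counts as entered.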
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

   private:
    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  static base::LazyMutex thread_data_table_mutex_;

  static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
  static base::Thread::LocalStorageKey isolate_key_;
  static base::Thread::LocalStorageKey thread_id_key_;
  static ThreadDataTable* thread_data_table_;

  // A global counter for all generated Isolates, might overflow.
  static base::Atomic32 isolate_counter_;

#if DEBUG
  static base::Atomic32 isolate_key_created_;
#endif

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time; this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time; this should be prevented using external locking.
  void Exit();

  void InitializeThreadLocal();

  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

  // Propagate pending exception message to the v8::TryCatch.
  // If there is no external try-catch or message was successfully propagated,
  // then return true.
  bool PropagatePendingExceptionToExternalTryCatch();

  // Remove per-frame stored materialized objects when we are unwinding
  // the frame.
  void RemoveMaterializedObjectsOnUnwind(StackFrame* frame);

  base::Atomic32 id_;
  EntryStackItem* entry_stack_;
  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
  Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
  CodeRange* code_range_;
  base::RecursiveMutex break_access_;
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
  StubCache* stub_cache_;
  CodeAgingHelper* code_aging_helper_;
  DeoptimizerData* deoptimizer_data_;
  MaterializedObjectStore* materialized_object_store_;
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
  MemoryAllocator* memory_allocator_;
  KeyedLookupCache* keyed_lookup_cache_;
  ContextSlotCache* context_slot_cache_;
  DescriptorLookupCache* descriptor_lookup_cache_;
  HandleScopeData handle_scope_data_;
  HandleScopeImplementer* handle_scope_implementer_;
  UnicodeCache* unicode_cache_;
  Zone runtime_zone_;
  Zone interface_descriptor_zone_;
  InnerPointerToCodeCache* inner_pointer_to_code_cache_;
  GlobalHandles* global_handles_;
  EternalHandles* eternal_handles_;
  ThreadManager* thread_manager_;
  RuntimeState runtime_state_;
  Builtins builtins_;
  bool has_installed_extensions_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
  DateCache* date_cache_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
  CallInterfaceDescriptorData* call_descriptor_data_;
  base::RandomNumberGenerator* random_number_generator_;

  // Whether the isolate has been created for snapshotting.
  bool serializer_enabled_;

  // True if fatal error has been signaled for this isolate.
  bool has_fatal_error_;

  // True if this isolate was initialized from a snapshot.
  bool initialized_from_snapshot_;

  // Time stamp at initialization.
  double time_millis_at_init_;

#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
#endif

  Debug* debug_;
  CpuProfiler* cpu_profiler_;
  HeapProfiler* heap_profiler_;
  FunctionEntryHook function_entry_hook_;

  interpreter::Interpreter* interpreter_;

  typedef std::pair<InterruptCallback, void*> InterruptEntry;
  std::queue<InterruptEntry> api_interrupts_queue_;

#define GLOBAL_BACKING_STORE(type, name, initialvalue) \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length) \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines.
  // Make sure the offsets of these fields agree between
  // compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

  DeferredHandles* deferred_handles_head_;
  OptimizingCompileDispatcher* optimizing_compile_dispatcher_;

  // Counts deopt points if deopt_every_n_times is enabled.
  unsigned int stress_deopt_count_;

  Address virtual_handler_register_;
  Address virtual_slot_register_;

  int next_optimization_id_;

  // Counts javascript calls from the API. Wraps around on overflow.
  unsigned int js_calls_from_api_counter_;

#if TRACE_MAPS
  int next_unique_sfi_id_;
#endif

  // List of callbacks when a Call completes.
  List<CallCompletedCallback> call_completed_callbacks_;

  v8::Isolate::UseCounterCallback use_counter_callback_;
  BasicBlockProfiler* basic_block_profiler_;

  List<Object*> partial_snapshot_cache_;

  v8::ArrayBuffer::Allocator* array_buffer_allocator_;

  FutexWaitListNode futex_wait_list_node_;

  CancelableTaskManager* cancelable_task_manager_;

  v8::Isolate::AbortOnUncaughtExceptionCallback
      abort_on_uncaught_exception_callback_;

  friend class ExecutionAccess;
  friend class HandleScopeImplementer;
  friend class OptimizingCompileDispatcher;
  friend class SweeperThread;
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
  friend class ThreadId;
  friend class TestMemoryAllocatorScope;
  friend class TestCodeRangeScope;
  friend class v8::Isolate;
  friend class v8::Locker;
  friend class v8::Unlocker;
  friend v8::StartupData v8::V8::CreateSnapshotDataBlob(const char*);

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};


#undef FIELD_ACCESSOR
#undef THREAD_LOCAL_TOP_ACCESSOR


class PromiseOnStack {
 public:
  PromiseOnStack(Handle<JSFunction> function, Handle<JSObject> promise,
                 PromiseOnStack* prev)
      : function_(function), promise_(promise), prev_(prev) {}
  Handle<JSFunction> function() { return function_; }
  Handle<JSObject> promise() { return promise_; }
  PromiseOnStack* prev() { return prev_; }

 private:
  Handle<JSFunction> function_;
  Handle<JSObject> promise_;
  PromiseOnStack* prev_;
};


// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
// class as a work around for a bug in the generated code found with these
// versions of GCC. See V8 issue 122 for details.
class SaveContext BASE_EMBEDDED {
 public:
  explicit SaveContext(Isolate* isolate);
  ~SaveContext();

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
  bool IsBelowFrame(JavaScriptFrame* frame) {
    return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
  }

 private:
  Isolate* isolate_;
  Handle<Context> context_;
  SaveContext* prev_;
  Address c_entry_fp_;
};


class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  explicit AssertNoContextChange(Isolate* isolate);
  ~AssertNoContextChange() {
    DCHECK(isolate_->context() == *context_);
  }

 private:
  Isolate* isolate_;
  Handle<Context> context_;
#else
 public:
  explicit AssertNoContextChange(Isolate* isolate) { }
#endif
};


class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

  static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }

  static bool TryLock(Isolate* isolate) {
    return isolate->break_access()->TryLock();
  }

 private:
  Isolate* isolate_;
};


// Support for checking for stack-overflows.
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

  // Use this to check for stack-overflows in C++ code.
  bool HasOverflowed() const {
    StackGuard* stack_guard = isolate_->stack_guard();
    return GetCurrentStackPosition() < stack_guard->real_climit();
  }

  // Use this to check for interrupt request in C++ code.
  bool InterruptRequested() {
    StackGuard* stack_guard = isolate_->stack_guard();
    return GetCurrentStackPosition() < stack_guard->climit();
  }

  // Use this to check for stack-overflow when entering runtime from JS code.
  bool JsHasOverflowed(uintptr_t gap = 0) const;

 private:
  Isolate* isolate_;
};


// Support for temporarily postponing interrupts. When the outermost
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
  PostponeInterruptsScope(Isolate* isolate,
                          int intercept_mask = StackGuard::ALL_INTERRUPTS)
      : stack_guard_(isolate->stack_guard()),
        intercept_mask_(intercept_mask),
        intercepted_flags_(0) {
    stack_guard_->PushPostponeInterruptsScope(this);
  }

  ~PostponeInterruptsScope() {
    stack_guard_->PopPostponeInterruptsScope();
  }

  // Find the bottom-most scope that intercepts this interrupt.
  // Return whether the interrupt has been intercepted.
  bool Intercept(StackGuard::InterruptFlag flag);

 private:
  StackGuard* stack_guard_;
  int intercept_mask_;
  int intercepted_flags_;
  PostponeInterruptsScope* prev_;

  friend class StackGuard;
};


class CodeTracer final : public Malloced {
 public:
  explicit CodeTracer(int isolate_id)
      : file_(NULL),
        scope_depth_(0) {
    if (!ShouldRedirect()) {
      file_ = stdout;
      return;
    }

    if (FLAG_redirect_code_traces_to == NULL) {
      SNPrintF(filename_,
               "code-%d-%d.asm",
               base::OS::GetCurrentProcessId(),
               isolate_id);
    } else {
      StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
    }

    WriteChars(filename_.start(), "", 0, false);
  }

  class Scope {
   public:
    explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
    ~Scope() { tracer_->CloseFile(); }

    FILE* file() const { return tracer_->file(); }

   private:
    CodeTracer* tracer_;
  };

  void OpenFile() {
    if (!ShouldRedirect()) {
      return;
    }

    if (file_ == NULL) {
      file_ = base::OS::FOpen(filename_.start(), "ab");
    }

    scope_depth_++;
  }

  void CloseFile() {
    if (!ShouldRedirect()) {
      return;
    }

    if (--scope_depth_ == 0) {
      fclose(file_);
      file_ = NULL;
    }
  }

  FILE* file() const { return file_; }

 private:
  static bool ShouldRedirect() {
    return FLAG_redirect_code_traces;
  }

  EmbeddedVector<char, 128> filename_;
  FILE* file_;
  int scope_depth_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_ISOLATE_H_