// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

#include "../include/v8-debug.h"
#include "allocation.h"
#include "apiutils.h"
#include "assert-scope.h"
#include "atomicops.h"
#include "builtins.h"
#include "contexts.h"
#include "execution.h"
#include "frames.h"
#include "date.h"
#include "global-handles.h"
#include "handles.h"
#include "hashmap.h"
#include "heap.h"
#include "optimizing-compiler-thread.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "runtime.h"
#include "zone.h"

namespace v8 {
namespace internal {

class Bootstrapper;
class CodeGenerator;
class CodeRange;
struct CodeStubInterfaceDescriptor;
class CodeTracer;
class CompilationCache;
class ContextSlotCache;
class Counters;
class CpuFeatures;
class CpuProfiler;
class DeoptimizerData;
class Deserializer;
class EmptyStatement;
class ExternalCallbackScope;
class ExternalReferenceTable;
class Factory;
class FunctionInfoListener;
class HandleScopeImplementer;
class HeapProfiler;
class HStatistics;
class HTracer;
class InlineRuntimeFunctionsTable;
class NoAllocationStringAllocator;
class InnerPointerToCodeCache;
class RandomNumberGenerator;
class RegExpStack;
class SaveContext;
class UnicodeCache;
class ConsStringIteratorOp;
class StringTracker;
class StubCache;
class SweeperThread;
class ThreadManager;
class ThreadState;
class ThreadVisitor;  // Defined in v8threads.h
template <StateTag Tag> class VMState;

// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we cannot include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();


#ifdef ENABLE_DEBUGGER_SUPPORT
class Debug;
class Debugger;
class DebuggerAgent;
#endif

#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS
class Redirection;
class Simulator;
#endif


// Static indirection table for handles to constants.  If a frame
// element represents a constant, the data contains an index into
// this table of handles to the actual constants.
typedef ZoneList<Handle<Object> > ZoneObjectList;

#define RETURN_IF_SCHEDULED_EXCEPTION(isolate)            \
  do {                                                    \
    Isolate* __isolate__ = (isolate);                     \
    if (__isolate__->has_scheduled_exception()) {         \
      return __isolate__->PromoteScheduledException();    \
    }                                                     \
  } while (false)

#define RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, T)  \
  do {                                                    \
    Isolate* __isolate__ = (isolate);                     \
    if (__isolate__->has_scheduled_exception()) {         \
      __isolate__->PromoteScheduledException();           \
      return Handle<T>::null();                           \
    }                                                     \
  } while (false)

#define RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, value) \
  do {                                                     \
    if ((call).is_null()) {                                \
      ASSERT((isolate)->has_pending_exception());          \
      return (value);                                      \
    }                                                      \
  } while (false)

#define CHECK_NOT_EMPTY_HANDLE(isolate, call)     \
  do {                                            \
    ASSERT(!(isolate)->has_pending_exception());  \
    CHECK(!(call).is_null());                     \
    CHECK(!(isolate)->has_pending_exception());   \
  } while (false)

#define RETURN_IF_EMPTY_HANDLE(isolate, call)                       \
  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, call, Failure::Exception())

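// Illustrative use of the macros above (hypothetical helper; GetProperty is
// assumed to be some handle-returning operation, not a name from this
// header): a handle-returning function bails out early if an exception is
// scheduled and propagates an empty handle when a nested call failed.
//
//   Handle<Object> GetPropertyChecked(Isolate* isolate,
//                                     Handle<JSObject> obj,
//                                     Handle<String> name) {
//     RETURN_HANDLE_IF_SCHEDULED_EXCEPTION(isolate, Object);
//     Handle<Object> result = GetProperty(isolate, obj, name);
//     RETURN_IF_EMPTY_HANDLE_VALUE(isolate, result, Handle<Object>());
//     return result;
//   }
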
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)


// Platform-independent, reliable thread identifier.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() : id_(kInvalidId) {}

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return id_ == other.id_;
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return id_ != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::GetCurrentThreadId).
  int ToInteger() const { return id_; }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  static const int kInvalidId = -1;

  explicit ThreadId(int id) : id_(id) {}

  static int AllocateThreadId();

  static int GetCurrentThreadId();

  int id_;

  static Atomic32 highest_thread_id_;

  friend class Isolate;
};
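
// Illustrative usage (hypothetical caller): ThreadId values are obtained per
// thread and compared with Equals() rather than operator==.
//
//   ThreadId me = ThreadId::Current();
//   if (me.Equals(isolate->thread_id())) {
//     // The current thread is the one that currently runs |isolate|.
//   }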


class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack.  If such an
  // address is needed, use try_catch_handler_address.
  v8::TryCatch* TryCatchHandler();

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack.  When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer.  When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  inline Address try_catch_handler_address() {
    return try_catch_handler_address_;
  }

  // Set the address of the top C++ try catch handler.
  inline void set_try_catch_handler_address(Address address) {
    try_catch_handler_address_ = address;
  }

  void Free() {
    ASSERT(!has_pending_message_);
    ASSERT(!external_caught_exception_);
    ASSERT(try_catch_handler_address_ == NULL);
  }

  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  ThreadId thread_id_;
  MaybeObject* pending_exception_;
  bool has_pending_message_;
  bool rethrowing_message_;
  Object* pending_message_obj_;
  Object* pending_message_script_;
  int pending_message_start_pos_;
  int pending_message_end_pos_;
  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception.  We may want to
  // unify them later.
  MaybeObject* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;
  v8::TryCatch* catcher_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;   // try-blocks are chained through the stack

#ifdef USE_SIMULATOR
  Simulator* simulator_;
#endif

  Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
  // the external callback we're currently in
  ExternalCallbackScope* external_callback_scope_;
  StateTag current_vm_state_;

  // Generated code scratch locations.
  int32_t formal_count_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

  // Head of the list of live LookupResults.
  LookupResult* top_lookup_result_;

  // Whether out of memory exceptions should be ignored.
  bool ignore_out_of_memory_;

 private:
  void InitializeInternal();

  Address try_catch_handler_address_;
};


#ifdef ENABLE_DEBUGGER_SUPPORT

#define ISOLATE_DEBUGGER_INIT_LIST(V)                                          \
  V(DebuggerAgent*, debugger_agent_instance, NULL)
#else

#define ISOLATE_DEBUGGER_INIT_LIST(V)

#endif

#ifdef DEBUG

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
  V(CommentStatistic, paged_space_comments_statistics,                         \
      CommentStatistic::kMaxComments + 1)
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* SerializerDeserializer state. */                                          \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  V(uint32_t, private_random_seed, 2)                                          \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

typedef List<HeapObject*> DebugObjectCache;

#define ISOLATE_INIT_LIST(V)                                                   \
  /* SerializerDeserializer state. */                                          \
  V(int, serialize_partial_snapshot_cache_length, 0)                           \
  V(int, serialize_partial_snapshot_cache_capacity, 0)                         \
  V(Object**, serialize_partial_snapshot_cache, NULL)                          \
  /* Assembler state. */                                                       \
  /* A previously allocated buffer of kMinimalBufferSize bytes, or NULL. */    \
  V(byte*, assembler_spare_buffer, NULL)                                       \
  V(FatalErrorCallback, exception_behavior, NULL)                              \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
  /* To distinguish the function templates, so that we can find them in the */ \
  /* function cache of the native context. */                                  \
  V(int, next_serial_number, 0)                                                \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
  V(bool, always_allow_natives_syntax, false)                                  \
  /* Part of the state of liveedit. */                                         \
  V(FunctionInfoListener*, active_function_info_listener, NULL)                \
  /* State for Relocatable. */                                                 \
  V(Relocatable*, relocatable_top, NULL)                                       \
  V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
  V(Object*, string_stream_current_security_token, NULL)                       \
  /* TODO(isolates): Release this on destruction? */                           \
  V(int*, irregexp_interpreter_backtrack_stack_cache, NULL)                    \
  /* Serializer state. */                                                      \
  V(ExternalReferenceTable*, external_reference_table, NULL)                   \
  /* AstNode state. */                                                         \
  V(int, ast_node_id, 0)                                                       \
  V(unsigned, ast_node_count, 0)                                               \
  V(bool, microtask_pending, false)                                            \
  V(HStatistics*, hstatistics, NULL)                                           \
  V(HTracer*, htracer, NULL)                                                   \
  V(CodeTracer*, code_tracer, NULL)                                            \
  ISOLATE_DEBUGGER_INIT_LIST(V)

class Isolate {
  // These forward declarations are required to make the friend declarations in
  // PerIsolateThreadData work on some older versions of gcc.
  class ThreadDataTable;
  class EntryStackItem;
 public:
  ~Isolate();

  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }
    void set_stack_limit(uintptr_t value) { stack_limit_ = value; }
    uintptr_t stack_limit() const { return stack_limit_; }
    ThreadState* thread_state() const { return thread_state_; }
    void set_thread_state(ThreadState* value) { thread_state_ = value; }

#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS
    Simulator* simulator() const { return simulator_; }
    void set_simulator(Simulator* simulator) {
      simulator_ = simulator;
    }
#endif

    bool Matches(Isolate* isolate, ThreadId thread_id) const {
      return isolate_ == isolate && thread_id_.Equals(thread_id);
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

#if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
    !defined(__mips__) && V8_TARGET_ARCH_MIPS
    Simulator* simulator_;
#endif

    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };


  enum AddressId {
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
    FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
#undef DECLARE_ENUM
    kIsolateAddressCount
  };
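
  // For reference, DECLARE_ENUM applied to FOR_EACH_ISOLATE_ADDRESS_NAME
  // (defined above) expands this enum to:
  //   kHandlerAddress, kCEntryFPAddress, kContextAddress,
  //   kPendingExceptionAddress, kExternalCaughtExceptionAddress,
  //   kJSEntrySPAddress, kIsolateAddressCount.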

  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
  // not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    return reinterpret_cast<PerIsolateThreadData*>(
        Thread::GetThreadLocal(per_isolate_thread_data_key_));
  }

  // Returns the isolate inside which the current thread is running.
  INLINE(static Isolate* Current()) {
    Isolate* isolate = reinterpret_cast<Isolate*>(
        Thread::GetExistingThreadLocal(isolate_key_));
    ASSERT(isolate != NULL);
    return isolate;
  }

  INLINE(static Isolate* UncheckedCurrent()) {
    return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
  }
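
  // Illustrative usage (hypothetical caller): code that may run on a thread
  // that has not entered an isolate checks UncheckedCurrent() for NULL,
  // whereas Current() asserts that an isolate is set.
  //
  //   Isolate* isolate = Isolate::UncheckedCurrent();
  //   if (isolate == NULL) return;  // No isolate entered on this thread.
  //   isolate->heap();              // Safe to use |isolate| from here on.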

  // Usually called by Init(), but can be called early e.g. to allow
  // testing components that require logging but not the whole
  // isolate.
  //
  // Safe to call more than once.
  void InitializeLoggingAndCounters();

  bool Init(Deserializer* des);

  bool IsInitialized() { return state_ == INITIALIZED; }

  // True if at least one thread Enter'ed this isolate.
  bool IsInUse() { return entry_stack_ != NULL; }

  // Destroys the non-default isolates.
  // Sets the default isolate into a "has_been_disposed" state rather than
  // destroying it, for legacy API reasons.
  void TearDown();

  static void GlobalTearDown();

  bool IsDefaultIsolate() const { return this == default_isolate_; }

  static void SetCrashIfDefaultIsolateInitialized();
  // Ensures that process-wide resources and the default isolate have been
  // allocated. It is only necessary to call this method in rare cases, for
  // example if you are using V8 from within the body of a static initializer.
  // Safe to call multiple times.
  static void EnsureDefaultIsolate();

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThisThread();

  // Find the PerThread for the given (isolate, thread) combination.
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Get the debugger from the default isolate. Preinitializes the
  // default isolate if needed.
  static Debugger* GetDefaultIsolateDebugger();
#endif

  // Get the stack guard from the default isolate. Preinitializes the
  // default isolate if needed.
  static StackGuard* GetDefaultIsolateStackGuard();

  // Returns the key used to store the pointer to the current isolate.
  // Used internally for V8 threads that do not execute JavaScript but still
  // are part of the domain of an isolate (like the context switcher).
  static Thread::LocalStorageKey isolate_key() {
    return isolate_key_;
  }

  // Returns the key used to store process-wide thread IDs.
  static Thread::LocalStorageKey thread_id_key() {
    return thread_id_key_;
  }

  static Thread::LocalStorageKey per_isolate_thread_data_key();

  // If a client attempts to create a Locker without specifying an isolate,
  // we assume that the client is using legacy behavior. Set up the current
  // thread to be inside the implicit isolate (or fail a check if we have
  // switched to non-legacy behavior).
  static void EnterDefaultIsolate();

  // Mutex for serializing access to break control structures.
  RecursiveMutex* break_access() { return &break_access_; }

  // Mutex for serializing access to debugger.
  RecursiveMutex* debugger_access() { return &debugger_access_; }

  Address get_address_from_id(AddressId id);

  // Access to top context (where the current function object was created).
  Context* context() { return thread_local_top_.context_; }
  void set_context(Context* context) {
    ASSERT(context == NULL || context->IsContext());
    thread_local_top_.context_ = context;
  }
  Context** context_address() { return &thread_local_top_.context_; }

  SaveContext* save_context() { return thread_local_top_.save_context_; }
  void set_save_context(SaveContext* save) {
    thread_local_top_.save_context_ = save;
  }

  // Access to current thread id.
  ThreadId thread_id() { return thread_local_top_.thread_id_; }
  void set_thread_id(ThreadId id) { thread_local_top_.thread_id_ = id; }

  // Interface to pending exception.
  MaybeObject* pending_exception() {
    ASSERT(has_pending_exception());
    return thread_local_top_.pending_exception_;
  }
  bool external_caught_exception() {
    return thread_local_top_.external_caught_exception_;
  }
  void set_external_caught_exception(bool value) {
    thread_local_top_.external_caught_exception_ = value;
  }
  void set_pending_exception(MaybeObject* exception) {
    thread_local_top_.pending_exception_ = exception;
  }
  void clear_pending_exception() {
    thread_local_top_.pending_exception_ = heap_.the_hole_value();
  }
  MaybeObject** pending_exception_address() {
    return &thread_local_top_.pending_exception_;
  }
  bool has_pending_exception() {
    return !thread_local_top_.pending_exception_->IsTheHole();
  }
  void clear_pending_message() {
    thread_local_top_.has_pending_message_ = false;
    thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
    thread_local_top_.pending_message_script_ = heap_.the_hole_value();
  }
  v8::TryCatch* try_catch_handler() {
    return thread_local_top_.TryCatchHandler();
  }
  Address try_catch_handler_address() {
    return thread_local_top_.try_catch_handler_address();
  }
  bool* external_caught_exception_address() {
    return &thread_local_top_.external_caught_exception_;
  }
  v8::TryCatch* catcher() {
    return thread_local_top_.catcher_;
  }
  void set_catcher(v8::TryCatch* catcher) {
    thread_local_top_.catcher_ = catcher;
  }

  MaybeObject** scheduled_exception_address() {
    return &thread_local_top_.scheduled_exception_;
  }

  Address pending_message_obj_address() {
    return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
  }

  Address has_pending_message_address() {
    return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
  }

  Address pending_message_script_address() {
    return reinterpret_cast<Address>(
        &thread_local_top_.pending_message_script_);
  }

  MaybeObject* scheduled_exception() {
    ASSERT(has_scheduled_exception());
    return thread_local_top_.scheduled_exception_;
  }
  bool has_scheduled_exception() {
    return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
  }
  void clear_scheduled_exception() {
    thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
  }

  bool IsExternallyCaught();

  bool is_catchable_by_javascript(MaybeObject* exception) {
    return (!exception->IsOutOfMemory()) &&
        (exception != heap()->termination_exception());
  }

  // Serializer.
  void PushToPartialSnapshotCache(Object* obj);

  // JS execution stack (see frames.h).
  static Address c_entry_fp(ThreadLocalTop* thread) {
    return thread->c_entry_fp_;
  }
  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }

  inline Address* c_entry_fp_address() {
    return &thread_local_top_.c_entry_fp_;
  }
  inline Address* handler_address() { return &thread_local_top_.handler_; }

  // Bottom JS entry.
  Address js_entry_sp() {
    return thread_local_top_.js_entry_sp_;
  }
  inline Address* js_entry_sp_address() {
    return &thread_local_top_.js_entry_sp_;
  }

  // Generated code scratch locations.
  void* formal_count_address() { return &thread_local_top_.formal_count_; }

  // Returns the global object of the current context. It could be
  // a builtin object, or a JS global object.
  Handle<GlobalObject> global_object() {
    return Handle<GlobalObject>(context()->global_object());
  }

  // Returns the global proxy object of the current context.
  Object* global_proxy() {
    return context()->global_proxy();
  }

  Handle<JSBuiltinsObject> js_builtins_object() {
    return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
  }

  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
  void FreeThreadResources() { thread_local_top_.Free(); }

  // This method is called by the api after operations that may throw
  // exceptions.  If an exception was thrown and not handled by an external
  // handler, the exception is scheduled to be rethrown when we return to
  // running JavaScript code.  If an exception is scheduled, true is returned.
  bool OptionalRescheduleException(bool is_bottom_call);

  class ExceptionScope {
   public:
    explicit ExceptionScope(Isolate* isolate) :
      // Scope currently can only be used for regular exceptions, not
      // failures like OOM or termination exception.
      isolate_(isolate),
      pending_exception_(isolate_->pending_exception()->ToObjectUnchecked(),
                         isolate_),
      catcher_(isolate_->catcher())
    { }

    ~ExceptionScope() {
      isolate_->set_catcher(catcher_);
      isolate_->set_pending_exception(*pending_exception_);
    }

   private:
    Isolate* isolate_;
    Handle<Object> pending_exception_;
    v8::TryCatch* catcher_;
  };
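
  // Illustrative usage (hypothetical caller): ExceptionScope stashes the
  // current pending exception and TryCatch so code that must run with a
  // clean exception state can do so; the original state is restored on
  // scope exit by the destructor above.
  //
  //   {
  //     ExceptionScope scope(isolate);
  //     isolate->clear_pending_exception();
  //     // ... work that must not observe the old pending exception ...
  //   }  // Pending exception and catcher are restored here.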

  void SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options);

  // Tells whether the current context has experienced an out of memory
  // exception.
  bool is_out_of_memory();
  bool ignore_out_of_memory() {
    return thread_local_top_.ignore_out_of_memory_;
  }
  void set_ignore_out_of_memory(bool value) {
    thread_local_top_.ignore_out_of_memory_ = value;
  }

  void PrintCurrentStackTrace(FILE* out);
  void PrintStack(StringStream* accumulator);
  void PrintStack(FILE* out);
  Handle<String> StackTraceString();
  NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
                                      Object* object,
                                      Map* map,
                                      unsigned int magic2));
  Handle<JSArray> CaptureCurrentStackTrace(
      int frame_limit,
      StackTrace::StackTraceOptions options);

  Handle<JSArray> CaptureSimpleStackTrace(Handle<JSObject> error_object,
                                          Handle<Object> caller,
                                          int limit);
  void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);

  // Returns whether the top context may access the given global object. If
  // the result is false, the pending exception is guaranteed to be
  // set.

  // TODO(yangguo): temporary wrappers
  bool MayNamedAccessWrapper(Handle<JSObject> receiver,
                             Handle<Object> key,
                             v8::AccessType type) {
    return MayNamedAccess(*receiver, *key, type);
  }
  bool MayIndexedAccessWrapper(Handle<JSObject> receiver,
                               uint32_t index,
                               v8::AccessType type) {
    return MayIndexedAccess(*receiver, index, type);
  }

  bool MayNamedAccess(JSObject* receiver,
                      Object* key,
                      v8::AccessType type);
  bool MayIndexedAccess(JSObject* receiver,
                        uint32_t index,
                        v8::AccessType type);

  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
  void ReportFailedAccessCheck(JSObject* receiver, v8::AccessType type);

  // Exception throwing support. The caller should use the result
  // of Throw() as its return value.
  Failure* Throw(Object* exception, MessageLocation* location = NULL);
  // Re-throw an exception.  This involves no error reporting since
  // error reporting was handled when the exception was thrown
  // originally.
  Failure* ReThrow(MaybeObject* exception);
  void ScheduleThrow(Object* exception);
  // Re-set pending message, script and positions reported to the TryCatch
  // back to the TLS for re-use when rethrowing.
  void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
  void ReportPendingMessages();
  // Return pending location if any or unfilled structure.
  MessageLocation GetMessageLocation();
  Failure* ThrowIllegalOperation();

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
  Failure* PromoteScheduledException();
  void DoThrow(Object* exception, MessageLocation* location);
  // Checks if exception should be reported and finds out if it's
  // caught externally.
  bool ShouldReportException(bool* can_be_caught_externally,
                             bool catchable_by_javascript);

  // Attempts to compute the current source location, storing the
  // result in the target out parameter.
  void ComputeLocation(MessageLocation* target);

  // Out of resource exception helpers.
  Failure* StackOverflow();
  Failure* TerminateExecution();
  void CancelTerminateExecution();

  // Administration
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v, char* t);


  // Returns the current native and global context.
  Handle<Context> native_context();
  Handle<Context> global_context();

  // Returns the native context of the calling JavaScript code.  That
  // is, the native context of the top-most JavaScript frame.
  Handle<Context> GetCallingNativeContext();

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const char* const kStackOverflowMessage;

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;        // See StringSearchBase.

  // Accessors.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
  inline type name() const {                                            \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return name##_;                                                     \
  }                                                                     \
  inline void set_##name(type value) {                                  \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    name##_ = value;                                                    \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
  inline type* name() {                                                 \
    ASSERT(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
    return &(name##_)[0];                                               \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)            \
  Handle<type> name() {                                             \
    return Handle<type>(context()->native_context()->name(), this); \
  }                                                                 \
  bool is_##name(type* value) {                                     \
    return context()->native_context()->is_##name(value);           \
  }
  NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
#undef NATIVE_CONTEXT_FIELD_ACCESSOR

  Bootstrapper* bootstrapper() { return bootstrapper_; }
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(counters_ != NULL);
    return counters_;
  }
  CodeRange* code_range() { return code_range_; }
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
    ASSERT(logger_ != NULL);
    return logger_;
  }
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
  StatsTable* stats_table();
  StubCache* stub_cache() { return stub_cache_; }
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }

  TranscendentalCache* transcendental_cache() const {
    return transcendental_cache_;
  }

  MemoryAllocator* memory_allocator() {
    return memory_allocator_;
  }

  KeyedLookupCache* keyed_lookup_cache() {
    return keyed_lookup_cache_;
  }

  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

  v8::ImplementationUtilities::HandleScopeData* handle_scope_data() {
    return &handle_scope_data_;
  }
  HandleScopeImplementer* handle_scope_implementer() {
    ASSERT(handle_scope_implementer_);
    return handle_scope_implementer_;
  }
  Zone* runtime_zone() { return &runtime_zone_; }

  UnicodeCache* unicode_cache() {
    return unicode_cache_;
  }

  InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    return inner_pointer_to_code_cache_;
  }

  ConsStringIteratorOp* write_iterator() { return write_iterator_; }

  GlobalHandles* global_handles() { return global_handles_; }

  EternalHandles* eternal_handles() { return eternal_handles_; }

  ThreadManager* thread_manager() { return thread_manager_; }

  StringTracker* string_tracker() { return string_tracker_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  ConsStringIteratorOp* objects_string_compare_iterator_a() {
    return &objects_string_compare_iterator_a_;
  }

  ConsStringIteratorOp* objects_string_compare_iterator_b() {
    return &objects_string_compare_iterator_b_;
  }

  StaticResource<ConsStringIteratorOp>* objects_string_iterator() {
    return &objects_string_iterator_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  void set_fp_stubs_generated(bool value) {
    fp_stubs_generated_ = value;
  }

  bool fp_stubs_generated() { return fp_stubs_generated_; }

  Builtins* builtins() { return &builtins_; }

  void NotifyExtensionInstalled() {
    has_installed_extensions_ = true;
  }

  bool has_installed_extensions() { return has_installed_extensions_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
    return &interp_canonicalize_mapping_;
  }

  inline bool IsCodePreAgingActive();

#ifdef ENABLE_DEBUGGER_SUPPORT
  Debugger* debugger() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debugger_;
  }
  Debug* debug() {
    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
    return debug_;
  }
#endif

  inline bool IsDebuggerActive();
  inline bool DebuggerHasBreakPoints();

  CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
  HeapProfiler* heap_profiler() const { return heap_profiler_; }

#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }

  int* code_kind_statistics() { return code_kind_statistics_; }
#endif

#if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
    V8_TARGET_ARCH_MIPS && !defined(__mips__)
  bool simulator_initialized() { return simulator_initialized_; }
  void set_simulator_initialized(bool initialized) {
    simulator_initialized_ = initialized;
  }

  HashMap* simulator_i_cache() { return simulator_i_cache_; }
  void set_simulator_i_cache(HashMap* hash_map) {
    simulator_i_cache_ = hash_map;
  }

  Redirection* simulator_redirection() {
    return simulator_redirection_;
  }
  void set_simulator_redirection(Redirection* redirection) {
    simulator_redirection_ = redirection;
  }
#endif

  Factory* factory() { return reinterpret_cast<Factory*>(this); }

  static const int kJSRegexpStaticOffsetsVectorSize = 128;

  ExternalCallbackScope* external_callback_scope() {
    return thread_local_top_.external_callback_scope_;
  }
  void set_external_callback_scope(ExternalCallbackScope* scope) {
    thread_local_top_.external_callback_scope_ = scope;
  }

  StateTag current_vm_state() {
    return thread_local_top_.current_vm_state_;
  }

  void set_current_vm_state(StateTag state) {
    thread_local_top_.current_vm_state_ = state;
  }

  void SetData(uint32_t slot, void* data) {
    ASSERT(slot < Internals::kNumIsolateDataSlots);
    embedder_data_[slot] = data;
  }
  void* GetData(uint32_t slot) {
    ASSERT(slot < Internals::kNumIsolateDataSlots);
    return embedder_data_[slot];
  }
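
  // Illustrative usage (hypothetical embedder code): one pointer can be
  // stashed per data slot on the isolate and read back later, e.g. from a
  // callback. |kMyDataSlot| and |MyEmbedderState| are made-up names.
  //
  //   isolate->SetData(kMyDataSlot, my_state);
  //   ...
  //   MyEmbedderState* state =
  //       static_cast<MyEmbedderState*>(isolate->GetData(kMyDataSlot));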

  LookupResult* top_lookup_result() {
    return thread_local_top_.top_lookup_result_;
  }
  void SetTopLookupResult(LookupResult* top) {
    thread_local_top_.top_lookup_result_ = top;
  }

  bool IsDead() { return has_fatal_error_; }
  void SignalFatalError() { has_fatal_error_ = true; }

  bool use_crankshaft() const { return use_crankshaft_; }

  bool initialized_from_snapshot() { return initialized_from_snapshot_; }

  double time_millis_since_init() {
    return OS::TimeCurrentMillis() - time_millis_at_init_;
  }

  DateCache* date_cache() {
    return date_cache_;
  }

  void set_date_cache(DateCache* date_cache) {
    if (date_cache != date_cache_) {
      delete date_cache_;
    }
    date_cache_ = date_cache;
  }

  Map* get_initial_js_array_map(ElementsKind kind);

  bool IsFastArrayConstructorPrototypeChainIntact();

  CodeStubInterfaceDescriptor*
      code_stub_interface_descriptor(int index);

  void IterateDeferredHandles(ObjectVisitor* visitor);
  void LinkDeferredHandles(DeferredHandles* deferred_handles);
  void UnlinkDeferredHandles(DeferredHandles* deferred_handles);

#ifdef DEBUG
  bool IsDeferredHandle(Object** location);
#endif  // DEBUG

  void set_max_available_threads(int value) {
    max_available_threads_ = value;
  }

  bool concurrent_recompilation_enabled() {
    // Thread is only available with flag enabled.
    ASSERT(optimizing_compiler_thread_ == NULL ||
           FLAG_concurrent_recompilation);
    return optimizing_compiler_thread_ != NULL;
  }

  bool concurrent_osr_enabled() const {
    // Thread is only available with flag enabled.
    ASSERT(optimizing_compiler_thread_ == NULL ||
           FLAG_concurrent_recompilation);
    return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
  }

  OptimizingCompilerThread* optimizing_compiler_thread() {
    return optimizing_compiler_thread_;
  }

  int num_sweeper_threads() const {
    return num_sweeper_threads_;
  }

  SweeperThread** sweeper_threads() {
    return sweeper_thread_;
  }

  // PreInits and returns a default isolate. Needed when a new thread tries
  // to create a Locker for the first time (the lock itself is in the isolate).
  // TODO(svenpanne) This method is on death row...
  static v8::Isolate* GetDefaultIsolateForLocking();

  int id() const { return static_cast<int>(id_); }

  HStatistics* GetHStatistics();
  HTracer* GetHTracer();
  CodeTracer* GetCodeTracer();

  FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
  void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
    function_entry_hook_ = function_entry_hook;
  }

  void* stress_deopt_count_address() { return &stress_deopt_count_; }

  inline RandomNumberGenerator* random_number_generator();

  // Given an address occupied by a live code object, return that object.
  Object* FindCodeObject(Address a);

 private:
  Isolate();

  friend struct GlobalState;
  friend struct InitializeGlobalState;

  enum State {
    UNINITIALIZED,    // Some components may not have been allocated.
    INITIALIZED       // All components are fully initialized.
  };

  // These fields are accessed through the API, offsets must be kept in sync
  // with v8::internal::Internals (in include/v8.h) constants. This is also
  // verified in Isolate::Init() using runtime checks.
  void* embedder_data_[Internals::kNumIsolateDataSlots];
  Heap heap_;
  State state_;  // Will be padded to kApiPointerSize.

  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(PerIsolateThreadData* data);
    void RemoveAllThreads(Isolate* isolate);

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack synchronously with threads Enter'ing and Exit'ing
  // the Isolate. The top of the stack points to a thread which is currently
  // running the Isolate. When the stack is empty, the Isolate is considered
  // not entered by any thread and can be Disposed.
  // If the same thread enters the Isolate more than once, the entry_count_
  // is incremented rather than a new item pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

   private:
    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  // This mutex protects highest_thread_id_, thread_data_table_ and
  // default_isolate_.
  static Mutex process_wide_mutex_;

  static Thread::LocalStorageKey per_isolate_thread_data_key_;
  static Thread::LocalStorageKey isolate_key_;
  static Thread::LocalStorageKey thread_id_key_;
  static Isolate* default_isolate_;
  static ThreadDataTable* thread_data_table_;

  // A global counter for all generated Isolates, might overflow.
  static Atomic32 isolate_counter_;

  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previously entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Exit();

1234   void InitializeThreadLocal();
1235 
1236   void MarkCompactPrologue(bool is_compacting,
1237                            ThreadLocalTop* archived_thread_data);
1238   void MarkCompactEpilogue(bool is_compacting,
1239                            ThreadLocalTop* archived_thread_data);
1240 
1241   void FillCache();
1242 
1243   void PropagatePendingExceptionToExternalTryCatch();
1244 
1245   void InitializeDebugger();
1246 
1247   // Traverse prototype chain to find out whether the object is derived from
1248   // the Error object.
1249   bool IsErrorObject(Handle<Object> obj);
1250 
1251   Atomic32 id_;
1252   EntryStackItem* entry_stack_;
1253   int stack_trace_nesting_level_;
1254   StringStream* incomplete_message_;
1255   Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
1256   Bootstrapper* bootstrapper_;
1257   RuntimeProfiler* runtime_profiler_;
1258   CompilationCache* compilation_cache_;
1259   Counters* counters_;
1260   CodeRange* code_range_;
1261   RecursiveMutex break_access_;
1262   Atomic32 debugger_initialized_;
1263   RecursiveMutex debugger_access_;
1264   Logger* logger_;
1265   StackGuard stack_guard_;
1266   StatsTable* stats_table_;
1267   StubCache* stub_cache_;
1268   DeoptimizerData* deoptimizer_data_;
1269   ThreadLocalTop thread_local_top_;
1270   bool capture_stack_trace_for_uncaught_exceptions_;
1271   int stack_trace_for_uncaught_exceptions_frame_limit_;
1272   StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1273   TranscendentalCache* transcendental_cache_;
1274   MemoryAllocator* memory_allocator_;
1275   KeyedLookupCache* keyed_lookup_cache_;
1276   ContextSlotCache* context_slot_cache_;
1277   DescriptorLookupCache* descriptor_lookup_cache_;
1278   v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
1279   HandleScopeImplementer* handle_scope_implementer_;
1280   UnicodeCache* unicode_cache_;
1281   Zone runtime_zone_;
1282   InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1283   ConsStringIteratorOp* write_iterator_;
1284   GlobalHandles* global_handles_;
1285   EternalHandles* eternal_handles_;
1286   ThreadManager* thread_manager_;
1287   RuntimeState runtime_state_;
1288   bool fp_stubs_generated_;
1289   Builtins builtins_;
1290   bool has_installed_extensions_;
1291   StringTracker* string_tracker_;
1292   unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
1293   unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
1294   ConsStringIteratorOp objects_string_compare_iterator_a_;
1295   ConsStringIteratorOp objects_string_compare_iterator_b_;
1296   StaticResource<ConsStringIteratorOp> objects_string_iterator_;
1297   unibrow::Mapping<unibrow::Ecma262Canonicalize>
1298       regexp_macro_assembler_canonicalize_;
1299   RegExpStack* regexp_stack_;
1300   DateCache* date_cache_;
1301   unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
1302   CodeStubInterfaceDescriptor* code_stub_interface_descriptors_;
1303   RandomNumberGenerator* random_number_generator_;
1304 
1305   // True if fatal error has been signaled for this isolate.
1306   bool has_fatal_error_;
1307 
1308   // True if we are using the Crankshaft optimizing compiler.
1309   bool use_crankshaft_;
1310 
1311   // True if this isolate was initialized from a snapshot.
1312   bool initialized_from_snapshot_;
1313 
1314   // Time stamp at initialization.
1315   double time_millis_at_init_;
1316 
1317 #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
1318     V8_TARGET_ARCH_MIPS && !defined(__mips__)
1319   bool simulator_initialized_;
1320   HashMap* simulator_i_cache_;
1321   Redirection* simulator_redirection_;
1322 #endif
1323 
1324 #ifdef DEBUG
1325   // A static array of histogram info for each type.
1326   HistogramInfo heap_histograms_[LAST_TYPE + 1];
1327   JSObject::SpillInformation js_spill_information_;
1328   int code_kind_statistics_[Code::NUMBER_OF_KINDS];
1329 #endif
1330 
1331 #ifdef ENABLE_DEBUGGER_SUPPORT
1332   Debugger* debugger_;
1333   Debug* debug_;
1334 #endif
1335   CpuProfiler* cpu_profiler_;
1336   HeapProfiler* heap_profiler_;
1337   FunctionEntryHook function_entry_hook_;
1338 
1339 #define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
1340   type name##_;
1341   ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
1342 #undef GLOBAL_BACKING_STORE
1343 
1344 #define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
1345   type name##_[length];
1346   ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
1347 #undef GLOBAL_ARRAY_BACKING_STORE
1348 
1349 #ifdef DEBUG
1350   // This class is huge and has a number of fields controlled by
1351   // preprocessor defines. Make sure the offsets of these fields agree
1352   // between compilation units.
1353 #define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
1354   static const intptr_t name##_debug_offset_;
1355   ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
1356   ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
1357 #undef ISOLATE_FIELD_OFFSET
1358 #endif
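
// A minimal sketch of how one such offset constant could be defined in a
// single .cc file so the cross-unit check is possible (the field name
// "foo" is hypothetical; the real definitions are generated elsewhere
// from the same X-macro lists):
//
//   const intptr_t Isolate::foo_debug_offset_ = OFFSET_OF(Isolate, foo_);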
1359 
1360   DeferredHandles* deferred_handles_head_;
1361   OptimizingCompilerThread* optimizing_compiler_thread_;
1362   SweeperThread** sweeper_thread_;
1363   int num_sweeper_threads_;
1364 
1365   // TODO(yangguo): This will become obsolete once ResourceConstraints
1366   // becomes an argument to the Isolate constructor.
1367   int max_available_threads_;
1368 
1369   // Counts deopt points if deopt_every_n_times is enabled.
1370   unsigned int stress_deopt_count_;
1371 
1372   friend class ExecutionAccess;
1373   friend class HandleScopeImplementer;
1374   friend class IsolateInitializer;
1375   friend class OptimizingCompilerThread;
1376   friend class SweeperThread;
1377   friend class ThreadManager;
1378   friend class Simulator;
1379   friend class StackGuard;
1380   friend class ThreadId;
1381   friend class TestMemoryAllocatorScope;
1382   friend class TestCodeRangeScope;
1383   friend class v8::Isolate;
1384   friend class v8::Locker;
1385   friend class v8::Unlocker;
1386 
1387   DISALLOW_COPY_AND_ASSIGN(Isolate);
1388 };
1389 
1390 
1391 // If the GCC version is 4.1.x or 4.2.x an additional field is added to the
1392   // class as a workaround for a bug in the generated code found with these
1393 // versions of GCC. See V8 issue 122 for details.
1394 class SaveContext BASE_EMBEDDED {
1395  public:
1396   inline explicit SaveContext(Isolate* isolate);
1397 
1398   ~SaveContext() {
1399     isolate_->set_context(context_.is_null() ? NULL : *context_);
1400     isolate_->set_save_context(prev_);
1401   }
1402 
1403   Handle<Context> context() { return context_; }
1404   SaveContext* prev() { return prev_; }
1405 
1406   // Returns true if this save context is below a given JavaScript frame.
1407   bool IsBelowFrame(JavaScriptFrame* frame) {
1408     return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1409   }
1410 
1411  private:
1412   Isolate* isolate_;
1413   Handle<Context> context_;
1414   SaveContext* prev_;
1415   Address c_entry_fp_;
1416 };
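
// Hedged usage sketch for SaveContext ("isolate" and "other_context" are
// hypothetical locals, not part of this header). The destructor restores
// both the current context and the save-context chain:
//
//   {
//     SaveContext save(isolate);
//     isolate->set_context(*other_context);  // other_context: Handle<Context>
//     // ... run code that must see other_context as the current context ...
//   }  // Previous context and save_context are restored here.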
1417 
1418 
1419 class AssertNoContextChange BASE_EMBEDDED {
1420 #ifdef DEBUG
1421  public:
1422   explicit AssertNoContextChange(Isolate* isolate)
1423     : isolate_(isolate),
1424       context_(isolate->context(), isolate) { }
1425   ~AssertNoContextChange() {
1426     ASSERT(isolate_->context() == *context_);
1427   }
1428 
1429  private:
1430   Isolate* isolate_;
1431   Handle<Context> context_;
1432 #else
1433  public:
1434   explicit AssertNoContextChange(Isolate* isolate) { }
1435 #endif
1436 };
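
// Hedged usage sketch: placing an AssertNoContextChange on the stack
// asserts (in debug builds only) that the isolate's current context is
// unchanged when the scope is destroyed ("SomeHelper" is hypothetical):
//
//   void SomeHelper(Isolate* isolate) {
//     AssertNoContextChange ncc(isolate);
//     // ... code that must not switch the current context ...
//   }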
1437 
1438 
1439 class ExecutionAccess BASE_EMBEDDED {
1440  public:
1441   explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1442     Lock(isolate);
1443   }
1444   ~ExecutionAccess() { Unlock(isolate_); }
1445 
1446   static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
1447   static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1448 
1449   static bool TryLock(Isolate* isolate) {
1450     return isolate->break_access()->TryLock();
1451   }
1452 
1453  private:
1454   Isolate* isolate_;
1455 };
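
// Hedged usage sketch: ExecutionAccess is a scoped lock on the isolate's
// break_access() mutex, serializing access to break/interrupt state:
//
//   {
//     ExecutionAccess access(isolate);  // Lock() in the constructor
//     // ... inspect or modify guarded state ...
//   }  // Unlock() in the destructor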
1456 
1457 
1458 // Support for checking for stack-overflows in C++ code.
1459 class StackLimitCheck BASE_EMBEDDED {
1460  public:
1461   explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1462 
1463   bool HasOverflowed() const {
1464     StackGuard* stack_guard = isolate_->stack_guard();
1465     return (reinterpret_cast<uintptr_t>(this) < stack_guard->real_climit());
1466   }
1467  private:
1468   Isolate* isolate_;
1469 };
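
// Hedged usage sketch: deeply recursive C++ code can bail out before
// running off the native stack (the surrounding function and its error
// handling are hypothetical):
//
//   StackLimitCheck check(isolate);
//   if (check.HasOverflowed()) {
//     // ... report or schedule a stack-overflow exception and unwind ...
//   }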
1470 
1471 
1472 // Support for temporarily postponing interrupts. When the outermost
1473 // postpone scope is left the interrupts will be re-enabled and any
1474 // interrupts that occurred while in the scope will be taken into
1475 // account.
1476 class PostponeInterruptsScope BASE_EMBEDDED {
1477  public:
1478   explicit PostponeInterruptsScope(Isolate* isolate)
1479       : stack_guard_(isolate->stack_guard()) {
1480     stack_guard_->thread_local_.postpone_interrupts_nesting_++;
1481     stack_guard_->DisableInterrupts();
1482   }
1483 
1484   ~PostponeInterruptsScope() {
1485     if (--stack_guard_->thread_local_.postpone_interrupts_nesting_ == 0) {
1486       stack_guard_->EnableInterrupts();
1487     }
1488   }
1489  private:
1490   StackGuard* stack_guard_;
1491 };
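
// Hedged usage sketch: scopes may nest; interrupts are re-enabled only
// when the outermost scope is destroyed:
//
//   {
//     PostponeInterruptsScope postpone(isolate);
//     // ... interrupts requested here are deferred ...
//   }  // Outermost scope ends; deferred interrupts are taken into account.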
1492 
1493 
1494 // Tells whether the native context is marked with out of memory.
1495 inline bool Context::has_out_of_memory() {
1496   return native_context()->out_of_memory()->IsTrue();
1497 }
1498 
1499 
1500 // Mark the native context with out of memory.
1501 inline void Context::mark_out_of_memory() {
1502   native_context()->set_out_of_memory(GetIsolate()->heap()->true_value());
1503 }
1504 
1505 class CodeTracer V8_FINAL : public Malloced {
1506  public:
1507   explicit CodeTracer(int isolate_id)
1508       : file_(NULL),
1509         scope_depth_(0) {
1510     if (!ShouldRedirect()) {
1511       file_ = stdout;
1512       return;
1513     }
1514 
1515     if (FLAG_redirect_code_traces_to == NULL) {
1516       OS::SNPrintF(filename_,
1517                    "code-%d-%d.asm",
1518                    OS::GetCurrentProcessId(),
1519                    isolate_id);
1520     } else {
1521       OS::StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
1522     }
1523 
1524     WriteChars(filename_.start(), "", 0, false);
1525   }
1526 
1527   class Scope {
1528    public:
1529     explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
1530     ~Scope() { tracer_->CloseFile(); }
1531 
1532     FILE* file() const { return tracer_->file(); }
1533 
1534    private:
1535     CodeTracer* tracer_;
1536   };
1537 
1538   void OpenFile() {
1539     if (!ShouldRedirect()) {
1540       return;
1541     }
1542 
1543     if (file_ == NULL) {
1544       file_ = OS::FOpen(filename_.start(), "a");
1545     }
1546 
1547     scope_depth_++;
1548   }
1549 
1550   void CloseFile() {
1551     if (!ShouldRedirect()) {
1552       return;
1553     }
1554 
1555     if (--scope_depth_ == 0) {
1556       fclose(file_);
1557       file_ = NULL;
1558     }
1559   }
1560 
1561   FILE* file() const { return file_; }
1562 
1563  private:
1564   static bool ShouldRedirect() {
1565     return FLAG_redirect_code_traces;
1566   }
1567 
1568   EmbeddedVector<char, 128> filename_;
1569   FILE* file_;
1570   int scope_depth_;
1571 };
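
// Hedged usage sketch for CodeTracer::Scope: open a scope around the code
// that emits trace output and write to its file(), which is either stdout
// or the per-isolate redirect file ("GetCodeTracer" is assumed to be the
// isolate accessor that owns the tracer):
//
//   CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
//   PrintF(tracing_scope.file(), "--- code tracing output ---\n");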
1572 
1573 } }  // namespace v8::internal
1574 
1575 #endif  // V8_ISOLATE_H_
1576