1  // Copyright 2012 the V8 project authors. All rights reserved.
2  // Use of this source code is governed by a BSD-style license that can be
3  // found in the LICENSE file.
4  
5  #ifndef V8_ISOLATE_H_
6  #define V8_ISOLATE_H_
7  
8  #include "include/v8-debug.h"
9  #include "src/allocation.h"
10  #include "src/assert-scope.h"
11  #include "src/base/atomicops.h"
12  #include "src/builtins.h"
13  #include "src/contexts.h"
14  #include "src/date.h"
15  #include "src/execution.h"
16  #include "src/frames.h"
17  #include "src/global-handles.h"
18  #include "src/handles.h"
19  #include "src/hashmap.h"
20  #include "src/heap/heap.h"
21  #include "src/optimizing-compiler-thread.h"
22  #include "src/regexp-stack.h"
23  #include "src/runtime.h"
24  #include "src/runtime-profiler.h"
25  #include "src/zone.h"
26  
27  namespace v8 {
28  
29  namespace base {
30  class RandomNumberGenerator;
31  }
32  
33  namespace internal {
34  
35  class Bootstrapper;
36  class CallInterfaceDescriptorData;
37  class CodeGenerator;
38  class CodeRange;
39  class CodeStubDescriptor;
40  class CodeTracer;
41  class CompilationCache;
42  class ConsStringIteratorOp;
43  class ContextSlotCache;
44  class Counters;
45  class CpuFeatures;
46  class CpuProfiler;
47  class DeoptimizerData;
48  class Deserializer;
49  class EmptyStatement;
50  class ExternalCallbackScope;
51  class ExternalReferenceTable;
52  class Factory;
53  class FunctionInfoListener;
54  class HandleScopeImplementer;
55  class HeapProfiler;
56  class HStatistics;
57  class HTracer;
58  class InlineRuntimeFunctionsTable;
59  class InnerPointerToCodeCache;
60  class MaterializedObjectStore;
61  class CodeAgingHelper;
62  class RegExpStack;
63  class SaveContext;
64  class StringTracker;
65  class StubCache;
66  class SweeperThread;
67  class ThreadManager;
68  class ThreadState;
69  class ThreadVisitor;  // Defined in v8threads.h
70  class UnicodeCache;
71  template <StateTag Tag> class VMState;
72  
73  // 'void function pointer', used to roundtrip the
74  // ExternalReference::ExternalReferenceRedirector since we cannot include
75  // assembler.h, where it is defined, here.
76  typedef void* ExternalReferenceRedirectorPointer();
77  
78  
79  class Debug;
80  class Debugger;
81  class PromiseOnStack;
82  
83  #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
84      !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
85      !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
86      !defined(__mips__) && V8_TARGET_ARCH_MIPS64
87  class Redirection;
88  class Simulator;
89  #endif
90  
91  
92  // Static indirection table for handles to constants.  If a frame
93  // element represents a constant, the data contains an index into
94  // this table of handles to the actual constants.
95  // Static indirection table for handles to constants.  If a Result
96  // represents a constant, the data contains an index into this table
97  // of handles to the actual constants.
98  typedef ZoneList<Handle<Object> > ZoneObjectList;
99  
100  #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate)    \
101    do {                                                    \
102      Isolate* __isolate__ = (isolate);                     \
103      if (__isolate__->has_scheduled_exception()) {         \
104        return __isolate__->PromoteScheduledException();    \
105      }                                                     \
106    } while (false)
107  
108  // Macros for MaybeHandle.
109  
110  #define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
111    do {                                                      \
112      Isolate* __isolate__ = (isolate);                       \
113      if (__isolate__->has_scheduled_exception()) {           \
114        __isolate__->PromoteScheduledException();             \
115        return value;                                         \
116      }                                                       \
117    } while (false)
118  
119  #define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, T) \
120    RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<T>())
121  
122  #define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
123    do {                                                               \
124      if (!(call).ToHandle(&dst)) {                                    \
125        DCHECK((isolate)->has_pending_exception());                    \
126        return value;                                                  \
127      }                                                                \
128    } while (false)
129  
130  #define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)  \
131    ASSIGN_RETURN_ON_EXCEPTION_VALUE(                             \
132        isolate, dst, call, isolate->heap()->exception())
133  
134  #define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
135    ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())
136  
137  #define THROW_NEW_ERROR(isolate, call, T)                                    \
138    do {                                                                       \
139      Handle<Object> __error__;                                                \
140      ASSIGN_RETURN_ON_EXCEPTION(isolate, __error__, isolate->factory()->call, \
141                                 T);                                           \
142      return isolate->Throw<T>(__error__);                                     \
143    } while (false)
144  
145  #define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call)             \
146    do {                                                            \
147      Handle<Object> __error__;                                     \
148      ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, __error__,        \
149                                         isolate->factory()->call); \
150      return isolate->Throw(*__error__);                            \
151    } while (false)
152  
153  #define RETURN_ON_EXCEPTION_VALUE(isolate, call, value)            \
154    do {                                                             \
155      if ((call).is_null()) {                                        \
156        DCHECK((isolate)->has_pending_exception());                  \
157        return value;                                                \
158      }                                                              \
159    } while (false)
160  
161  #define RETURN_FAILURE_ON_EXCEPTION(isolate, call)  \
162    RETURN_ON_EXCEPTION_VALUE(isolate, call, isolate->heap()->exception())
163  
164  #define RETURN_ON_EXCEPTION(isolate, call, T)  \
165    RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
166  
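// Illustrative sketch (not part of the original header): a typical way the
// MaybeHandle macros above are combined inside a runtime helper.  The function
// ConcatNames and its arguments are hypothetical; factory()->NewConsString is
// assumed to return a MaybeHandle<String>.
//
//   MaybeHandle<String> ConcatNames(Isolate* isolate, Handle<String> a,
//                                   Handle<String> b) {
//     Handle<String> result;
//     // Bails out with an empty MaybeHandle<String> if the factory call left
//     // a pending exception behind.
//     ASSIGN_RETURN_ON_EXCEPTION(
//         isolate, result, isolate->factory()->NewConsString(a, b), String);
//     return result;
//   }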
167  
168  #define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
169    C(Handler, handler)                                   \
170    C(CEntryFP, c_entry_fp)                               \
171    C(Context, context)                                   \
172    C(PendingException, pending_exception)                \
173    C(ExternalCaughtException, external_caught_exception) \
174    C(JSEntrySP, js_entry_sp)
175  
176  
177  // Platform-independent, reliable thread identifier.
178  class ThreadId {
179   public:
180    // Creates an invalid ThreadId.
181    ThreadId() : id_(kInvalidId) {}
182  
183    // Returns ThreadId for current thread.
184    static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }
185  
186    // Returns invalid ThreadId (guaranteed not to be equal to any thread).
187    static ThreadId Invalid() { return ThreadId(kInvalidId); }
188  
189    // Compares ThreadIds for equality.
190    INLINE(bool Equals(const ThreadId& other) const) {
191      return id_ == other.id_;
192    }
193  
194    // Checks whether this ThreadId refers to any thread.
195    INLINE(bool IsValid() const) {
196      return id_ != kInvalidId;
197    }
198  
199    // Converts ThreadId to an integer representation
200    // (required for public API: V8::V8::GetCurrentThreadId).
201    int ToInteger() const { return id_; }
202  
203    // Converts ThreadId to an integer representation
204    // (required for public API: V8::V8::TerminateExecution).
205    static ThreadId FromInteger(int id) { return ThreadId(id); }
206  
207   private:
208    static const int kInvalidId = -1;
209  
210    explicit ThreadId(int id) : id_(id) {}
211  
212    static int AllocateThreadId();
213  
214    static int GetCurrentThreadId();
215  
216    int id_;
217  
218    static base::Atomic32 highest_thread_id_;
219  
220    friend class Isolate;
221  };
222  
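// Usage sketch (illustrative; the helper name is hypothetical): ThreadId
// values are compared with Equals() rather than operator==, e.g. when checking
// whether cached per-thread data belongs to the calling thread.
//
//   bool BelongsToCurrentThread(const ThreadId& stored) {
//     return stored.Equals(ThreadId::Current());
//   }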
223  
224  #define FIELD_ACCESSOR(type, name)                 \
225    inline void set_##name(type v) { name##_ = v; }  \
226    inline type name() const { return name##_; }
227  
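// For reference, a sketch of what one instantiation of FIELD_ACCESSOR expands
// to, using the FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler) use in
// ThreadLocalTop below:
//
//   inline void set_try_catch_handler(v8::TryCatch* v) {
//     try_catch_handler_ = v;
//   }
//   inline v8::TryCatch* try_catch_handler() const {
//     return try_catch_handler_;
//   }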
228  
229  class ThreadLocalTop BASE_EMBEDDED {
230   public:
231    // Does early low-level initialization that does not depend on the
232    // isolate being present.
233    ThreadLocalTop();
234  
235    // Initialize the thread data.
236    void Initialize();
237  
238    // Get the top C++ try catch handler or NULL if none are registered.
239    //
240    // This method is not guaranteed to return an address that can be
241    // used for comparison with addresses into the JS stack.  If such an
242    // address is needed, use try_catch_handler_address.
243    FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)
244  
245    // Get the address of the top C++ try catch handler or NULL if
246    // none are registered.
247    //
248    // This method always returns an address that can be compared to
249    // pointers into the JavaScript stack.  When running on actual
250    // hardware, try_catch_handler_address and TryCatchHandler return
251    // the same pointer.  When running on a simulator with a separate JS
252    // stack, try_catch_handler_address returns a JS stack address that
253    // corresponds to the place on the JS stack where the C++ handler
254    // would have been if the stack were not separate.
255    Address try_catch_handler_address() {
256      return reinterpret_cast<Address>(
257          v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
258    }
259  
260    void Free();
261  
262    Isolate* isolate_;
263    // The context where the current execution method is created and for variable
264    // lookups.
265    Context* context_;
266    ThreadId thread_id_;
267    Object* pending_exception_;
268    bool has_pending_message_;
269    bool rethrowing_message_;
270    Object* pending_message_obj_;
271    Object* pending_message_script_;
272    int pending_message_start_pos_;
273    int pending_message_end_pos_;
274    // Use a separate value for scheduled exceptions to preserve the
275    // invariants that hold about pending_exception.  We may want to
276    // unify them later.
277    Object* scheduled_exception_;
278    bool external_caught_exception_;
279    SaveContext* save_context_;
280    v8::TryCatch* catcher_;
281  
282    // Stack.
283    Address c_entry_fp_;  // the frame pointer of the top c entry frame
284    Address handler_;   // try-blocks are chained through the stack
285  
286    // Throwing an exception may cause a Promise rejection.  For this purpose
287    // we keep track of a stack of nested promises and the corresponding
288    // try-catch handlers.
289    PromiseOnStack* promise_on_stack_;
290  
291  #ifdef USE_SIMULATOR
292    Simulator* simulator_;
293  #endif
294  
295    Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
296    // the external callback we're currently in
297    ExternalCallbackScope* external_callback_scope_;
298    StateTag current_vm_state_;
299  
300    // Generated code scratch locations.
301    int32_t formal_count_;
302  
303    // Call back function to report unsafe JS accesses.
304    v8::FailedAccessCheckCallback failed_access_check_callback_;
305  
306    // Head of the list of live LookupResults.
307    LookupResult* top_lookup_result_;
308  
309   private:
310    void InitializeInternal();
311  
312    v8::TryCatch* try_catch_handler_;
313  };
314  
315  
316  #if V8_TARGET_ARCH_ARM && !defined(__arm__) || \
317      V8_TARGET_ARCH_ARM64 && !defined(__aarch64__) || \
318      V8_TARGET_ARCH_MIPS && !defined(__mips__) || \
319      V8_TARGET_ARCH_MIPS64 && !defined(__mips__)
320  
321  #define ISOLATE_INIT_SIMULATOR_LIST(V)                                         \
322    V(bool, simulator_initialized, false)                                        \
323    V(HashMap*, simulator_i_cache, NULL)                                         \
324    V(Redirection*, simulator_redirection, NULL)
325  #else
326  
327  #define ISOLATE_INIT_SIMULATOR_LIST(V)
328  
329  #endif
330  
331  
332  #ifdef DEBUG
333  
334  #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)                                       \
335    V(CommentStatistic, paged_space_comments_statistics,                         \
336        CommentStatistic::kMaxComments + 1)                                      \
337    V(int, code_kind_statistics, Code::NUMBER_OF_KINDS)
338  #else
339  
340  #define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
341  
342  #endif
343  
344  #define ISOLATE_INIT_ARRAY_LIST(V)                                             \
345    /* SerializerDeserializer state. */                                          \
346    V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
347    V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
348    V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
349    V(int, suffix_table, (kBMMaxShift + 1))                                      \
350    V(uint32_t, private_random_seed, 2)                                          \
351    ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
352  
353  typedef List<HeapObject*> DebugObjectCache;
354  
355  #define ISOLATE_INIT_LIST(V)                                                   \
356    /* SerializerDeserializer state. */                                          \
357    V(int, serialize_partial_snapshot_cache_length, 0)                           \
358    V(int, serialize_partial_snapshot_cache_capacity, 0)                         \
359    V(Object**, serialize_partial_snapshot_cache, NULL)                          \
360    /* Assembler state. */                                                       \
361    V(FatalErrorCallback, exception_behavior, NULL)                              \
362    V(LogEventCallback, event_logger, NULL)                                      \
363    V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, NULL)     \
364    /* To distinguish the function templates, so that we can find them in the */ \
365    /* function cache of the native context. */                                  \
366    V(int, next_serial_number, 0)                                                \
367    V(ExternalReferenceRedirectorPointer*, external_reference_redirector, NULL)  \
368    /* Part of the state of liveedit. */                                         \
369    V(FunctionInfoListener*, active_function_info_listener, NULL)                \
370    /* State for Relocatable. */                                                 \
371    V(Relocatable*, relocatable_top, NULL)                                       \
372    V(DebugObjectCache*, string_stream_debug_object_cache, NULL)                 \
373    V(Object*, string_stream_current_security_token, NULL)                       \
374    /* Serializer state. */                                                      \
375    V(ExternalReferenceTable*, external_reference_table, NULL)                   \
376    V(int, pending_microtask_count, 0)                                           \
377    V(bool, autorun_microtasks, true)                                            \
378    V(HStatistics*, hstatistics, NULL)                                           \
379    V(HStatistics*, tstatistics, NULL)                                           \
380    V(HTracer*, htracer, NULL)                                                   \
381    V(CodeTracer*, code_tracer, NULL)                                            \
382    V(bool, fp_stubs_generated, false)                                           \
383    V(int, max_available_threads, 0)                                             \
384    V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
385    V(InterruptCallback, api_interrupt_callback, NULL)                           \
386    V(void*, api_interrupt_callback_data, NULL)                                  \
387    ISOLATE_INIT_SIMULATOR_LIST(V)
388  
389  #define THREAD_LOCAL_TOP_ACCESSOR(type, name)                        \
390    inline void set_##name(type v) { thread_local_top_.name##_ = v; }  \
391    inline type name() const { return thread_local_top_.name##_; }
392  
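// Expansion sketch: THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id), used in
// Isolate below, forwards to the embedded ThreadLocalTop roughly as
//
//   inline void set_thread_id(ThreadId v) { thread_local_top_.thread_id_ = v; }
//   inline ThreadId thread_id() const { return thread_local_top_.thread_id_; }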
393  
394  class Isolate {
395    // These forward declarations are required to make the friend declarations in
396    // PerIsolateThreadData work on some older versions of gcc.
397    class ThreadDataTable;
398    class EntryStackItem;
399   public:
400    ~Isolate();
401  
402    // A thread has a PerIsolateThreadData instance for each isolate that it has
403    // entered. That instance is allocated when the isolate is initially entered
404    // and reused on subsequent entries.
405    class PerIsolateThreadData {
406     public:
407      PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
408          : isolate_(isolate),
409            thread_id_(thread_id),
410            stack_limit_(0),
411            thread_state_(NULL),
412  #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
413      !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
414      !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
415      !defined(__mips__) && V8_TARGET_ARCH_MIPS64
416            simulator_(NULL),
417  #endif
418            next_(NULL),
419            prev_(NULL) { }
420      ~PerIsolateThreadData();
421      Isolate* isolate() const { return isolate_; }
422      ThreadId thread_id() const { return thread_id_; }
423  
424      FIELD_ACCESSOR(uintptr_t, stack_limit)
425      FIELD_ACCESSOR(ThreadState*, thread_state)
426  
427  #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
428      !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
429      !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
430      !defined(__mips__) && V8_TARGET_ARCH_MIPS64
431      FIELD_ACCESSOR(Simulator*, simulator)
432  #endif
433  
434      bool Matches(Isolate* isolate, ThreadId thread_id) const {
435        return isolate_ == isolate && thread_id_.Equals(thread_id);
436      }
437  
438     private:
439      Isolate* isolate_;
440      ThreadId thread_id_;
441      uintptr_t stack_limit_;
442      ThreadState* thread_state_;
443  
444  #if !defined(__arm__) && V8_TARGET_ARCH_ARM || \
445      !defined(__aarch64__) && V8_TARGET_ARCH_ARM64 || \
446      !defined(__mips__) && V8_TARGET_ARCH_MIPS || \
447      !defined(__mips__) && V8_TARGET_ARCH_MIPS64
448      Simulator* simulator_;
449  #endif
450  
451      PerIsolateThreadData* next_;
452      PerIsolateThreadData* prev_;
453  
454      friend class Isolate;
455      friend class ThreadDataTable;
456      friend class EntryStackItem;
457  
458      DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
459    };
460  
461  
462    enum AddressId {
463  #define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
464      FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
465  #undef DECLARE_ENUM
466      kIsolateAddressCount
467    };
468  
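// Expansion sketch: with FOR_EACH_ISOLATE_ADDRESS_NAME above, DECLARE_ENUM
// produces
//
//   kHandlerAddress, kCEntryFPAddress, kContextAddress,
//   kPendingExceptionAddress, kExternalCaughtExceptionAddress,
//   kJSEntrySPAddress,
//
// followed by kIsolateAddressCount.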
469    static void InitializeOncePerProcess();
470  
471    // Returns the PerIsolateThreadData for the current thread (or NULL if one is
472    // not currently set).
473    static PerIsolateThreadData* CurrentPerIsolateThreadData() {
474      return reinterpret_cast<PerIsolateThreadData*>(
475          base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
476    }
477  
478    // Returns the isolate inside which the current thread is running.
479    INLINE(static Isolate* Current()) {
480      Isolate* isolate = reinterpret_cast<Isolate*>(
481          base::Thread::GetExistingThreadLocal(isolate_key_));
482      DCHECK(isolate != NULL);
483      return isolate;
484    }
485  
486    INLINE(static Isolate* UncheckedCurrent()) {
487      return reinterpret_cast<Isolate*>(
488          base::Thread::GetThreadLocal(isolate_key_));
489    }
490  
491    // Like UncheckedCurrent, but skips the check that |isolate_key_| was
492    // initialized. Callers have to ensure that themselves.
493    INLINE(static Isolate* UnsafeCurrent()) {
494      return reinterpret_cast<Isolate*>(
495          base::Thread::GetThreadLocal(isolate_key_));
496    }
497  
498    // Usually called by Init(), but can be called early e.g. to allow
499    // testing components that require logging but not the whole
500    // isolate.
501    //
502    // Safe to call more than once.
503    void InitializeLoggingAndCounters();
504  
505    bool Init(Deserializer* des);
506  
507    bool IsInitialized() { return state_ == INITIALIZED; }
508  
509    // True if at least one thread Enter'ed this isolate.
510    bool IsInUse() { return entry_stack_ != NULL; }
511  
512    // Destroys the non-default isolates.
513    // Sets the default isolate into a "has_been_disposed" state rather than
514    // destroying it, for legacy API reasons.
515    void TearDown();
516  
517    static void GlobalTearDown();
518  
519    // Find the PerThread for this particular (isolate, thread) combination
520    // If one does not yet exist, return null.
521    PerIsolateThreadData* FindPerThreadDataForThisThread();
522  
523    // Find the PerThread for given (isolate, thread) combination
524    // If one does not yet exist, return null.
525    PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
526  
527    // Returns the key used to store the pointer to the current isolate.
528    // Used internally for V8 threads that do not execute JavaScript but still
529    // are part of the domain of an isolate (like the context switcher).
530    static base::Thread::LocalStorageKey isolate_key() {
531      return isolate_key_;
532    }
533  
534    // Returns the key used to store process-wide thread IDs.
535    static base::Thread::LocalStorageKey thread_id_key() {
536      return thread_id_key_;
537    }
538  
539    static base::Thread::LocalStorageKey per_isolate_thread_data_key();
540  
541    // Mutex for serializing access to break control structures.
542    base::RecursiveMutex* break_access() { return &break_access_; }
543  
544    Address get_address_from_id(AddressId id);
545  
546    // Access to top context (where the current function object was created).
547    Context* context() { return thread_local_top_.context_; }
548    void set_context(Context* context) {
549      DCHECK(context == NULL || context->IsContext());
550      thread_local_top_.context_ = context;
551    }
552    Context** context_address() { return &thread_local_top_.context_; }
553  
554    THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
555  
556    // Access to current thread id.
557    THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
558  
559    // Interface to pending exception.
560    Object* pending_exception() {
561      DCHECK(has_pending_exception());
562      DCHECK(!thread_local_top_.pending_exception_->IsException());
563      return thread_local_top_.pending_exception_;
564    }
565  
566    void set_pending_exception(Object* exception_obj) {
567      DCHECK(!exception_obj->IsException());
568      thread_local_top_.pending_exception_ = exception_obj;
569    }
570  
571    void clear_pending_exception() {
572      DCHECK(!thread_local_top_.pending_exception_->IsException());
573      thread_local_top_.pending_exception_ = heap_.the_hole_value();
574    }
575  
576    Object** pending_exception_address() {
577      return &thread_local_top_.pending_exception_;
578    }
579  
580    bool has_pending_exception() {
581      DCHECK(!thread_local_top_.pending_exception_->IsException());
582      return !thread_local_top_.pending_exception_->IsTheHole();
583    }
584  
585    THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)
586  
587    void clear_pending_message() {
588      thread_local_top_.has_pending_message_ = false;
589      thread_local_top_.pending_message_obj_ = heap_.the_hole_value();
590      thread_local_top_.pending_message_script_ = heap_.the_hole_value();
591    }
592    v8::TryCatch* try_catch_handler() {
593      return thread_local_top_.try_catch_handler();
594    }
595    Address try_catch_handler_address() {
596      return thread_local_top_.try_catch_handler_address();
597    }
598    bool* external_caught_exception_address() {
599      return &thread_local_top_.external_caught_exception_;
600    }
601  
602    THREAD_LOCAL_TOP_ACCESSOR(v8::TryCatch*, catcher)
603  
604    Object** scheduled_exception_address() {
605      return &thread_local_top_.scheduled_exception_;
606    }
607  
608    Address pending_message_obj_address() {
609      return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
610    }
611  
612    Address has_pending_message_address() {
613      return reinterpret_cast<Address>(&thread_local_top_.has_pending_message_);
614    }
615  
616    Address pending_message_script_address() {
617      return reinterpret_cast<Address>(
618          &thread_local_top_.pending_message_script_);
619    }
620  
621    Object* scheduled_exception() {
622      DCHECK(has_scheduled_exception());
623      DCHECK(!thread_local_top_.scheduled_exception_->IsException());
624      return thread_local_top_.scheduled_exception_;
625    }
626    bool has_scheduled_exception() {
627      DCHECK(!thread_local_top_.scheduled_exception_->IsException());
628      return thread_local_top_.scheduled_exception_ != heap_.the_hole_value();
629    }
630    void clear_scheduled_exception() {
631      DCHECK(!thread_local_top_.scheduled_exception_->IsException());
632      thread_local_top_.scheduled_exception_ = heap_.the_hole_value();
633    }
634  
635    bool HasExternalTryCatch();
636    bool IsFinallyOnTop();
637  
638    bool is_catchable_by_javascript(Object* exception) {
639      return exception != heap()->termination_exception();
640    }
641  
642    // Serializer.
643    void PushToPartialSnapshotCache(Object* obj);
644  
645    // JS execution stack (see frames.h).
646    static Address c_entry_fp(ThreadLocalTop* thread) {
647      return thread->c_entry_fp_;
648    }
649    static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
650  
651    inline Address* c_entry_fp_address() {
652      return &thread_local_top_.c_entry_fp_;
653    }
654    inline Address* handler_address() { return &thread_local_top_.handler_; }
655  
656    // Bottom JS entry.
657    Address js_entry_sp() {
658      return thread_local_top_.js_entry_sp_;
659    }
660    inline Address* js_entry_sp_address() {
661      return &thread_local_top_.js_entry_sp_;
662    }
663  
664    // Generated code scratch locations.
665    void* formal_count_address() { return &thread_local_top_.formal_count_; }
666  
667    // Returns the global object of the current context. It could be
668    // a builtin object, or a JS global object.
669    Handle<GlobalObject> global_object() {
670      return Handle<GlobalObject>(context()->global_object());
671    }
672  
673    // Returns the global proxy object of the current context.
674    JSObject* global_proxy() {
675      return context()->global_proxy();
676    }
677  
678    Handle<JSBuiltinsObject> js_builtins_object() {
679      return Handle<JSBuiltinsObject>(thread_local_top_.context_->builtins());
680    }
681  
682    static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
683    void FreeThreadResources() { thread_local_top_.Free(); }
684  
685    // This method is called by the API after operations that may throw
686    // exceptions.  If an exception was thrown and not handled by an external
687    // handler, the exception is scheduled to be rethrown when we return to
688    // running JavaScript code.  Returns true if an exception was scheduled.
689    bool OptionalRescheduleException(bool is_bottom_call);
690  
691    // Push and pop a promise and the current try-catch handler.
692    void PushPromise(Handle<JSObject> promise);
693    void PopPromise();
694    Handle<Object> GetPromiseOnStackOnThrow();
695  
696    class ExceptionScope {
697     public:
698    explicit ExceptionScope(Isolate* isolate) :
699        // Scope currently can only be used for regular exceptions,
700        // not termination exception.
701        isolate_(isolate),
702        pending_exception_(isolate_->pending_exception(), isolate_),
703        catcher_(isolate_->catcher())
704      { }
705  
706    ~ExceptionScope() {
707        isolate_->set_catcher(catcher_);
708        isolate_->set_pending_exception(*pending_exception_);
709      }
710  
711     private:
712      Isolate* isolate_;
713      Handle<Object> pending_exception_;
714      v8::TryCatch* catcher_;
715    };
716  
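// Usage sketch (illustrative, hypothetical call site): ExceptionScope keeps
// the currently pending exception and catcher alive across work that may
// clobber them, restoring both when the scope unwinds.  The constructor reads
// pending_exception(), so an exception must already be pending.
//
//   {
//     ExceptionScope scope(isolate);
//     isolate->clear_pending_exception();
//     // ... code that may throw and handle its own exceptions ...
//   }  // original pending exception and catcher restored here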
717    void SetCaptureStackTraceForUncaughtExceptions(
718        bool capture,
719        int frame_limit,
720        StackTrace::StackTraceOptions options);
721  
722    void PrintCurrentStackTrace(FILE* out);
723    void PrintStack(StringStream* accumulator);
724    void PrintStack(FILE* out);
725    Handle<String> StackTraceString();
726    NO_INLINE(void PushStackTraceAndDie(unsigned int magic,
727                                        Object* object,
728                                        Map* map,
729                                        unsigned int magic2));
730    Handle<JSArray> CaptureCurrentStackTrace(
731        int frame_limit,
732        StackTrace::StackTraceOptions options);
733    Handle<Object> CaptureSimpleStackTrace(Handle<JSObject> error_object,
734                                           Handle<Object> caller);
735    void CaptureAndSetDetailedStackTrace(Handle<JSObject> error_object);
736    void CaptureAndSetSimpleStackTrace(Handle<JSObject> error_object,
737                                       Handle<Object> caller);
738  
739    // Returns whether the top context may access the given global object. If
740    // the result is false, the pending exception is guaranteed to be
741    // set.
742  
743    bool MayNamedAccess(Handle<JSObject> receiver,
744                        Handle<Object> key,
745                        v8::AccessType type);
746    bool MayIndexedAccess(Handle<JSObject> receiver,
747                          uint32_t index,
748                          v8::AccessType type);
749  
750    void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
751    void ReportFailedAccessCheck(Handle<JSObject> receiver, v8::AccessType type);
752  
753    // Exception throwing support. The caller should use the result
754    // of Throw() as its return value.
755    Object* Throw(Object* exception, MessageLocation* location = NULL);
756  
757    template <typename T>
758    MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
759                                         MessageLocation* location = NULL) {
760      Throw(*exception, location);
761      return MaybeHandle<T>();
762    }
763  
764    // Re-throw an exception.  This involves no error reporting since
765    // error reporting was handled when the exception was thrown
766    // originally.
767    Object* ReThrow(Object* exception);
768    void ScheduleThrow(Object* exception);
769    // Re-set pending message, script and positions reported to the TryCatch
770    // back to the TLS for re-use when rethrowing.
771    void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
772    // Un-schedule an exception that was caught by a TryCatch handler.
773    void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
774    void ReportPendingMessages();
775    // Return pending location if any or unfilled structure.
776    MessageLocation GetMessageLocation();
777    Object* ThrowIllegalOperation();
778  
779    // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
780    Object* PromoteScheduledException();
781    void DoThrow(Object* exception, MessageLocation* location);
782    // Checks if exception should be reported and finds out if it's
783    // caught externally.
784    bool ShouldReportException(bool* can_be_caught_externally,
785                               bool catchable_by_javascript);
786  
787    // Attempts to compute the current source location, storing the
788    // result in the target out parameter.
789    void ComputeLocation(MessageLocation* target);
790  
791    // Out of resource exception helpers.
792    Object* StackOverflow();
793    Object* TerminateExecution();
794    void CancelTerminateExecution();
795  
796    void InvokeApiInterruptCallback();
797  
798    // Administration
799    void Iterate(ObjectVisitor* v);
800    void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
801    char* Iterate(ObjectVisitor* v, char* t);
802    void IterateThread(ThreadVisitor* v, char* t);
803  
804  
805    // Returns the current native and global context.
806    Handle<Context> native_context();
807    Handle<Context> global_context();
808  
809    // Returns the native context of the calling JavaScript code.  That
810    // is, the native context of the top-most JavaScript frame.
811    Handle<Context> GetCallingNativeContext();
812  
813    void RegisterTryCatchHandler(v8::TryCatch* that);
814    void UnregisterTryCatchHandler(v8::TryCatch* that);
815  
816    char* ArchiveThread(char* to);
817    char* RestoreThread(char* from);
818  
819    static const char* const kStackOverflowMessage;
820  
821    static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
822    static const int kBMMaxShift = 250;        // See StringSearchBase.
823  
824    // Accessors.
825  #define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
826    inline type name() const {                                            \
827      DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
828      return name##_;                                                     \
829    }                                                                     \
830    inline void set_##name(type value) {                                  \
831      DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
832      name##_ = value;                                                    \
833    }
834    ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
835  #undef GLOBAL_ACCESSOR
836  
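// Expansion sketch: for one ISOLATE_INIT_LIST entry, e.g.
// V(bool, fp_stubs_generated, false), GLOBAL_ACCESSOR above generates roughly
//
//   inline bool fp_stubs_generated() const {
//     DCHECK(OFFSET_OF(Isolate, fp_stubs_generated_) ==
//            fp_stubs_generated_debug_offset_);
//     return fp_stubs_generated_;
//   }
//   inline void set_fp_stubs_generated(bool value) {
//     DCHECK(OFFSET_OF(Isolate, fp_stubs_generated_) ==
//            fp_stubs_generated_debug_offset_);
//     fp_stubs_generated_ = value;
//   }
//
// with the backing field fp_stubs_generated_ declared further below by
// GLOBAL_BACKING_STORE.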
837  #define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
838    inline type* name() {                                                 \
839      DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
840      return &(name##_)[0];                                               \
841    }
842    ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
843  #undef GLOBAL_ARRAY_ACCESSOR
844  
845  #define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)            \
846    Handle<type> name() {                                             \
847      return Handle<type>(native_context()->name(), this);            \
848    }                                                                 \
849    bool is_##name(type* value) {                                     \
850      return native_context()->is_##name(value);                      \
851    }
852    NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
853  #undef NATIVE_CONTEXT_FIELD_ACCESSOR
854  
855    Bootstrapper* bootstrapper() { return bootstrapper_; }
856    Counters* counters() {
857      // Call InitializeLoggingAndCounters() if logging is needed before
858      // the isolate is fully initialized.
859      DCHECK(counters_ != NULL);
860      return counters_;
861    }
862    CodeRange* code_range() { return code_range_; }
863    RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
864    CompilationCache* compilation_cache() { return compilation_cache_; }
865    Logger* logger() {
866      // Call InitializeLoggingAndCounters() if logging is needed before
867      // the isolate is fully initialized.
868      DCHECK(logger_ != NULL);
869      return logger_;
870    }
871    StackGuard* stack_guard() { return &stack_guard_; }
872    Heap* heap() { return &heap_; }
873    StatsTable* stats_table();
874    StubCache* stub_cache() { return stub_cache_; }
875    CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
876    DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
877    ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
878    MaterializedObjectStore* materialized_object_store() {
879      return materialized_object_store_;
880    }
881  
882    MemoryAllocator* memory_allocator() {
883      return memory_allocator_;
884    }
885  
886    KeyedLookupCache* keyed_lookup_cache() {
887      return keyed_lookup_cache_;
888    }
889  
890    ContextSlotCache* context_slot_cache() {
891      return context_slot_cache_;
892    }
893  
894    DescriptorLookupCache* descriptor_lookup_cache() {
895      return descriptor_lookup_cache_;
896    }
897  
898    HandleScopeData* handle_scope_data() { return &handle_scope_data_; }
899  
900    HandleScopeImplementer* handle_scope_implementer() {
901      DCHECK(handle_scope_implementer_);
902      return handle_scope_implementer_;
903    }
904    Zone* runtime_zone() { return &runtime_zone_; }
905  
906    UnicodeCache* unicode_cache() {
907      return unicode_cache_;
908    }
909  
910    InnerPointerToCodeCache* inner_pointer_to_code_cache() {
911      return inner_pointer_to_code_cache_;
912    }
913  
914    ConsStringIteratorOp* write_iterator() { return write_iterator_; }
915  
916    GlobalHandles* global_handles() { return global_handles_; }
917  
918    EternalHandles* eternal_handles() { return eternal_handles_; }
919  
920    ThreadManager* thread_manager() { return thread_manager_; }
921  
922    StringTracker* string_tracker() { return string_tracker_; }
923  
924    unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
925      return &jsregexp_uncanonicalize_;
926    }
927  
928    unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
929      return &jsregexp_canonrange_;
930    }
931  
932    ConsStringIteratorOp* objects_string_compare_iterator_a() {
933      return &objects_string_compare_iterator_a_;
934    }
935  
936    ConsStringIteratorOp* objects_string_compare_iterator_b() {
937      return &objects_string_compare_iterator_b_;
938    }
939  
940    StaticResource<ConsStringIteratorOp>* objects_string_iterator() {
941      return &objects_string_iterator_;
942    }
943  
944    RuntimeState* runtime_state() { return &runtime_state_; }
945  
946    Builtins* builtins() { return &builtins_; }
947  
948    void NotifyExtensionInstalled() {
949      has_installed_extensions_ = true;
950    }
951  
952    bool has_installed_extensions() { return has_installed_extensions_; }
953  
954    unibrow::Mapping<unibrow::Ecma262Canonicalize>*
955        regexp_macro_assembler_canonicalize() {
956      return &regexp_macro_assembler_canonicalize_;
957    }
958  
959    RegExpStack* regexp_stack() { return regexp_stack_; }
960  
961    unibrow::Mapping<unibrow::Ecma262Canonicalize>*
962        interp_canonicalize_mapping() {
963      return &interp_canonicalize_mapping_;
964    }
965  
966    Debug* debug() { return debug_; }
967  
968    inline bool DebuggerHasBreakPoints();
969  
970    CpuProfiler* cpu_profiler() const { return cpu_profiler_; }
971    HeapProfiler* heap_profiler() const { return heap_profiler_; }
972  
973  #ifdef DEBUG
974    HistogramInfo* heap_histograms() { return heap_histograms_; }
975  
976    JSObject::SpillInformation* js_spill_information() {
977      return &js_spill_information_;
978    }
979  #endif
980  
981    Factory* factory() { return reinterpret_cast<Factory*>(this); }
982  
983    static const int kJSRegexpStaticOffsetsVectorSize = 128;
984  
985    THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
986  
987    THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
988  
989    void SetData(uint32_t slot, void* data) {
990      DCHECK(slot < Internals::kNumIsolateDataSlots);
991      embedder_data_[slot] = data;
992    }
993    void* GetData(uint32_t slot) {
994      DCHECK(slot < Internals::kNumIsolateDataSlots);
995      return embedder_data_[slot];
996    }
997  
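// Usage sketch (illustrative; the embedder struct and slot index are
// hypothetical): embedders stash per-isolate pointers in the numbered data
// slots and read them back with GetData().
//
//   struct EmbedderState { int request_count; };
//   ...
//   isolate->SetData(0, new EmbedderState());
//   EmbedderState* state = static_cast<EmbedderState*>(isolate->GetData(0));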
998    THREAD_LOCAL_TOP_ACCESSOR(LookupResult*, top_lookup_result)
999  
1000    void enable_serializer() {
1001      // The serializer can only be enabled before the isolate init.
1002      DCHECK(state_ != INITIALIZED);
1003      serializer_enabled_ = true;
1004    }
1005  
1006    bool serializer_enabled() const { return serializer_enabled_; }
1007  
1008    bool IsDead() { return has_fatal_error_; }
1009    void SignalFatalError() { has_fatal_error_ = true; }
1010  
1011    bool use_crankshaft() const;
1012  
1013    bool initialized_from_snapshot() { return initialized_from_snapshot_; }
1014  
1015    double time_millis_since_init() {
1016      return base::OS::TimeCurrentMillis() - time_millis_at_init_;
1017    }
1018  
1019    DateCache* date_cache() {
1020      return date_cache_;
1021    }
1022  
1023    void set_date_cache(DateCache* date_cache) {
1024      if (date_cache != date_cache_) {
1025        delete date_cache_;
1026      }
1027      date_cache_ = date_cache;
1028    }
1029  
1030    Map* get_initial_js_array_map(ElementsKind kind);
1031  
1032    bool IsFastArrayConstructorPrototypeChainIntact();
1033  
1034    CallInterfaceDescriptorData* call_descriptor_data(int index);
1035  
1036    void IterateDeferredHandles(ObjectVisitor* visitor);
1037    void LinkDeferredHandles(DeferredHandles* deferred_handles);
1038    void UnlinkDeferredHandles(DeferredHandles* deferred_handles);
1039  
1040  #ifdef DEBUG
1041    bool IsDeferredHandle(Object** location);
1042  #endif  // DEBUG
1043  
1044    bool concurrent_recompilation_enabled() {
1045      // Thread is only available with flag enabled.
1046      DCHECK(optimizing_compiler_thread_ == NULL ||
1047             FLAG_concurrent_recompilation);
1048      return optimizing_compiler_thread_ != NULL;
1049    }
1050  
1051    bool concurrent_osr_enabled() const {
1052      // Thread is only available with flag enabled.
1053      DCHECK(optimizing_compiler_thread_ == NULL ||
1054             FLAG_concurrent_recompilation);
1055      return optimizing_compiler_thread_ != NULL && FLAG_concurrent_osr;
1056    }
1057  
1058    OptimizingCompilerThread* optimizing_compiler_thread() {
1059      return optimizing_compiler_thread_;
1060    }
1061  
1062    int num_sweeper_threads() const {
1063      return num_sweeper_threads_;
1064    }
1065  
1066    SweeperThread** sweeper_threads() {
1067      return sweeper_thread_;
1068    }
1069  
1070    int id() const { return static_cast<int>(id_); }
1071  
1072    HStatistics* GetHStatistics();
1073    HStatistics* GetTStatistics();
1074    HTracer* GetHTracer();
1075    CodeTracer* GetCodeTracer();
1076  
1077    FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
1078    void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
1079      function_entry_hook_ = function_entry_hook;
1080    }
1081  
1082    void* stress_deopt_count_address() { return &stress_deopt_count_; }
1083  
1084    inline base::RandomNumberGenerator* random_number_generator();
1085  
1086    // Given an address occupied by a live code object, return that object.
1087    Object* FindCodeObject(Address a);
1088  
1089    int NextOptimizationId() {
1090      int id = next_optimization_id_++;
1091      if (!Smi::IsValid(next_optimization_id_)) {
1092        next_optimization_id_ = 0;
1093      }
1094      return id;
1095    }
1096  
1097    // Get (and lazily initialize) the registry for per-isolate symbols.
1098    Handle<JSObject> GetSymbolRegistry();
1099  
1100    void AddCallCompletedCallback(CallCompletedCallback callback);
1101    void RemoveCallCompletedCallback(CallCompletedCallback callback);
1102    void FireCallCompletedCallback();
1103  
1104    void EnqueueMicrotask(Handle<Object> microtask);
1105    void RunMicrotasks();
1106  
1107    void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
1108    void CountUsage(v8::Isolate::UseCounterFeature feature);
1109  
1110    static Isolate* NewForTesting() { return new Isolate(); }
1111  
1112   private:
1113    Isolate();
1114  
1115    friend struct GlobalState;
1116    friend struct InitializeGlobalState;
1117  
1118    enum State {
1119      UNINITIALIZED,    // Some components may not have been allocated.
1120      INITIALIZED       // All components are fully initialized.
1121    };
1122  
1123    // These fields are accessed through the API, offsets must be kept in sync
1124    // with v8::internal::Internals (in include/v8.h) constants. This is also
1125    // verified in Isolate::Init() using runtime checks.
1126    void* embedder_data_[Internals::kNumIsolateDataSlots];
1127    Heap heap_;
1128    State state_;  // Will be padded to kApiPointerSize.
1129  
1130    // The per-process lock should be acquired before the ThreadDataTable is
1131    // modified.
1132    class ThreadDataTable {
1133     public:
1134      ThreadDataTable();
1135      ~ThreadDataTable();
1136  
1137      PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
1138      void Insert(PerIsolateThreadData* data);
1139      void Remove(PerIsolateThreadData* data);
1140      void RemoveAllThreads(Isolate* isolate);
1141  
1142     private:
1143      PerIsolateThreadData* list_;
1144    };
1145  
1146    // These items form a stack synchronously with threads Enter'ing and Exit'ing
1147    // the Isolate. The top of the stack points to a thread which is currently
1148    // running the Isolate. When the stack is empty, the Isolate is considered
1149    // not entered by any thread and can be Disposed.
1150    // If the same thread enters the Isolate more than once, the entry_count_
1151    // is incremented rather than a new item being pushed to the stack.
1152    class EntryStackItem {
1153     public:
1154      EntryStackItem(PerIsolateThreadData* previous_thread_data,
1155                     Isolate* previous_isolate,
1156                     EntryStackItem* previous_item)
1157          : entry_count(1),
1158            previous_thread_data(previous_thread_data),
1159            previous_isolate(previous_isolate),
1160            previous_item(previous_item) { }
1161  
1162      int entry_count;
1163      PerIsolateThreadData* previous_thread_data;
1164      Isolate* previous_isolate;
1165      EntryStackItem* previous_item;
1166  
1167     private:
1168      DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
1169    };
1170  
1171    static base::LazyMutex thread_data_table_mutex_;
1172  
1173    static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
1174    static base::Thread::LocalStorageKey isolate_key_;
1175    static base::Thread::LocalStorageKey thread_id_key_;
1176    static ThreadDataTable* thread_data_table_;
1177  
1178    // A global counter for all generated Isolates, might overflow.
1179    static base::Atomic32 isolate_counter_;
1180  
1181    void Deinit();
1182  
1183    static void SetIsolateThreadLocals(Isolate* isolate,
1184                                       PerIsolateThreadData* data);
1185  
1186    // Find the PerThread for this particular (isolate, thread) combination.
1187    // If one does not yet exist, allocate a new one.
1188    PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
1189  
1190    // Initializes the current thread to run this Isolate.
1191    // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1192    // at the same time; this should be prevented using external locking.
1193    void Enter();
1194  
1195    // Exits the current thread. The previously entered Isolate is restored
1196    // for the thread.
1197    // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
1198    // at the same time; this should be prevented using external locking.
1199    void Exit();
1200  
1201    void InitializeThreadLocal();
1202  
1203    void MarkCompactPrologue(bool is_compacting,
1204                             ThreadLocalTop* archived_thread_data);
1205    void MarkCompactEpilogue(bool is_compacting,
1206                             ThreadLocalTop* archived_thread_data);
1207  
1208    void FillCache();
1209  
1210    // Propagate pending exception message to the v8::TryCatch.
1211    // If there is no external try-catch or the message was successfully
1212    // propagated, then return true.
1213    bool PropagatePendingExceptionToExternalTryCatch();
1214  
1215    // Traverse prototype chain to find out whether the object is derived from
1216    // the Error object.
1217    bool IsErrorObject(Handle<Object> obj);
1218  
1219    base::Atomic32 id_;
1220    EntryStackItem* entry_stack_;
1221    int stack_trace_nesting_level_;
1222    StringStream* incomplete_message_;
1223    Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
1224    Bootstrapper* bootstrapper_;
1225    RuntimeProfiler* runtime_profiler_;
1226    CompilationCache* compilation_cache_;
1227    Counters* counters_;
1228    CodeRange* code_range_;
1229    base::RecursiveMutex break_access_;
1230    base::Atomic32 debugger_initialized_;
1231    Logger* logger_;
1232    StackGuard stack_guard_;
1233    StatsTable* stats_table_;
1234    StubCache* stub_cache_;
1235    CodeAgingHelper* code_aging_helper_;
1236    DeoptimizerData* deoptimizer_data_;
1237    MaterializedObjectStore* materialized_object_store_;
1238    ThreadLocalTop thread_local_top_;
1239    bool capture_stack_trace_for_uncaught_exceptions_;
1240    int stack_trace_for_uncaught_exceptions_frame_limit_;
1241    StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1242    MemoryAllocator* memory_allocator_;
1243    KeyedLookupCache* keyed_lookup_cache_;
1244    ContextSlotCache* context_slot_cache_;
1245    DescriptorLookupCache* descriptor_lookup_cache_;
1246    HandleScopeData handle_scope_data_;
1247    HandleScopeImplementer* handle_scope_implementer_;
1248    UnicodeCache* unicode_cache_;
1249    Zone runtime_zone_;
1250    InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1251    ConsStringIteratorOp* write_iterator_;
1252    GlobalHandles* global_handles_;
1253    EternalHandles* eternal_handles_;
1254    ThreadManager* thread_manager_;
1255    RuntimeState runtime_state_;
1256    Builtins builtins_;
1257    bool has_installed_extensions_;
1258    StringTracker* string_tracker_;
1259    unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
1260    unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
1261    ConsStringIteratorOp objects_string_compare_iterator_a_;
1262    ConsStringIteratorOp objects_string_compare_iterator_b_;
1263    StaticResource<ConsStringIteratorOp> objects_string_iterator_;
1264    unibrow::Mapping<unibrow::Ecma262Canonicalize>
1265        regexp_macro_assembler_canonicalize_;
1266    RegExpStack* regexp_stack_;
1267    DateCache* date_cache_;
1268    unibrow::Mapping<unibrow::Ecma262Canonicalize> interp_canonicalize_mapping_;
1269    CallInterfaceDescriptorData* call_descriptor_data_;
1270    base::RandomNumberGenerator* random_number_generator_;
1271  
1272    // Whether the isolate has been created for snapshotting.
1273    bool serializer_enabled_;
1274  
1275    // True if fatal error has been signaled for this isolate.
1276    bool has_fatal_error_;
1277  
1278    // True if this isolate was initialized from a snapshot.
1279    bool initialized_from_snapshot_;
1280  
1281    // Time stamp at initialization.
1282    double time_millis_at_init_;
1283  
1284  #ifdef DEBUG
1285    // A static array of histogram info for each type.
1286    HistogramInfo heap_histograms_[LAST_TYPE + 1];
1287    JSObject::SpillInformation js_spill_information_;
1288  #endif
1289  
1290    Debug* debug_;
1291    CpuProfiler* cpu_profiler_;
1292    HeapProfiler* heap_profiler_;
1293    FunctionEntryHook function_entry_hook_;
1294  
1295  #define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
1296    type name##_;
1297    ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
1298  #undef GLOBAL_BACKING_STORE
1299  
1300  #define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
1301    type name##_[length];
1302    ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
1303  #undef GLOBAL_ARRAY_BACKING_STORE
1304  
1305  #ifdef DEBUG
1306    // This class is huge and has a number of fields controlled by
1307    // preprocessor defines. Make sure the offsets of these fields agree
1308    // between compilation units.
1309  #define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
1310    static const intptr_t name##_debug_offset_;
1311    ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
1312    ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
1313  #undef ISOLATE_FIELD_OFFSET
1314  #endif
1315  
1316    DeferredHandles* deferred_handles_head_;
1317    OptimizingCompilerThread* optimizing_compiler_thread_;
1318    SweeperThread** sweeper_thread_;
1319    int num_sweeper_threads_;
1320  
1321    // Counts deopt points if deopt_every_n_times is enabled.
1322    unsigned int stress_deopt_count_;
1323  
1324    int next_optimization_id_;
1325  
1326    // List of callbacks when a Call completes.
1327    List<CallCompletedCallback> call_completed_callbacks_;
1328  
1329    v8::Isolate::UseCounterCallback use_counter_callback_;
1330  
1331    friend class ExecutionAccess;
1332    friend class HandleScopeImplementer;
1333    friend class IsolateInitializer;
1334    friend class OptimizingCompilerThread;
1335    friend class SweeperThread;
1336    friend class ThreadManager;
1337    friend class Simulator;
1338    friend class StackGuard;
1339    friend class ThreadId;
1340    friend class TestMemoryAllocatorScope;
1341    friend class TestCodeRangeScope;
1342    friend class v8::Isolate;
1343    friend class v8::Locker;
1344    friend class v8::Unlocker;
1345  
1346    DISALLOW_COPY_AND_ASSIGN(Isolate);
1347  };
1348  
1349  
1350  #undef FIELD_ACCESSOR
1351  #undef THREAD_LOCAL_TOP_ACCESSOR
1352  
1353  
1354  class PromiseOnStack {
1355   public:
1356    PromiseOnStack(StackHandler* handler, Handle<JSObject> promise,
1357                   PromiseOnStack* prev)
1358        : handler_(handler), promise_(promise), prev_(prev) {}
1359    StackHandler* handler() { return handler_; }
1360    Handle<JSObject> promise() { return promise_; }
1361    PromiseOnStack* prev() { return prev_; }
1362  
1363   private:
1364    StackHandler* handler_;
1365    Handle<JSObject> promise_;
1366    PromiseOnStack* prev_;
1367  };
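// A minimal usage sketch (the PushPromiseEntry helper below is hypothetical,
// not part of this header): each PromiseOnStack entry links to the previous
// head of the list, so the debugger can walk the chain from the most recent
// promise outwards.
//
//   PromiseOnStack* PushPromiseEntry(PromiseOnStack* head,
//                                    StackHandler* handler,
//                                    Handle<JSObject> promise) {
//     return new PromiseOnStack(handler, promise, head);
//   }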
1368  
1369  
1370  // If the GCC version is 4.1.x or 4.2.x, an additional field is added to
1371  // the class as a workaround for a bug in the generated code found with
1372  // these versions of GCC. See V8 issue 122 for details.
1373  class SaveContext BASE_EMBEDDED {
1374   public:
1375    inline explicit SaveContext(Isolate* isolate);
1376  
1377    ~SaveContext() {
1378      isolate_->set_context(context_.is_null() ? NULL : *context_);
1379      isolate_->set_save_context(prev_);
1380    }
1381  
1382    Handle<Context> context() { return context_; }
1383    SaveContext* prev() { return prev_; }
1384  
1385    // Returns true if this save context is below a given JavaScript frame.
1386    bool IsBelowFrame(JavaScriptFrame* frame) {
1387      return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1388    }
1389  
1390   private:
1391    Isolate* isolate_;
1392    Handle<Context> context_;
1393    SaveContext* prev_;
1394    Address c_entry_fp_;
1395  };
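// Usage sketch (the RunInTemporaryContext function is an assumed caller-side
// pattern, not taken from V8): SaveContext is a RAII scope, so the destructor
// restores whatever context was current when the scope was entered, even on
// early returns.
//
//   void RunInTemporaryContext(Isolate* isolate, Handle<Context> temp) {
//     SaveContext save(isolate);       // remembers the current context
//     isolate->set_context(*temp);     // switch to the temporary context
//     // ... perform work inside |temp| ...
//   }                                  // ~SaveContext restores the old context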
1396  
1397  
1398  class AssertNoContextChange BASE_EMBEDDED {
1399  #ifdef DEBUG
1400   public:
1401    explicit AssertNoContextChange(Isolate* isolate)
1402      : isolate_(isolate),
1403        context_(isolate->context(), isolate) { }
1404    ~AssertNoContextChange() {
1405      DCHECK(isolate_->context() == *context_);
1406    }
1407  
1408   private:
1409    Isolate* isolate_;
1410    Handle<Context> context_;
1411  #else
1412   public:
1413    explicit AssertNoContextChange(Isolate* isolate) { }
1414  #endif
1415  };
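// Usage sketch (hypothetical call site): AssertNoContextChange is a
// debug-only assertion that the current context is unchanged by the time the
// scope is destroyed; in release builds the scope is an empty object.
//
//   void MustNotSwitchContext(Isolate* isolate) {
//     AssertNoContextChange ncc(isolate);
//     // ... code that must not call isolate->set_context(...) ...
//   }                                  // DCHECKs the context in debug builds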
1416  
1417  
1418  class ExecutionAccess BASE_EMBEDDED {
1419   public:
1420    explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
1421      Lock(isolate);
1422    }
1423    ~ExecutionAccess() { Unlock(isolate_); }
1424  
1425    static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
1426    static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1427  
1428    static bool TryLock(Isolate* isolate) {
1429      return isolate->break_access()->TryLock();
1430    }
1431  
1432   private:
1433    Isolate* isolate_;
1434  };
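// Usage sketch (hypothetical call site): ExecutionAccess is a RAII wrapper
// around the isolate's break_access() lock, so the lock is released even if
// the protected code returns early.
//
//   void WithBreakAccess(Isolate* isolate) {
//     ExecutionAccess access(isolate);   // Lock(isolate) in the constructor
//     // ... code that must hold break_access() ...
//   }                                    // Unlock(isolate_) in the destructor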
1435  
1436  
1437  // Support for checking for stack-overflows.
1438  class StackLimitCheck BASE_EMBEDDED {
1439   public:
1440    explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }
1441  
1442    // Use this to check for stack-overflows in C++ code.
1443    inline bool HasOverflowed() const {
1444      StackGuard* stack_guard = isolate_->stack_guard();
1445      return GetCurrentStackPosition() < stack_guard->real_climit();
1446    }
1447  
1448    // Use this to check for stack-overflow when entering runtime from JS code.
1449    bool JsHasOverflowed() const;
1450  
1451   private:
1452    Isolate* isolate_;
1453  };
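// Typical call-site sketch (the StackOverflow() bail-out is an assumption
// about the surrounding code, not something defined in this header):
//
//   StackLimitCheck check(isolate);
//   if (check.HasOverflowed()) {
//     return isolate->StackOverflow();   // report the overflow and bail out
//   }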
1454  
1455  
1456  // Support for temporarily postponing interrupts. When the outermost
1457  // postpone scope is left, the interrupts will be re-enabled and any
1458  // interrupts that occurred while in the scope will be taken into
1459  // account.
1460  class PostponeInterruptsScope BASE_EMBEDDED {
1461   public:
1462    PostponeInterruptsScope(Isolate* isolate,
1463                            int intercept_mask = StackGuard::ALL_INTERRUPTS)
1464        : stack_guard_(isolate->stack_guard()),
1465          intercept_mask_(intercept_mask),
1466          intercepted_flags_(0) {
1467      stack_guard_->PushPostponeInterruptsScope(this);
1468    }
1469  
1470    ~PostponeInterruptsScope() {
1471      stack_guard_->PopPostponeInterruptsScope();
1472    }
1473  
1474    // Find the bottom-most scope that intercepts this interrupt.
1475    // Return whether the interrupt has been intercepted.
1476    bool Intercept(StackGuard::InterruptFlag flag);
1477  
1478   private:
1479    StackGuard* stack_guard_;
1480    int intercept_mask_;
1481    int intercepted_flags_;
1482    PostponeInterruptsScope* prev_;
1483  
1484    friend class StackGuard;
1485  };
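// Usage sketch (hypothetical call site): while the scope is alive, interrupts
// matching the intercept mask are intercepted rather than handled; they are
// acted upon once the outermost scope has been popped.
//
//   void RunWithoutInterruption(Isolate* isolate) {
//     PostponeInterruptsScope postpone(isolate);  // ALL_INTERRUPTS by default
//     // ... work that must not be interrupted ...
//   }                                             // interrupts re-enabled here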
1486  
1487  
1488  class CodeTracer FINAL : public Malloced {
1489   public:
1490    explicit CodeTracer(int isolate_id)
1491        : file_(NULL),
1492          scope_depth_(0) {
1493      if (!ShouldRedirect()) {
1494        file_ = stdout;
1495        return;
1496      }
1497  
1498      if (FLAG_redirect_code_traces_to == NULL) {
1499        SNPrintF(filename_,
1500                 "code-%d-%d.asm",
1501                 base::OS::GetCurrentProcessId(),
1502                 isolate_id);
1503      } else {
1504        StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
1505      }
1506  
1507      WriteChars(filename_.start(), "", 0, false);
1508    }
1509  
1510    class Scope {
1511     public:
1512      explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
1513      ~Scope() { tracer_->CloseFile(); }
1514  
1515      FILE* file() const { return tracer_->file(); }
1516  
1517     private:
1518      CodeTracer* tracer_;
1519    };
1520  
1521    void OpenFile() {
1522      if (!ShouldRedirect()) {
1523        return;
1524      }
1525  
1526      if (file_ == NULL) {
1527        file_ = base::OS::FOpen(filename_.start(), "a");
1528      }
1529  
1530      scope_depth_++;
1531    }
1532  
1533    void CloseFile() {
1534      if (!ShouldRedirect()) {
1535        return;
1536      }
1537  
1538      if (--scope_depth_ == 0) {
1539        fclose(file_);
1540        file_ = NULL;
1541      }
1542    }
1543  
1544    FILE* file() const { return file_; }
1545  
1546   private:
1547    static bool ShouldRedirect() {
1548      return FLAG_redirect_code_traces;
1549    }
1550  
1551    EmbeddedVector<char, 128> filename_;
1552    FILE* file_;
1553    int scope_depth_;
1554  };
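// Usage sketch (assumed caller-side pattern): CodeTracer::Scope keeps the
// trace file open for the duration of one tracing operation and hands out the
// FILE* to print to; nested scopes share the same file via scope_depth_.
//
//   void TraceSomething(CodeTracer* tracer) {
//     CodeTracer::Scope scope(tracer);           // OpenFile()
//     PrintF(scope.file(), "tracing output\n");  // assumes V8's PrintF helper
//   }                                            // CloseFile() on scope exit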
1555  
1556  } }  // namespace v8::internal
1557  
1558  #endif  // V8_ISOLATE_H_
1559