// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include "v8.h"

#include "ast.h"
#include "bootstrapper.h"
#include "codegen.h"
#include "compilation-cache.h"
#include "debug.h"
#include "deoptimizer.h"
#include "heap-profiler.h"
#include "hydrogen.h"
#include "isolate.h"
#include "lithium-allocator.h"
#include "log.h"
#include "regexp-stack.h"
#include "runtime-profiler.h"
#include "scanner.h"
#include "scopeinfo.h"
#include "serialize.h"
#include "simulator.h"
#include "spaces.h"
#include "stub-cache.h"
#include "version.h"


namespace v8 {
namespace internal {

Atomic32 ThreadId::highest_thread_id_ = 0;

int ThreadId::AllocateThreadId() {
  int new_id = NoBarrier_AtomicIncrement(&highest_thread_id_, 1);
  return new_id;
}

int ThreadId::GetCurrentThreadId() {
  int thread_id = Thread::GetThreadLocalInt(Isolate::thread_id_key_);
  if (thread_id == 0) {
    thread_id = AllocateThreadId();
    Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
  }
  return thread_id;
}

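// A standalone analogue of the lazy id assignment above, for reference
// (illustrative sketch only; it uses C++11 <atomic> and thread_local
// instead of the Thread::GetThreadLocalInt/SetThreadLocalInt helpers):
//
//   #include <atomic>
//   static std::atomic<int> highest_id(0);
//   int CurrentThreadId() {
//     static thread_local int id = 0;  // 0 means "no id assigned yet".
//     if (id == 0) id = ++highest_id;  // Ids start at 1, as above.
//     return id;
//   }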

// Create a dummy thread that will wait forever on a semaphore. The only
// purpose of this thread is to provide some stack area to save essential
// data into for use by a stacks-only core dump (aka minidump).
class PreallocatedMemoryThread: public Thread {
 public:
  char* data() {
    if (data_ready_semaphore_ != NULL) {
      // Initial access is guarded until the data has been published.
      data_ready_semaphore_->Wait();
      delete data_ready_semaphore_;
      data_ready_semaphore_ = NULL;
    }
    return data_;
  }

  unsigned length() {
    if (data_ready_semaphore_ != NULL) {
      // Initial access is guarded until the data has been published.
      data_ready_semaphore_->Wait();
      delete data_ready_semaphore_;
      data_ready_semaphore_ = NULL;
    }
    return length_;
  }

  // Stop the PreallocatedMemoryThread and release its resources.
  void StopThread() {
    keep_running_ = false;
    wait_for_ever_semaphore_->Signal();

    // Wait for the thread to terminate.
    Join();

    if (data_ready_semaphore_ != NULL) {
      delete data_ready_semaphore_;
      data_ready_semaphore_ = NULL;
    }

    delete wait_for_ever_semaphore_;
    wait_for_ever_semaphore_ = NULL;
  }

 protected:
  // When the thread starts running it will allocate a fixed number of bytes
  // on the stack and publish the location of this memory for others to use.
  void Run() {
    EmbeddedVector<char, 15 * 1024> local_buffer;

    // Initialize the buffer with a known good value.
    OS::StrNCpy(local_buffer, "Trace data was not generated.\n",
                local_buffer.length());

    // Publish the local buffer and signal its availability.
    data_ = local_buffer.start();
    length_ = local_buffer.length();
    data_ready_semaphore_->Signal();

    while (keep_running_) {
      // This thread will wait here until the end of time.
      wait_for_ever_semaphore_->Wait();
    }

    // Make sure we access the buffer after the wait to remove all possibility
    // of it being optimized away.
    OS::StrNCpy(local_buffer, "PreallocatedMemoryThread shutting down.\n",
                local_buffer.length());
  }


 private:
  explicit PreallocatedMemoryThread(Isolate* isolate)
      : Thread(isolate, "v8:PreallocMem"),
        keep_running_(true),
        wait_for_ever_semaphore_(OS::CreateSemaphore(0)),
        data_ready_semaphore_(OS::CreateSemaphore(0)),
        data_(NULL),
        length_(0) {
  }

  // Used to make sure that the thread keeps looping even for spurious wakeups.
  bool keep_running_;

  // This semaphore is used by the PreallocatedMemoryThread to wait for ever.
  Semaphore* wait_for_ever_semaphore_;
  // Semaphore to signal that the data has been initialized.
  Semaphore* data_ready_semaphore_;

  // Location and size of the preallocated memory block.
  char* data_;
  unsigned length_;

  friend class Isolate;

  DISALLOW_COPY_AND_ASSIGN(PreallocatedMemoryThread);
};


void Isolate::PreallocatedMemoryThreadStart() {
  if (preallocated_memory_thread_ != NULL) return;
  preallocated_memory_thread_ = new PreallocatedMemoryThread(this);
  preallocated_memory_thread_->Start();
}


void Isolate::PreallocatedMemoryThreadStop() {
  if (preallocated_memory_thread_ == NULL) return;
  preallocated_memory_thread_->StopThread();
  // Done with the thread entirely.
  delete preallocated_memory_thread_;
  preallocated_memory_thread_ = NULL;
}


void Isolate::PreallocatedStorageInit(size_t size) {
  ASSERT(free_list_.next_ == &free_list_);
  ASSERT(free_list_.previous_ == &free_list_);
  PreallocatedStorage* free_chunk =
      reinterpret_cast<PreallocatedStorage*>(new char[size]);
  free_list_.next_ = free_list_.previous_ = free_chunk;
  free_chunk->next_ = free_chunk->previous_ = &free_list_;
  free_chunk->size_ = size - sizeof(PreallocatedStorage);
  preallocated_storage_preallocated_ = true;
}


void* Isolate::PreallocatedStorageNew(size_t size) {
  if (!preallocated_storage_preallocated_) {
    return FreeStoreAllocationPolicy::New(size);
  }
  ASSERT(free_list_.next_ != &free_list_);
  ASSERT(free_list_.previous_ != &free_list_);

  size = (size + kPointerSize - 1) & ~(kPointerSize - 1);
  // Search for exact fit.
  for (PreallocatedStorage* storage = free_list_.next_;
       storage != &free_list_;
       storage = storage->next_) {
    if (storage->size_ == size) {
      storage->Unlink();
      storage->LinkTo(&in_use_list_);
      return reinterpret_cast<void*>(storage + 1);
    }
  }
  // Search for first fit.
  for (PreallocatedStorage* storage = free_list_.next_;
       storage != &free_list_;
       storage = storage->next_) {
    if (storage->size_ >= size + sizeof(PreallocatedStorage)) {
      storage->Unlink();
      storage->LinkTo(&in_use_list_);
      PreallocatedStorage* left_over =
          reinterpret_cast<PreallocatedStorage*>(
              reinterpret_cast<char*>(storage + 1) + size);
      left_over->size_ = storage->size_ - size - sizeof(PreallocatedStorage);
      ASSERT(size + left_over->size_ + sizeof(PreallocatedStorage) ==
             storage->size_);
      storage->size_ = size;
      left_over->LinkTo(&free_list_);
      return reinterpret_cast<void*>(storage + 1);
    }
  }
  // Allocation failure.
  ASSERT(false);
  return NULL;
}
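
// Worked example of the fit logic in PreallocatedStorageNew() above
// (illustrative only; assumes kPointerSize == 8 and
// sizeof(PreallocatedStorage) == 24, i.e. two list pointers plus a size):
//   - A request for 13 bytes is rounded up to a pointer-size multiple:
//     (13 + 7) & ~7 == 16.
//   - Exact fit: a free chunk whose size_ is exactly 16 is unlinked, moved
//     to in_use_list_, and the caller gets the memory right after the
//     PreallocatedStorage header (storage + 1).
//   - First fit: a free chunk with size_ == 64 is split into a 16-byte
//     in-use chunk and a new free chunk of 64 - 16 - 24 == 24 bytes,
//     which is linked back onto free_list_.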


// We don't attempt to coalesce.
void Isolate::PreallocatedStorageDelete(void* p) {
  if (p == NULL) {
    return;
  }
  if (!preallocated_storage_preallocated_) {
    FreeStoreAllocationPolicy::Delete(p);
    return;
  }
  PreallocatedStorage* storage = reinterpret_cast<PreallocatedStorage*>(p) - 1;
  ASSERT(storage->next_->previous_ == storage);
  ASSERT(storage->previous_->next_ == storage);
  storage->Unlink();
  storage->LinkTo(&free_list_);
}


Isolate* Isolate::default_isolate_ = NULL;
Thread::LocalStorageKey Isolate::isolate_key_;
Thread::LocalStorageKey Isolate::thread_id_key_;
Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
Mutex* Isolate::process_wide_mutex_ = OS::CreateMutex();
Isolate::ThreadDataTable* Isolate::thread_data_table_ = NULL;


class IsolateInitializer {
 public:
  IsolateInitializer() {
    Isolate::EnsureDefaultIsolate();
  }
};

static IsolateInitializer* EnsureDefaultIsolateAllocated() {
  // TODO(isolates): Use the system threading API to do this once?
  static IsolateInitializer static_initializer;
  return &static_initializer;
}

// This variable is only needed to trigger static initialization.
static IsolateInitializer* static_initializer = EnsureDefaultIsolateAllocated();
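
// The pattern above relies on dynamic initialization of a namespace-scope
// pointer: initializing static_initializer calls
// EnsureDefaultIsolateAllocated(), whose function-local static constructs an
// IsolateInitializer, whose constructor in turn runs
// Isolate::EnsureDefaultIsolate() before main() starts. A minimal sketch of
// the same idiom (illustrative only; RunOnce/ForceRunOnce are hypothetical
// names):
//
//   struct RunOnce { RunOnce() { /* one-time setup */ } };
//   static RunOnce* ForceRunOnce() {
//     static RunOnce once;  // Constructed the first time this is called.
//     return &once;
//   }
//   static RunOnce* run_once_trigger = ForceRunOnce();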


Isolate::PerIsolateThreadData* Isolate::AllocatePerIsolateThreadData(
    ThreadId thread_id) {
  ASSERT(!thread_id.Equals(ThreadId::Invalid()));
  PerIsolateThreadData* per_thread = new PerIsolateThreadData(this, thread_id);
  {
    ScopedLock lock(process_wide_mutex_);
    ASSERT(thread_data_table_->Lookup(this, thread_id) == NULL);
    thread_data_table_->Insert(per_thread);
    ASSERT(thread_data_table_->Lookup(this, thread_id) == per_thread);
  }
  return per_thread;
}


Isolate::PerIsolateThreadData*
    Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = NULL;
  {
    ScopedLock lock(process_wide_mutex_);
    per_thread = thread_data_table_->Lookup(this, thread_id);
    if (per_thread == NULL) {
      per_thread = AllocatePerIsolateThreadData(thread_id);
    }
  }
  return per_thread;
}


void Isolate::EnsureDefaultIsolate() {
  ScopedLock lock(process_wide_mutex_);
  if (default_isolate_ == NULL) {
    isolate_key_ = Thread::CreateThreadLocalKey();
    thread_id_key_ = Thread::CreateThreadLocalKey();
    per_isolate_thread_data_key_ = Thread::CreateThreadLocalKey();
    thread_data_table_ = new Isolate::ThreadDataTable();
    default_isolate_ = new Isolate();
  }
  // Can't use SetIsolateThreadLocals(default_isolate_, NULL) here
  // because a non-null thread data may already be set.
  Thread::SetThreadLocal(isolate_key_, default_isolate_);
}


Debugger* Isolate::GetDefaultIsolateDebugger() {
  EnsureDefaultIsolate();
  return default_isolate_->debugger();
}


StackGuard* Isolate::GetDefaultIsolateStackGuard() {
  EnsureDefaultIsolate();
  return default_isolate_->stack_guard();
}


void Isolate::EnterDefaultIsolate() {
  EnsureDefaultIsolate();
  ASSERT(default_isolate_ != NULL);

  PerIsolateThreadData* data = CurrentPerIsolateThreadData();
  // If not yet in default isolate - enter it.
  if (data == NULL || data->isolate() != default_isolate_) {
    default_isolate_->Enter();
  }
}


Isolate* Isolate::GetDefaultIsolateForLocking() {
  EnsureDefaultIsolate();
  return default_isolate_;
}


Isolate::ThreadDataTable::ThreadDataTable()
    : list_(NULL) {
}


Isolate::PerIsolateThreadData*
    Isolate::ThreadDataTable::Lookup(Isolate* isolate,
                                     ThreadId thread_id) {
  for (PerIsolateThreadData* data = list_; data != NULL; data = data->next_) {
    if (data->Matches(isolate, thread_id)) return data;
  }
  return NULL;
}


void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
  if (list_ != NULL) list_->prev_ = data;
  data->next_ = list_;
  list_ = data;
}


void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
  if (list_ == data) list_ = data->next_;
  if (data->next_ != NULL) data->next_->prev_ = data->prev_;
  if (data->prev_ != NULL) data->prev_->next_ = data->next_;
}


void Isolate::ThreadDataTable::Remove(Isolate* isolate,
                                      ThreadId thread_id) {
  PerIsolateThreadData* data = Lookup(isolate, thread_id);
  if (data != NULL) {
    Remove(data);
  }
}


#ifdef DEBUG
#define TRACE_ISOLATE(tag)                                              \
  do {                                                                  \
    if (FLAG_trace_isolates) {                                          \
      PrintF("Isolate %p " #tag "\n", reinterpret_cast<void*>(this));   \
    }                                                                   \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif
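
// Note on the macro above: wrapping the body in do { ... } while (false)
// makes TRACE_ISOLATE(tag) expand to a single statement that requires a
// trailing semicolon, so it composes safely with if/else. Illustrative
// comparison (BAD_TRACE is a hypothetical variant defined with bare braces
// instead):
//
//   if (cond) TRACE_ISOLATE(enter); else Cleanup();  // OK.
//   if (cond) BAD_TRACE(enter); else Cleanup();      // Would not compile:
//                                                    // the "};" ends the if
//                                                    // before the else.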


Isolate::Isolate()
    : state_(UNINITIALIZED),
      entry_stack_(NULL),
      stack_trace_nesting_level_(0),
      incomplete_message_(NULL),
      preallocated_memory_thread_(NULL),
      preallocated_message_space_(NULL),
      bootstrapper_(NULL),
      runtime_profiler_(NULL),
      compilation_cache_(NULL),
      counters_(NULL),
      code_range_(NULL),
      // Must be initialized early to allow v8::SetResourceConstraints calls.
      break_access_(OS::CreateMutex()),
      debugger_initialized_(false),
      // Must be initialized early to allow v8::Debug calls.
      debugger_access_(OS::CreateMutex()),
      logger_(NULL),
      stats_table_(NULL),
      stub_cache_(NULL),
      deoptimizer_data_(NULL),
      capture_stack_trace_for_uncaught_exceptions_(false),
      stack_trace_for_uncaught_exceptions_frame_limit_(0),
      stack_trace_for_uncaught_exceptions_options_(StackTrace::kOverview),
      transcendental_cache_(NULL),
      memory_allocator_(NULL),
      keyed_lookup_cache_(NULL),
      context_slot_cache_(NULL),
      descriptor_lookup_cache_(NULL),
      handle_scope_implementer_(NULL),
      unicode_cache_(NULL),
      in_use_list_(0),
      free_list_(0),
      preallocated_storage_preallocated_(false),
      pc_to_code_cache_(NULL),
      write_input_buffer_(NULL),
      global_handles_(NULL),
      context_switcher_(NULL),
      thread_manager_(NULL),
      ast_sentinels_(NULL),
      string_tracker_(NULL),
      regexp_stack_(NULL),
      frame_element_constant_list_(0),
      result_constant_list_(0) {
  TRACE_ISOLATE(constructor);

  memset(isolate_addresses_, 0,
         sizeof(isolate_addresses_[0]) * (k_isolate_address_count + 1));

  heap_.isolate_ = this;
  zone_.isolate_ = this;
  stack_guard_.isolate_ = this;

#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
    defined(V8_TARGET_ARCH_MIPS) && !defined(__mips__)
  simulator_initialized_ = false;
  simulator_i_cache_ = NULL;
  simulator_redirection_ = NULL;
#endif

  thread_manager_ = new ThreadManager();
  thread_manager_->isolate_ = this;

#ifdef DEBUG
  // heap_histograms_ initializes itself.
  memset(&js_spill_information_, 0, sizeof(js_spill_information_));
  memset(code_kind_statistics_, 0,
         sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS);
#endif

#ifdef ENABLE_DEBUGGER_SUPPORT
  debug_ = NULL;
  debugger_ = NULL;
#endif

#ifdef ENABLE_LOGGING_AND_PROFILING
  producer_heap_profile_ = NULL;
#endif

  handle_scope_data_.Initialize();

#define ISOLATE_INIT_EXECUTE(type, name, initial_value)  \
  name##_ = (initial_value);
  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
#undef ISOLATE_INIT_EXECUTE

#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length)  \
  memset(name##_, 0, sizeof(type) * length);
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
#undef ISOLATE_INIT_ARRAY_EXECUTE
}

void Isolate::TearDown() {
  TRACE_ISOLATE(tear_down);

  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = CurrentPerIsolateThreadData();
  Isolate* saved_isolate = UncheckedCurrent();
  SetIsolateThreadLocals(this, NULL);

  Deinit();

  if (!IsDefaultIsolate()) {
    delete this;
  }

  // Restore the previous current isolate.
  SetIsolateThreadLocals(saved_isolate, saved_data);
}


void Isolate::Deinit() {
  if (state_ == INITIALIZED) {
    TRACE_ISOLATE(deinit);

    if (FLAG_hydrogen_stats) HStatistics::Instance()->Print();

    // We must stop the logger before we tear down other components.
    logger_->EnsureTickerStopped();

    delete deoptimizer_data_;
    deoptimizer_data_ = NULL;
    if (FLAG_preemption) {
      v8::Locker locker;
      v8::Locker::StopPreemption();
    }
    builtins_.TearDown();
    bootstrapper_->TearDown();

    // Remove the external reference to the preallocated stack memory.
    delete preallocated_message_space_;
    preallocated_message_space_ = NULL;
    PreallocatedMemoryThreadStop();

    HeapProfiler::TearDown();
    CpuProfiler::TearDown();
    if (runtime_profiler_ != NULL) {
      runtime_profiler_->TearDown();
      delete runtime_profiler_;
      runtime_profiler_ = NULL;
    }
    heap_.TearDown();
    logger_->TearDown();

    // The default isolate is re-initializable due to legacy API.
    state_ = UNINITIALIZED;
  }
}


void Isolate::SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data) {
  Thread::SetThreadLocal(isolate_key_, isolate);
  Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
}


Isolate::~Isolate() {
  TRACE_ISOLATE(destructor);

#ifdef ENABLE_LOGGING_AND_PROFILING
  delete producer_heap_profile_;
  producer_heap_profile_ = NULL;
#endif

  delete unicode_cache_;
  unicode_cache_ = NULL;

  delete regexp_stack_;
  regexp_stack_ = NULL;

  delete ast_sentinels_;
  ast_sentinels_ = NULL;

  delete descriptor_lookup_cache_;
  descriptor_lookup_cache_ = NULL;
  delete context_slot_cache_;
  context_slot_cache_ = NULL;
  delete keyed_lookup_cache_;
  keyed_lookup_cache_ = NULL;

  delete transcendental_cache_;
  transcendental_cache_ = NULL;
  delete stub_cache_;
  stub_cache_ = NULL;
  delete stats_table_;
  stats_table_ = NULL;

  delete logger_;
  logger_ = NULL;

  delete counters_;
  counters_ = NULL;

  delete handle_scope_implementer_;
  handle_scope_implementer_ = NULL;
  delete break_access_;
  break_access_ = NULL;

  delete compilation_cache_;
  compilation_cache_ = NULL;
  delete bootstrapper_;
  bootstrapper_ = NULL;
  delete pc_to_code_cache_;
  pc_to_code_cache_ = NULL;
  delete write_input_buffer_;
  write_input_buffer_ = NULL;

  delete context_switcher_;
  context_switcher_ = NULL;
  delete thread_manager_;
  thread_manager_ = NULL;

  delete string_tracker_;
  string_tracker_ = NULL;

  delete memory_allocator_;
  memory_allocator_ = NULL;
  delete code_range_;
  code_range_ = NULL;
  delete global_handles_;
  global_handles_ = NULL;

#ifdef ENABLE_DEBUGGER_SUPPORT
  delete debugger_;
  debugger_ = NULL;
  delete debug_;
  debug_ = NULL;
#endif
}


void Isolate::InitializeThreadLocal() {
  thread_local_top_.Initialize();
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();
}


void Isolate::PropagatePendingExceptionToExternalTryCatch() {
  ASSERT(has_pending_exception());

  bool external_caught = IsExternallyCaught();
  thread_local_top_.external_caught_exception_ = external_caught;

  if (!external_caught) return;

  if (thread_local_top_.pending_exception_ == Failure::OutOfMemoryException()) {
    // Do not propagate the OOM exception: we should kill the VM ASAP.
  } else if (thread_local_top_.pending_exception_ ==
             heap()->termination_exception()) {
    try_catch_handler()->can_continue_ = false;
    try_catch_handler()->exception_ = heap()->null_value();
  } else {
    // At this point all non-object (failure) exceptions have
    // been dealt with so this shouldn't fail.
    ASSERT(!pending_exception()->IsFailure());
    try_catch_handler()->can_continue_ = true;
    try_catch_handler()->exception_ = pending_exception();
    if (!thread_local_top_.pending_message_obj_->IsTheHole()) {
      try_catch_handler()->message_ = thread_local_top_.pending_message_obj_;
    }
  }
}

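// For reference, the embedder-visible counterpart of the propagation above is
// the public v8::TryCatch handler. A minimal usage sketch (illustrative only;
// assumes an entered isolate and context and a compiled v8::Script handle
// named script):
//
//   v8::TryCatch try_catch;
//   v8::Local<v8::Value> result = script->Run();
//   if (result.IsEmpty() && try_catch.HasCaught()) {
//     bool can_continue = try_catch.CanContinue();              // can_continue_
//     v8::Local<v8::Value> exception = try_catch.Exception();   // exception_
//     v8::Local<v8::Message> message = try_catch.Message();     // message_
//   }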

void Isolate::InitializeLoggingAndCounters() {
  if (logger_ == NULL) {
    logger_ = new Logger;
  }
  if (counters_ == NULL) {
    counters_ = new Counters;
  }
}


void Isolate::InitializeDebugger() {
#ifdef ENABLE_DEBUGGER_SUPPORT
  ScopedLock lock(debugger_access_);
  if (NoBarrier_Load(&debugger_initialized_)) return;
  InitializeLoggingAndCounters();
  debug_ = new Debug(this);
  debugger_ = new Debugger(this);
  Release_Store(&debugger_initialized_, true);
#endif
}


bool Isolate::Init(Deserializer* des) {
  ASSERT(state_ != INITIALIZED);
  ASSERT(Isolate::Current() == this);
  TRACE_ISOLATE(init);

#ifdef DEBUG
  // The initialization process does not handle memory exhaustion.
  DisallowAllocationFailure disallow_allocation_failure;
#endif

  InitializeLoggingAndCounters();

  InitializeDebugger();

  memory_allocator_ = new MemoryAllocator(this);
  code_range_ = new CodeRange(this);

  // Safe after setting Heap::isolate_, initializing StackGuard and
  // ensuring that Isolate::Current() == this.
  heap_.SetStackLimits();

#define C(name) isolate_addresses_[Isolate::k_##name] = \
    reinterpret_cast<Address>(name());
  ISOLATE_ADDRESS_LIST(C)
  ISOLATE_ADDRESS_LIST_PROF(C)
#undef C

  string_tracker_ = new StringTracker();
  string_tracker_->isolate_ = this;
  compilation_cache_ = new CompilationCache(this);
  transcendental_cache_ = new TranscendentalCache();
  keyed_lookup_cache_ = new KeyedLookupCache();
  context_slot_cache_ = new ContextSlotCache();
  descriptor_lookup_cache_ = new DescriptorLookupCache();
  unicode_cache_ = new UnicodeCache();
  pc_to_code_cache_ = new PcToCodeCache(this);
  write_input_buffer_ = new StringInputBuffer();
  global_handles_ = new GlobalHandles(this);
  bootstrapper_ = new Bootstrapper();
  handle_scope_implementer_ = new HandleScopeImplementer();
  stub_cache_ = new StubCache(this);
  ast_sentinels_ = new AstSentinels();
  regexp_stack_ = new RegExpStack();
  regexp_stack_->isolate_ = this;

#ifdef ENABLE_LOGGING_AND_PROFILING
  producer_heap_profile_ = new ProducerHeapProfile();
  producer_heap_profile_->isolate_ = this;
#endif

  // Enable logging before setting up the heap.
  logger_->Setup();

  CpuProfiler::Setup();
  HeapProfiler::Setup();

  // Initialize other runtime facilities.
#if defined(USE_SIMULATOR)
#if defined(V8_TARGET_ARCH_ARM) || defined(V8_TARGET_ARCH_MIPS)
  Simulator::Initialize();
#endif
#endif

  {  // NOLINT
    // Ensure that the thread has a valid stack guard. The v8::Locker object
    // will ensure this too, but we don't have to use lockers if we are only
    // using one thread.
    ExecutionAccess lock(this);
    stack_guard_.InitThread(lock);
  }

  // Setup the object heap.
  const bool create_heap_objects = (des == NULL);
  ASSERT(!heap_.HasBeenSetup());
  if (!heap_.Setup(create_heap_objects)) {
    V8::SetFatalError();
    return false;
  }

  bootstrapper_->Initialize(create_heap_objects);
  builtins_.Setup(create_heap_objects);

  InitializeThreadLocal();

  // Only preallocate on the first initialization.
  if (FLAG_preallocate_message_memory && preallocated_message_space_ == NULL) {
    // Start the thread which will set aside some memory.
    PreallocatedMemoryThreadStart();
    preallocated_message_space_ =
        new NoAllocationStringAllocator(
            preallocated_memory_thread_->data(),
            preallocated_memory_thread_->length());
    PreallocatedStorageInit(preallocated_memory_thread_->length() / 4);
  }

  if (FLAG_preemption) {
    v8::Locker locker;
    v8::Locker::StartPreemption(100);
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  debug_->Setup(create_heap_objects);
#endif
  stub_cache_->Initialize(create_heap_objects);

  // If we are deserializing, read the state into the now-empty heap.
  if (des != NULL) {
    des->Deserialize();
    stub_cache_->Clear();
  }

  // Deserializing may put strange things in the root array's copy of the
  // stack guard.
  heap_.SetStackLimits();

  deoptimizer_data_ = new DeoptimizerData;
  runtime_profiler_ = new RuntimeProfiler(this);
  runtime_profiler_->Setup();

  // If we are deserializing, log non-function code objects and compiled
  // functions found in the snapshot.
  if (des != NULL && FLAG_log_code) {
    HandleScope scope;
    LOG(this, LogCodeObjects());
    LOG(this, LogCompiledFunctions());
  }

  state_ = INITIALIZED;
  return true;
}


// Initialized lazily to allow early
// v8::V8::SetAddHistogramSampleFunction calls.
StatsTable* Isolate::stats_table() {
  if (stats_table_ == NULL) {
    stats_table_ = new StatsTable;
  }
  return stats_table_;
}


void Isolate::Enter() {
  Isolate* current_isolate = NULL;
  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
  if (current_data != NULL) {
    current_isolate = current_data->isolate_;
    ASSERT(current_isolate != NULL);
    if (current_isolate == this) {
      ASSERT(Current() == this);
      ASSERT(entry_stack_ != NULL);
      ASSERT(entry_stack_->previous_thread_data == NULL ||
             entry_stack_->previous_thread_data->thread_id().Equals(
                 ThreadId::Current()));
      // Same thread re-enters the isolate, no need to re-init anything.
      entry_stack_->entry_count++;
      return;
    }
  }

  // Threads can have the default isolate set into TLS as Current but not yet
  // have PerIsolateThreadData for it, as that requires a more advanced phase
  // of initialization. For example, a thread might be the one that the system
  // used for static initializers - in this case the default isolate is set in
  // TLS but the thread did not yet Enter the isolate. If PerIsolateThreadData
  // is not there, use the isolate set in TLS.
  if (current_isolate == NULL) {
    current_isolate = Isolate::UncheckedCurrent();
  }

  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
  ASSERT(data != NULL);
  ASSERT(data->isolate_ == this);

  EntryStackItem* item = new EntryStackItem(current_data,
                                            current_isolate,
                                            entry_stack_);
  entry_stack_ = item;

  SetIsolateThreadLocals(this, data);

  // In case it's the first time some thread enters the isolate.
  set_thread_id(data->thread_id());
}


void Isolate::Exit() {
  ASSERT(entry_stack_ != NULL);
  ASSERT(entry_stack_->previous_thread_data == NULL ||
         entry_stack_->previous_thread_data->thread_id().Equals(
             ThreadId::Current()));

  if (--entry_stack_->entry_count > 0) return;

  ASSERT(CurrentPerIsolateThreadData() != NULL);
  ASSERT(CurrentPerIsolateThreadData()->isolate_ == this);

  // Pop the stack.
  EntryStackItem* item = entry_stack_;
  entry_stack_ = item->previous_item;

  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
  Isolate* previous_isolate = item->previous_isolate;

  delete item;

  // Reinit the current thread for the isolate it was running before this one.
  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
}

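// Embedder-side sketch of the Enter/Exit pairing handled above (illustrative
// only; it assumes the public v8::Isolate API from v8.h):
//
//   v8::Isolate* isolate = v8::Isolate::New();
//   isolate->Enter();   // Pushes an EntryStackItem for this thread.
//   isolate->Enter();   // Same thread re-enters: only entry_count goes up.
//   isolate->Exit();    // entry_count drops back to 1.
//   isolate->Exit();    // Pops the stack and restores the previous TLS state.
//   isolate->Dispose();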

void Isolate::ResetEagerOptimizingData() {
  compilation_cache_->ResetEagerOptimizingData();
}


#ifdef DEBUG
#define ISOLATE_FIELD_OFFSET(type, name, ignored)  \
  const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

} }  // namespace v8::internal