1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "accessors.h"
31 #include "api.h"
32 #include "execution.h"
33 #include "global-handles.h"
34 #include "ic-inl.h"
35 #include "natives.h"
36 #include "platform.h"
37 #include "runtime.h"
38 #include "serialize.h"
39 #include "stub-cache.h"
40 #include "v8threads.h"
41
42 namespace v8 {
43 namespace internal {
44
45 // 32-bit encoding: a RelativeAddress must be able to fit in a
46 // pointer: it is encoded as an Address with (from LS to MS bits):
47 // - 2 bits identifying this as a HeapObject.
48 // - 4 bits to encode the AllocationSpace (including special values for
49 // code and fixed arrays in LO space)
50 // - 27 bits identifying a word in the space, in one of three formats:
51 // - paged spaces: 16 bits of page number, 11 bits of word offset in page
52 // - NEW space: 27 bits of word offset
53 // - LO space: 27 bits of page number
54
// Bit layout of an encoded RelativeAddress (see comment above): the
// AllocationSpace tag sits just above the heap-object tag bits.
const int kSpaceShift = kHeapObjectTagSize;
const int kSpaceBits = 4;
const int kSpaceMask = (1 << kSpaceBits) - 1;

// Word offset within a page (paged spaces only).
const int kOffsetShift = kSpaceShift + kSpaceBits;
const int kOffsetBits = 11;
const int kOffsetMask = (1 << kOffsetBits) - 1;

// Page number (paged spaces only).
const int kPageShift = kOffsetShift + kOffsetBits;
const int kPageBits = 32 - (kOffsetBits + kSpaceBits + kHeapObjectTagSize);
const int kPageMask = (1 << kPageBits) - 1;

// Combined page+offset field, used by NEW space (as a word offset) and
// LO space (as a page index), which each need the full width.
const int kPageAndOffsetShift = kOffsetShift;
const int kPageAndOffsetBits = kPageBits + kOffsetBits;
const int kPageAndOffsetMask = (1 << kPageAndOffsetBits) - 1;

// These values are special allocation space tags used for
// serialization.
// Mark the pages executable on platforms that support it.
const int kLargeCode = LAST_SPACE + 1;
// Allocate extra remembered-set bits.
const int kLargeFixedArray = LAST_SPACE + 2;
77
78
GetSpace(Address addr)79 static inline AllocationSpace GetSpace(Address addr) {
80 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
81 int space_number = (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask);
82 if (space_number > LAST_SPACE) space_number = LO_SPACE;
83 return static_cast<AllocationSpace>(space_number);
84 }
85
86
IsLargeExecutableObject(Address addr)87 static inline bool IsLargeExecutableObject(Address addr) {
88 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
89 const int space_number =
90 (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask);
91 return (space_number == kLargeCode);
92 }
93
94
IsLargeFixedArray(Address addr)95 static inline bool IsLargeFixedArray(Address addr) {
96 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
97 const int space_number =
98 (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask);
99 return (space_number == kLargeFixedArray);
100 }
101
102
PageIndex(Address addr)103 static inline int PageIndex(Address addr) {
104 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
105 return static_cast<int>(encoded >> kPageShift) & kPageMask;
106 }
107
108
PageOffset(Address addr)109 static inline int PageOffset(Address addr) {
110 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
111 const int offset = static_cast<int>(encoded >> kOffsetShift) & kOffsetMask;
112 return offset << kObjectAlignmentBits;
113 }
114
115
NewSpaceOffset(Address addr)116 static inline int NewSpaceOffset(Address addr) {
117 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
118 const int page_offset =
119 static_cast<int>(encoded >> kPageAndOffsetShift) & kPageAndOffsetMask;
120 return page_offset << kObjectAlignmentBits;
121 }
122
123
LargeObjectIndex(Address addr)124 static inline int LargeObjectIndex(Address addr) {
125 const intptr_t encoded = reinterpret_cast<intptr_t>(addr);
126 return static_cast<int>(encoded >> kPageAndOffsetShift) & kPageAndOffsetMask;
127 }
128
129
130 // A RelativeAddress encodes a heap address that is independent of
131 // the actual memory addresses in real heap. The general case (for the
132 // OLD, CODE and MAP spaces) is as a (space id, page number, page offset)
133 // triple. The NEW space has page number == 0, because there are no
134 // pages. The LARGE_OBJECT space has page offset = 0, since there is
135 // exactly one object per page. RelativeAddresses are encodable as
136 // Addresses, so that they can replace the map() pointers of
137 // HeapObjects. The encoded Addresses are also encoded as HeapObjects
138 // and allow for marking (is_marked() see mark(), clear_mark()...) as
139 // used by the Mark-Compact collector.
140
141 class RelativeAddress {
142 public:
RelativeAddress(AllocationSpace space,int page_index,int page_offset)143 RelativeAddress(AllocationSpace space,
144 int page_index,
145 int page_offset)
146 : space_(space), page_index_(page_index), page_offset_(page_offset) {
147 // Assert that the space encoding (plus the two pseudo-spaces for
148 // special large objects) fits in the available bits.
149 ASSERT(((LAST_SPACE + 2) & ~kSpaceMask) == 0);
150 ASSERT(space <= LAST_SPACE && space >= 0);
151 }
152
153 // Return the encoding of 'this' as an Address. Decode with constructor.
154 Address Encode() const;
155
space() const156 AllocationSpace space() const {
157 if (space_ > LAST_SPACE) return LO_SPACE;
158 return static_cast<AllocationSpace>(space_);
159 }
page_index() const160 int page_index() const { return page_index_; }
page_offset() const161 int page_offset() const { return page_offset_; }
162
in_paged_space() const163 bool in_paged_space() const {
164 return space_ == CODE_SPACE ||
165 space_ == OLD_POINTER_SPACE ||
166 space_ == OLD_DATA_SPACE ||
167 space_ == MAP_SPACE ||
168 space_ == CELL_SPACE;
169 }
170
next_address(int offset)171 void next_address(int offset) { page_offset_ += offset; }
next_page(int init_offset=0)172 void next_page(int init_offset = 0) {
173 page_index_++;
174 page_offset_ = init_offset;
175 }
176
177 #ifdef DEBUG
178 void Verify();
179 #endif
180
set_to_large_code_object()181 void set_to_large_code_object() {
182 ASSERT(space_ == LO_SPACE);
183 space_ = kLargeCode;
184 }
set_to_large_fixed_array()185 void set_to_large_fixed_array() {
186 ASSERT(space_ == LO_SPACE);
187 space_ = kLargeFixedArray;
188 }
189
190
191 private:
192 int space_;
193 int page_index_;
194 int page_offset_;
195 };
196
197
// Pack (space, page_index, page_offset) into a tagged pointer-sized
// value.  The layout is given by the k*Shift/k*Mask constants at the
// top of this file; the inverses are GetSpace(), PageIndex(),
// PageOffset(), NewSpaceOffset() and LargeObjectIndex().
Address RelativeAddress::Encode() const {
  ASSERT(page_index_ >= 0);
  int word_offset = 0;
  int result = 0;
  switch (space_) {
    case MAP_SPACE:
    case CELL_SPACE:
    case OLD_POINTER_SPACE:
    case OLD_DATA_SPACE:
    case CODE_SPACE:
      // Paged spaces: separate page-number and in-page word-offset fields.
      ASSERT_EQ(0, page_index_ & ~kPageMask);
      word_offset = page_offset_ >> kObjectAlignmentBits;
      ASSERT_EQ(0, word_offset & ~kOffsetMask);
      result = (page_index_ << kPageShift) | (word_offset << kOffsetShift);
      break;
    case NEW_SPACE:
      // New space: a single word offset from the start of the space.
      ASSERT_EQ(0, page_index_);
      word_offset = page_offset_ >> kObjectAlignmentBits;
      ASSERT_EQ(0, word_offset & ~kPageAndOffsetMask);
      result = word_offset << kPageAndOffsetShift;
      break;
    case LO_SPACE:
    case kLargeCode:
    case kLargeFixedArray:
      // Large object space: one object per page, so only a page index.
      ASSERT_EQ(0, page_offset_);
      ASSERT_EQ(0, page_index_ & ~kPageAndOffsetMask);
      result = page_index_ << kPageAndOffsetShift;
      break;
  }
  // OR in AllocationSpace and kHeapObjectTag
  ASSERT_EQ(0, space_ & ~kSpaceMask);
  result |= (space_ << kSpaceShift) | kHeapObjectTag;
  return reinterpret_cast<Address>(result);
}
232
233
234 #ifdef DEBUG
// Debug-only sanity check of the (space, page, offset) triple.
void RelativeAddress::Verify() {
  ASSERT(page_offset_ >= 0 && page_index_ >= 0);
  switch (space_) {
    case MAP_SPACE:
    case CELL_SPACE:
    case OLD_POINTER_SPACE:
    case OLD_DATA_SPACE:
    case CODE_SPACE:
      // Paged spaces: offset must lie within the usable part of a page.
      ASSERT(Page::kObjectStartOffset <= page_offset_ &&
             page_offset_ <= Page::kPageSize);
      break;
    case NEW_SPACE:
      // New space has no pages.
      ASSERT(page_index_ == 0);
      break;
    case LO_SPACE:
    case kLargeCode:
    case kLargeFixedArray:
      // Large object space: exactly one object per page, offset unused.
      ASSERT(page_offset_ == 0);
      break;
  }
}
256 #endif
257
// How the GC must treat a serialized large object; used by
// SimulatedHeapSpace::Allocate() to pick a pseudo-space tag.
enum GCTreatment {
  DataObject,     // Object that cannot contain a reference to new space.
  PointerObject,  // Object that can contain a reference to new space.
  CodeObject      // Object that contains executable code.
};
263
264 // A SimulatedHeapSpace simulates the allocation of objects in a page in
265 // the heap. It uses linear allocation - that is, it doesn't simulate the
266 // use of a free list. This simulated
267 // allocation must exactly match that done by Heap.
268
class SimulatedHeapSpace {
 public:
  // The default constructor initializes to an invalid state.
  SimulatedHeapSpace(): current_(LAST_SPACE, -1, -1) {}

  // Sets 'this' to the first address in 'space' that would be
  // returned by allocation in an empty heap.
  void InitEmptyHeap(AllocationSpace space);

  // Sets 'this' to the next address in 'space' that would be returned
  // by allocation in the current heap. Intended only for testing
  // serialization and deserialization in the current address space.
  void InitCurrentHeap(AllocationSpace space);

  // Returns the RelativeAddress where the next
  // object of 'size' bytes will be allocated, and updates 'this' to
  // point to the next free address beyond that object.
  RelativeAddress Allocate(int size, GCTreatment special_gc_treatment);

 private:
  // The next free (simulated) address in the space.
  RelativeAddress current_;
};
291
292
InitEmptyHeap(AllocationSpace space)293 void SimulatedHeapSpace::InitEmptyHeap(AllocationSpace space) {
294 switch (space) {
295 case MAP_SPACE:
296 case CELL_SPACE:
297 case OLD_POINTER_SPACE:
298 case OLD_DATA_SPACE:
299 case CODE_SPACE:
300 current_ = RelativeAddress(space, 0, Page::kObjectStartOffset);
301 break;
302 case NEW_SPACE:
303 case LO_SPACE:
304 current_ = RelativeAddress(space, 0, 0);
305 break;
306 }
307 }
308
309
// Position 'this' at the current allocation point of 'space' in the
// live heap, expressed as a RelativeAddress.
void SimulatedHeapSpace::InitCurrentHeap(AllocationSpace space) {
  switch (space) {
    case MAP_SPACE:
    case CELL_SPACE:
    case OLD_POINTER_SPACE:
    case OLD_DATA_SPACE:
    case CODE_SPACE: {
      // Select the paged space and locate its allocation top.
      PagedSpace* ps;
      if (space == MAP_SPACE) {
        ps = Heap::map_space();
      } else if (space == CELL_SPACE) {
        ps = Heap::cell_space();
      } else if (space == OLD_POINTER_SPACE) {
        ps = Heap::old_pointer_space();
      } else if (space == OLD_DATA_SPACE) {
        ps = Heap::old_data_space();
      } else {
        ASSERT(space == CODE_SPACE);
        ps = Heap::code_space();
      }
      Address top = ps->top();
      Page* top_page = Page::FromAllocationTop(top);
      // Count pages in use up to (excluding) the page containing the
      // allocation top; that count is the top page's index.
      int page_index = 0;
      PageIterator it(ps, PageIterator::PAGES_IN_USE);
      while (it.has_next()) {
        if (it.next() == top_page) break;
        page_index++;
      }
      current_ = RelativeAddress(space,
                                 page_index,
                                 top_page->Offset(top));
      break;
    }
    case NEW_SPACE:
      // New space is a single region: use the byte offset of its top.
      current_ = RelativeAddress(space,
                                 0,
                                 Heap::NewSpaceTop() - Heap::NewSpaceStart());
      break;
    case LO_SPACE:
      // The next large object gets the page index just past all
      // currently allocated large objects.
      int page_index = 0;
      for (LargeObjectIterator it(Heap::lo_space()); it.has_next(); it.next()) {
        page_index++;
      }
      current_ = RelativeAddress(space, page_index, 0);
      break;
  }
}
357
358
// Simulate allocating 'size' bytes in this space: returns the
// RelativeAddress of the new object and advances past it.  This must
// mirror the real heap's linear allocation exactly.
RelativeAddress SimulatedHeapSpace::Allocate(int size,
                                             GCTreatment special_gc_treatment) {
#ifdef DEBUG
  current_.Verify();
#endif
  int alloc_size = OBJECT_SIZE_ALIGN(size);
  // In a paged space, an object that does not fit on the current page
  // is placed at the start of a fresh page, as the real allocator does.
  if (current_.in_paged_space() &&
      current_.page_offset() + alloc_size > Page::kPageSize) {
    ASSERT(alloc_size <= Page::kMaxHeapObjectSize);
    current_.next_page(Page::kObjectStartOffset);
  }
  RelativeAddress result = current_;
  if (current_.space() == LO_SPACE) {
    // Large object space: one object per page.  Tag code and pointer
    // objects so the deserializer can allocate them specially.
    current_.next_page();
    if (special_gc_treatment == CodeObject) {
      result.set_to_large_code_object();
    } else if (special_gc_treatment == PointerObject) {
      result.set_to_large_fixed_array();
    }
  } else {
    current_.next_address(alloc_size);
  }
#ifdef DEBUG
  current_.Verify();
  result.Verify();
#endif
  return result;
}
387
388 // -----------------------------------------------------------------------------
389 // Coding of external references.
390
391 // The encoding of an external reference. The type is in the high word.
392 // The id is in the low word.
EncodeExternal(TypeCode type,uint16_t id)393 static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
394 return static_cast<uint32_t>(type) << 16 | id;
395 }
396
397
GetInternalPointer(StatsCounter * counter)398 static int* GetInternalPointer(StatsCounter* counter) {
399 // All counters refer to dummy_counter, if deserializing happens without
400 // setting up counters.
401 static int dummy_counter = 0;
402 return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter;
403 }
404
405
406 // ExternalReferenceTable is a helper class that defines the relationship
407 // between external references and their encodings. It is used to build
408 // hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder.
class ExternalReferenceTable {
 public:
  // Lazily-created singleton instance.
  static ExternalReferenceTable* instance() {
    if (!instance_) instance_ = new ExternalReferenceTable();
    return instance_;
  }

  // Number of registered references.
  int size() const { return refs_.length(); }

  // Actual address of reference i.
  Address address(int i) { return refs_[i].address; }

  // Serialized encoding of reference i (see EncodeExternal()).
  uint32_t code(int i) { return refs_[i].code; }

  // Human-readable name of reference i, for debugging output.
  const char* name(int i) { return refs_[i].name; }

  // Largest id registered for the given type code; used by the decoder
  // to size its per-type lookup tables.
  int max_id(int code) { return max_id_[code]; }

 private:
  static ExternalReferenceTable* instance_;

  ExternalReferenceTable() : refs_(64) { PopulateTable(); }
  ~ExternalReferenceTable() { }

  struct ExternalReferenceEntry {
    Address address;
    uint32_t code;
    const char* name;
  };

  void PopulateTable();

  // For a few types of references, we can get their address from their id.
  void AddFromId(TypeCode type, uint16_t id, const char* name);

  // For other types of references, the caller will figure out the address.
  void Add(Address address, TypeCode type, uint16_t id, const char* name);

  List<ExternalReferenceEntry> refs_;
  int max_id_[kTypeCodeCount];
};
449
450
451 ExternalReferenceTable* ExternalReferenceTable::instance_ = NULL;
452
453
// For (type, id) pairs whose address can be recomputed from the id,
// resolve the address via the appropriate ExternalReference
// constructor and register the entry.
void ExternalReferenceTable::AddFromId(TypeCode type,
                                       uint16_t id,
                                       const char* name) {
  Address address;
  switch (type) {
    case C_BUILTIN: {
      ExternalReference ref(static_cast<Builtins::CFunctionId>(id));
      address = ref.address();
      break;
    }
    case BUILTIN: {
      ExternalReference ref(static_cast<Builtins::Name>(id));
      address = ref.address();
      break;
    }
    case RUNTIME_FUNCTION: {
      ExternalReference ref(static_cast<Runtime::FunctionId>(id));
      address = ref.address();
      break;
    }
    case IC_UTILITY: {
      ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)));
      address = ref.address();
      break;
    }
    default:
      // All other type codes must be registered through Add() with an
      // explicit address.
      UNREACHABLE();
      return;
  }
  Add(address, type, id, name);
}
485
486
// Register a single external reference and keep max_id_ up to date for
// the decoder's per-type lookup tables.
void ExternalReferenceTable::Add(Address address,
                                 TypeCode type,
                                 uint16_t id,
                                 const char* name) {
  CHECK_NE(NULL, address);
  ExternalReferenceEntry entry;
  entry.address = address;
  entry.code = EncodeExternal(type, id);
  entry.name = name;
  // A code of zero is reserved to mean "no reference" (NULL address).
  CHECK_NE(0, entry.code);
  refs_.Add(entry);
  if (id > max_id_[type]) max_id_[type] = id;
}
500
501
// Fill the table with every external reference the serializer must be
// able to encode: builtins, runtime functions, IC utilities, debug
// addresses, stats counters, Top addresses, extensions, accessors,
// stub cache entries, runtime entries and miscellaneous unclassified
// references.  Ids within each type must stay stable across builds for
// snapshots to remain compatible.
void ExternalReferenceTable::PopulateTable() {
  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
    max_id_[type_code] = 0;
  }

  // The following populates all of the different type of external references
  // into the ExternalReferenceTable.
  //
  // NOTE: This function was originally 100k of code. It has since been
  // rewritten to be mostly table driven, as the callback macro style tends to
  // very easily cause code bloat. Please be careful in the future when adding
  // new references.

  struct RefTableEntry {
    TypeCode type;
    uint16_t id;
    const char* name;
  };

  static const RefTableEntry ref_table[] = {
  // Builtins
#define DEF_ENTRY_C(name) \
  { C_BUILTIN, \
    Builtins::c_##name, \
    "Builtins::" #name },

  BUILTIN_LIST_C(DEF_ENTRY_C)
#undef DEF_ENTRY_C

#define DEF_ENTRY_C(name) \
  { BUILTIN, \
    Builtins::name, \
    "Builtins::" #name },
#define DEF_ENTRY_A(name, kind, state) DEF_ENTRY_C(name)

  BUILTIN_LIST_C(DEF_ENTRY_C)
  BUILTIN_LIST_A(DEF_ENTRY_A)
  BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
#undef DEF_ENTRY_C
#undef DEF_ENTRY_A

  // Runtime functions
#define RUNTIME_ENTRY(name, nargs) \
  { RUNTIME_FUNCTION, \
    Runtime::k##name, \
    "Runtime::" #name },

  RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
#undef RUNTIME_ENTRY

  // IC utilities
#define IC_ENTRY(name) \
  { IC_UTILITY, \
    IC::k##name, \
    "IC::" #name },

  IC_UTIL_LIST(IC_ENTRY)
#undef IC_ENTRY
  };  // end of ref_table[].

  for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
    AddFromId(ref_table[i].type, ref_table[i].id, ref_table[i].name);
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Debug addresses
  Add(Debug_Address(Debug::k_after_break_target_address).address(),
      DEBUG_ADDRESS,
      Debug::k_after_break_target_address << kDebugIdShift,
      "Debug::after_break_target_address()");
  Add(Debug_Address(Debug::k_debug_break_return_address).address(),
      DEBUG_ADDRESS,
      Debug::k_debug_break_return_address << kDebugIdShift,
      "Debug::debug_break_return_address()");
  const char* debug_register_format = "Debug::register_address(%i)";
  size_t dr_format_length = strlen(debug_register_format);
  for (int i = 0; i < kNumJSCallerSaved; ++i) {
    // Each saved register gets its own formatted (leaked) name string.
    Vector<char> name = Vector<char>::New(dr_format_length + 1);
    OS::SNPrintF(name, debug_register_format, i);
    Add(Debug_Address(Debug::k_register_address, i).address(),
        DEBUG_ADDRESS,
        Debug::k_register_address << kDebugIdShift | i,
        name.start());
  }
#endif

  // Stat counters
  struct StatsRefTableEntry {
    StatsCounter* counter;
    uint16_t id;
    const char* name;
  };

  static const StatsRefTableEntry stats_ref_table[] = {
#define COUNTER_ENTRY(name, caption) \
  { &Counters::name, \
    Counters::k_##name, \
    "Counters::" #name },

  STATS_COUNTER_LIST_1(COUNTER_ENTRY)
  STATS_COUNTER_LIST_2(COUNTER_ENTRY)
#undef COUNTER_ENTRY
  };  // end of stats_ref_table[].

  for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
    Add(reinterpret_cast<Address>(
            GetInternalPointer(stats_ref_table[i].counter)),
        STATS_COUNTER,
        stats_ref_table[i].id,
        stats_ref_table[i].name);
  }

  // Top addresses
  const char* top_address_format = "Top::get_address_from_id(%i)";
  size_t top_format_length = strlen(top_address_format);
  for (uint16_t i = 0; i < Top::k_top_address_count; ++i) {
    Vector<char> name = Vector<char>::New(top_format_length + 1);
    const char* chars = name.start();
    OS::SNPrintF(name, top_address_format, i);
    Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
  }

  // Extensions
  Add(FUNCTION_ADDR(GCExtension::GC), EXTENSION, 1,
      "GCExtension::GC");

  // Accessors
#define ACCESSOR_DESCRIPTOR_DECLARATION(name) \
  Add((Address)&Accessors::name, \
      ACCESSOR, \
      Accessors::k##name, \
      "Accessors::" #name);

  ACCESSOR_DESCRIPTOR_LIST(ACCESSOR_DESCRIPTOR_DECLARATION)
#undef ACCESSOR_DESCRIPTOR_DECLARATION

  // Stub cache tables
  Add(SCTableReference::keyReference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      1,
      "StubCache::primary_->key");
  Add(SCTableReference::valueReference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      2,
      "StubCache::primary_->value");
  Add(SCTableReference::keyReference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      3,
      "StubCache::secondary_->key");
  Add(SCTableReference::valueReference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      4,
      "StubCache::secondary_->value");

  // Runtime entries
  Add(ExternalReference::perform_gc_function().address(),
      RUNTIME_ENTRY,
      1,
      "Runtime::PerformGC");
  Add(ExternalReference::random_positive_smi_function().address(),
      RUNTIME_ENTRY,
      2,
      "V8::RandomPositiveSmi");

  // Miscellaneous
  Add(ExternalReference::builtin_passed_function().address(),
      UNCLASSIFIED,
      1,
      "Builtins::builtin_passed_function");
  Add(ExternalReference::the_hole_value_location().address(),
      UNCLASSIFIED,
      2,
      "Factory::the_hole_value().location()");
  Add(ExternalReference::roots_address().address(),
      UNCLASSIFIED,
      3,
      "Heap::roots_address()");
  Add(ExternalReference::address_of_stack_guard_limit().address(),
      UNCLASSIFIED,
      4,
      "StackGuard::address_of_jslimit()");
  Add(ExternalReference::address_of_regexp_stack_limit().address(),
      UNCLASSIFIED,
      5,
      "RegExpStack::limit_address()");
  Add(ExternalReference::new_space_start().address(),
      UNCLASSIFIED,
      6,
      "Heap::NewSpaceStart()");
  Add(ExternalReference::heap_always_allocate_scope_depth().address(),
      UNCLASSIFIED,
      7,
      "Heap::always_allocate_scope_depth()");
  Add(ExternalReference::new_space_allocation_limit_address().address(),
      UNCLASSIFIED,
      8,
      "Heap::NewSpaceAllocationLimitAddress()");
  Add(ExternalReference::new_space_allocation_top_address().address(),
      UNCLASSIFIED,
      9,
      "Heap::NewSpaceAllocationTopAddress()");
#ifdef ENABLE_DEBUGGER_SUPPORT
  Add(ExternalReference::debug_break().address(),
      UNCLASSIFIED,
      10,
      "Debug::Break()");
  Add(ExternalReference::debug_step_in_fp_address().address(),
      UNCLASSIFIED,
      11,
      "Debug::step_in_fp_addr()");
#endif
  Add(ExternalReference::double_fp_operation(Token::ADD).address(),
      UNCLASSIFIED,
      12,
      "add_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::SUB).address(),
      UNCLASSIFIED,
      13,
      "sub_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::MUL).address(),
      UNCLASSIFIED,
      14,
      "mul_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::DIV).address(),
      UNCLASSIFIED,
      15,
      "div_two_doubles");
  Add(ExternalReference::double_fp_operation(Token::MOD).address(),
      UNCLASSIFIED,
      16,
      "mod_two_doubles");
  Add(ExternalReference::compare_doubles().address(),
      UNCLASSIFIED,
      17,
      "compare_doubles");
#ifdef V8_NATIVE_REGEXP
  Add(ExternalReference::re_case_insensitive_compare_uc16().address(),
      UNCLASSIFIED,
      18,
      "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
  Add(ExternalReference::re_check_stack_guard_state().address(),
      UNCLASSIFIED,
      19,
      "RegExpMacroAssembler*::CheckStackGuardState()");
  Add(ExternalReference::re_grow_stack().address(),
      UNCLASSIFIED,
      20,
      "NativeRegExpMacroAssembler::GrowStack()");
#endif
}
752
753
ExternalReferenceEncoder()754 ExternalReferenceEncoder::ExternalReferenceEncoder()
755 : encodings_(Match) {
756 ExternalReferenceTable* external_references =
757 ExternalReferenceTable::instance();
758 for (int i = 0; i < external_references->size(); ++i) {
759 Put(external_references->address(i), i);
760 }
761 }
762
763
Encode(Address key) const764 uint32_t ExternalReferenceEncoder::Encode(Address key) const {
765 int index = IndexOf(key);
766 return index >=0 ? ExternalReferenceTable::instance()->code(index) : 0;
767 }
768
769
NameOfAddress(Address key) const770 const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
771 int index = IndexOf(key);
772 return index >=0 ? ExternalReferenceTable::instance()->name(index) : NULL;
773 }
774
775
IndexOf(Address key) const776 int ExternalReferenceEncoder::IndexOf(Address key) const {
777 if (key == NULL) return -1;
778 HashMap::Entry* entry =
779 const_cast<HashMap &>(encodings_).Lookup(key, Hash(key), false);
780 return entry == NULL
781 ? -1
782 : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
783 }
784
785
Put(Address key,int index)786 void ExternalReferenceEncoder::Put(Address key, int index) {
787 HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
788 entry->value = reinterpret_cast<void *>(index);
789 }
790
791
// Build per-type lookup tables mapping a reference id back to its real
// address, indexed by the id part of the serialized code.
ExternalReferenceDecoder::ExternalReferenceDecoder()
    : encodings_(NewArray<Address*>(kTypeCodeCount)) {
  ExternalReferenceTable* external_references =
      ExternalReferenceTable::instance();
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    int max = external_references->max_id(type) + 1;
    // NOTE(review): this allocates max_id + 2 slots; 'max' alone looks
    // sufficient -- confirm before tightening.
    encodings_[type] = NewArray<Address>(max + 1);
  }
  for (int i = 0; i < external_references->size(); ++i) {
    Put(external_references->code(i), external_references->address(i));
  }
}
804
805
// Release the per-type tables, then the table-of-tables itself.
ExternalReferenceDecoder::~ExternalReferenceDecoder() {
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    DeleteArray(encodings_[type]);
  }
  DeleteArray(encodings_);
}
812
813
814 //------------------------------------------------------------------------------
815 // Implementation of Serializer
816
817
818 // Helper class to write the bytes of the serialized heap.
819
820 class SnapshotWriter {
821 public:
SnapshotWriter()822 SnapshotWriter() {
823 len_ = 0;
824 max_ = 8 << 10; // 8K initial size
825 str_ = NewArray<byte>(max_);
826 }
827
~SnapshotWriter()828 ~SnapshotWriter() {
829 DeleteArray(str_);
830 }
831
GetBytes(byte ** str,int * len)832 void GetBytes(byte** str, int* len) {
833 *str = NewArray<byte>(len_);
834 memcpy(*str, str_, len_);
835 *len = len_;
836 }
837
838 void Reserve(int bytes, int pos);
839
PutC(char c)840 void PutC(char c) {
841 InsertC(c, len_);
842 }
843
PutInt(int i)844 void PutInt(int i) {
845 InsertInt(i, len_);
846 }
847
PutAddress(Address p)848 void PutAddress(Address p) {
849 PutBytes(reinterpret_cast<byte*>(&p), sizeof(p));
850 }
851
PutBytes(const byte * a,int size)852 void PutBytes(const byte* a, int size) {
853 InsertBytes(a, len_, size);
854 }
855
PutString(const char * s)856 void PutString(const char* s) {
857 InsertString(s, len_);
858 }
859
InsertC(char c,int pos)860 int InsertC(char c, int pos) {
861 Reserve(1, pos);
862 str_[pos] = c;
863 len_++;
864 return pos + 1;
865 }
866
InsertInt(int i,int pos)867 int InsertInt(int i, int pos) {
868 return InsertBytes(reinterpret_cast<byte*>(&i), pos, sizeof(i));
869 }
870
InsertBytes(const byte * a,int pos,int size)871 int InsertBytes(const byte* a, int pos, int size) {
872 Reserve(size, pos);
873 memcpy(&str_[pos], a, size);
874 len_ += size;
875 return pos + size;
876 }
877
878 int InsertString(const char* s, int pos);
879
length()880 int length() { return len_; }
881
position()882 Address position() { return reinterpret_cast<Address>(&str_[len_]); }
883
884 private:
885 byte* str_; // the snapshot
886 int len_; // the current length of str_
887 int max_; // the allocated size of str_
888 };
889
890
// Ensure the buffer can accept 'bytes' more bytes inserted at 'pos'.
// Grows capacity geometrically; if the insertion point is in the
// middle of the data, opens a gap of 'bytes' bytes at 'pos' which the
// caller then fills.  len_ is updated by the caller, not here.
void SnapshotWriter::Reserve(int bytes, int pos) {
  CHECK(0 <= pos && pos <= len_);
  while (len_ + bytes >= max_) {
    max_ *= 2;
    byte* old = str_;
    str_ = NewArray<byte>(max_);
    memcpy(str_, old, len_);
    DeleteArray(old);
  }
  if (pos < len_) {
    // Shift the tail past the gap.
    byte* old = str_;
    str_ = NewArray<byte>(max_);
    memcpy(str_, old, pos);
    memcpy(str_ + pos + bytes, old + pos, len_ - pos);
    DeleteArray(old);
  }
}
908
InsertString(const char * s,int pos)909 int SnapshotWriter::InsertString(const char* s, int pos) {
910 int size = strlen(s);
911 pos = InsertC('[', pos);
912 pos = InsertInt(size, pos);
913 pos = InsertC(']', pos);
914 return InsertBytes(reinterpret_cast<const byte*>(s), pos, size);
915 }
916
917
// ObjectVisitor that records, for each pointer or external reference
// inside one heap object, its byte offset within the object and the
// value that slot must hold in the serialized copy.  Update() then
// patches all recorded slots in the copy.
class ReferenceUpdater: public ObjectVisitor {
 public:
  ReferenceUpdater(HeapObject* obj, Serializer* serializer)
    : obj_address_(obj->address()),
      serializer_(serializer),
      reference_encoder_(serializer->reference_encoder_),
      offsets_(8),
      addresses_(8) {
  }

  // Record each heap-object pointer slot and the saved (relative)
  // address of its target.
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; ++p) {
      if ((*p)->IsHeapObject()) {
        offsets_.Add(reinterpret_cast<Address>(p) - obj_address_);
        Address a = serializer_->GetSavedAddress(HeapObject::cast(*p));
        addresses_.Add(a);
      }
    }
  }

  // Record external-reference slots, replaced by table encodings.
  virtual void VisitExternalReferences(Address* start, Address* end) {
    for (Address* p = start; p < end; ++p) {
      uint32_t code = reference_encoder_->Encode(*p);
      // Only NULL may encode to 0, and every non-NULL reference must
      // be known to the encoder.
      CHECK(*p == NULL ? code == 0 : code != 0);
      offsets_.Add(reinterpret_cast<Address>(p) - obj_address_);
      addresses_.Add(reinterpret_cast<Address>(code));
    }
  }

  // Record runtime-entry targets in relocation info, also encoded.
  virtual void VisitRuntimeEntry(RelocInfo* rinfo) {
    Address target = rinfo->target_address();
    uint32_t encoding = reference_encoder_->Encode(target);
    CHECK(target == NULL ? encoding == 0 : encoding != 0);
    offsets_.Add(rinfo->target_address_address() - obj_address_);
    addresses_.Add(reinterpret_cast<Address>(encoding));
  }

  // Patch every recorded slot in the copy of the object that starts at
  // start_address.
  void Update(Address start_address) {
    for (int i = 0; i < offsets_.length(); i++) {
      memcpy(start_address + offsets_[i], &addresses_[i], sizeof(Address));
    }
  }

 private:
  Address obj_address_;
  Serializer* serializer_;
  ExternalReferenceEncoder* reference_encoder_;
  List<int> offsets_;
  List<Address> addresses_;
};
968
969
970 // Helper functions for a map of encoded heap object addresses.
HeapObjectHash(HeapObject * key)971 static uint32_t HeapObjectHash(HeapObject* key) {
972 uint32_t low32bits = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key));
973 return low32bits >> 2;
974 }
975
976
// Pointer identity is the equality relation for the saved-address map.
static bool MatchHeapObject(void* key1, void* key2) {
  return key1 == key2;
}
980
981
// Set up a fresh serializer: recursion starts at root level, counters
// at zero, and one simulated heap space per allocation space.
Serializer::Serializer()
    : global_handles_(4),
      saved_addresses_(MatchHeapObject) {
  root_ = true;               // The next VisitPointers call is at root level.
  roots_ = 0;
  objects_ = 0;
  reference_encoder_ = NULL;  // Created lazily in Serialize().
  writer_ = new SnapshotWriter();
  for (int i = 0; i <= LAST_SPACE; i++) {
    allocator_[i] = new SimulatedHeapSpace();
  }
}
994
995
~Serializer()996 Serializer::~Serializer() {
997 for (int i = 0; i <= LAST_SPACE; i++) {
998 delete allocator_[i];
999 }
1000 if (reference_encoder_) delete reference_encoder_;
1001 delete writer_;
1002 }
1003
1004
1005 bool Serializer::serialization_enabled_ = false;
1006
1007
1008 #ifdef DEBUG
1009 static const int kMaxTagLength = 32;
1010
Synchronize(const char * tag)1011 void Serializer::Synchronize(const char* tag) {
1012 if (FLAG_debug_serialization) {
1013 int length = strlen(tag);
1014 ASSERT(length <= kMaxTagLength);
1015 writer_->PutC('S');
1016 writer_->PutInt(length);
1017 writer_->PutBytes(reinterpret_cast<const byte*>(tag), length);
1018 }
1019 }
1020 #endif
1021
1022
InitializeAllocators()1023 void Serializer::InitializeAllocators() {
1024 for (int i = 0; i <= LAST_SPACE; i++) {
1025 allocator_[i]->InitEmptyHeap(static_cast<AllocationSpace>(i));
1026 }
1027 }
1028
1029
IsVisited(HeapObject * obj)1030 bool Serializer::IsVisited(HeapObject* obj) {
1031 HashMap::Entry* entry =
1032 saved_addresses_.Lookup(obj, HeapObjectHash(obj), false);
1033 return entry != NULL;
1034 }
1035
1036
GetSavedAddress(HeapObject * obj)1037 Address Serializer::GetSavedAddress(HeapObject* obj) {
1038 HashMap::Entry* entry =
1039 saved_addresses_.Lookup(obj, HeapObjectHash(obj), false);
1040 ASSERT(entry != NULL);
1041 return reinterpret_cast<Address>(entry->value);
1042 }
1043
1044
SaveAddress(HeapObject * obj,Address addr)1045 void Serializer::SaveAddress(HeapObject* obj, Address addr) {
1046 HashMap::Entry* entry =
1047 saved_addresses_.Lookup(obj, HeapObjectHash(obj), true);
1048 entry->value = addr;
1049 }
1050
1051
// Write the whole heap into the snapshot stream.  The CHECKs ensure
// the VM is in a state that can be snapshotted at all.
void Serializer::Serialize() {
  // No active threads.
  CHECK_EQ(NULL, ThreadState::FirstInUse());
  // No active or weak handles.
  CHECK(HandleScopeImplementer::instance()->Blocks()->is_empty());
  CHECK_EQ(0, GlobalHandles::NumberOfWeakHandles());
  // We need a counter function during serialization to resolve the
  // references to counters in the code on the heap.
  CHECK(StatsTable::HasCounterFunction());
  CHECK(enabled());
  InitializeAllocators();
  reference_encoder_ = new ExternalReferenceEncoder();
  PutHeader();               // Flags, space sizes, global handle slots.
  Heap::IterateRoots(this);  // Recursively serializes every reachable object.
  PutLog();                  // Optional code-log payload (see PutLog).
  PutContextStack();         // Saved contexts, written as root pointers.
  Disable();
}
1070
1071
Finalize(byte ** str,int * len)1072 void Serializer::Finalize(byte** str, int* len) {
1073 writer_->GetBytes(str, len);
1074 }
1075
1076
1077 // Serialize objects by writing them into the stream.
1078
VisitPointers(Object ** start,Object ** end)1079 void Serializer::VisitPointers(Object** start, Object** end) {
1080 bool root = root_;
1081 root_ = false;
1082 for (Object** p = start; p < end; ++p) {
1083 bool serialized;
1084 Address a = Encode(*p, &serialized);
1085 if (root) {
1086 roots_++;
1087 // If the object was not just serialized,
1088 // write its encoded address instead.
1089 if (!serialized) PutEncodedAddress(a);
1090 }
1091 }
1092 root_ = root;
1093 }
1094
1095
1096 class GlobalHandlesRetriever: public ObjectVisitor {
1097 public:
GlobalHandlesRetriever(List<Object ** > * handles)1098 explicit GlobalHandlesRetriever(List<Object**>* handles)
1099 : global_handles_(handles) {}
1100
VisitPointers(Object ** start,Object ** end)1101 virtual void VisitPointers(Object** start, Object** end) {
1102 for (; start != end; ++start) {
1103 global_handles_->Add(start);
1104 }
1105 }
1106
1107 private:
1108 List<Object**>* global_handles_;
1109 };
1110
1111
// Write the startup flags as 'F' count '[' flag1 '|' flag2 ... ']'.
// Takes ownership of the list returned by FlagList::argv(), freeing
// both the flag strings and the list itself.
void Serializer::PutFlags() {
  writer_->PutC('F');
  List<const char*>* argv = FlagList::argv();
  writer_->PutInt(argv->length());
  writer_->PutC('[');
  for (int i = 0; i < argv->length(); i++) {
    if (i > 0) writer_->PutC('|');
    writer_->PutString((*argv)[i]);
    DeleteArray((*argv)[i]);  // Flag strings are heap-allocated copies.
  }
  writer_->PutC(']');
  // Remember where the flags end so PutLog() can insert the log there.
  flags_end_ = writer_->length();
  delete argv;
}
1126
1127
// Write the snapshot header: flags, build configuration, memory-space
// sizes, and one placeholder per global handle.  GetHeader() must read
// this format back exactly.
void Serializer::PutHeader() {
  PutFlags();
  // 'D' then '1'/'0': whether the stream carries debug sync tags.
  writer_->PutC('D');
#ifdef DEBUG
  writer_->PutC(FLAG_debug_serialization ? '1' : '0');
#else
  writer_->PutC('0');
#endif
  // Regexp engine marker: native ('N') or interpreted ('I'); the
  // deserializing binary must have been built the same way.
#ifdef V8_NATIVE_REGEXP
  writer_->PutC('N');
#else  // Interpreted regexp
  writer_->PutC('I');
#endif
  // Write sizes of paged memory spaces. Allocate extra space for the old
  // and code spaces, because objects in new space will be promoted to them.
  writer_->PutC('S');
  writer_->PutC('[');
  writer_->PutInt(Heap::old_pointer_space()->Size() +
                  Heap::new_space()->Size());
  writer_->PutC('|');
  writer_->PutInt(Heap::old_data_space()->Size() + Heap::new_space()->Size());
  writer_->PutC('|');
  writer_->PutInt(Heap::code_space()->Size() + Heap::new_space()->Size());
  writer_->PutC('|');
  writer_->PutInt(Heap::map_space()->Size());
  writer_->PutC('|');
  writer_->PutInt(Heap::cell_space()->Size());
  writer_->PutC(']');
  // Write global handles: one 'N' per handle; the deserializer creates
  // a fresh handle for each (see GetHeader).
  writer_->PutC('G');
  writer_->PutC('[');
  GlobalHandlesRetriever ghr(&global_handles_);
  GlobalHandles::IterateRoots(&ghr);
  for (int i = 0; i < global_handles_.length(); i++) {
    writer_->PutC('N');
  }
  writer_->PutC(']');
}
1166
1167
// If code logging is on, splice the contents of the log file into the
// snapshot at flags_end_ (i.e. immediately after the flags section),
// prefixed with 'L'.
void Serializer::PutLog() {
#ifdef ENABLE_LOGGING_AND_PROFILING
  if (FLAG_log_code) {
    Logger::TearDown();  // Close the log so the file can be read back.
    int pos = writer_->InsertC('L', flags_end_);
    bool exists;
    Vector<const char> log = ReadFile(FLAG_logfile, &exists);
    writer_->InsertString(log.start(), pos);
    log.Dispose();
  }
#endif
}
1180
1181
IndexOf(const List<Object ** > & list,Object ** element)1182 static int IndexOf(const List<Object**>& list, Object** element) {
1183 for (int i = 0; i < list.length(); i++) {
1184 if (list[i] == element) return i;
1185 }
1186 return -1;
1187 }
1188
1189
// Write a stack of handles as '[' count { '|' global-handle-index } ']'.
// Entries are written from the end of the list backwards, and each must
// refer to one of the global handles recorded in the header.
void Serializer::PutGlobalHandleStack(const List<Handle<Object> >& stack) {
  writer_->PutC('[');
  writer_->PutInt(stack.length());
  for (int i = stack.length() - 1; i >= 0; i--) {
    writer_->PutC('|');
    int gh_index = IndexOf(global_handles_, stack[i].location());
    CHECK_GE(gh_index, 0);  // Every entry must be a known global handle.
    writer_->PutInt(gh_index);
  }
  writer_->PutC(']');
}
1201
1202
// Serialize the stack of saved contexts as 'C' '[' count <pointers> ']'.
void Serializer::PutContextStack() {
  List<Context*> contexts(2);
  // Drain the saved-context stack into a local list...
  while (HandleScopeImplementer::instance()->HasSavedContexts()) {
    Context* context =
        HandleScopeImplementer::instance()->RestoreContext();
    contexts.Add(context);
  }
  // ...then push everything back in the original order so serialization
  // leaves the VM state unchanged.
  for (int i = contexts.length() - 1; i >= 0; i--) {
    HandleScopeImplementer::instance()->SaveContext(contexts[i]);
  }
  writer_->PutC('C');
  writer_->PutC('[');
  writer_->PutInt(contexts.length());
  if (!contexts.is_empty()) {
    // Serialize the contexts as ordinary root pointers, reading the
    // list's backing store directly.
    Object** start = reinterpret_cast<Object**>(&contexts.first());
    VisitPointers(start, start + contexts.length());
  }
  writer_->PutC(']');
}
1222
PutEncodedAddress(Address addr)1223 void Serializer::PutEncodedAddress(Address addr) {
1224 writer_->PutC('P');
1225 writer_->PutAddress(addr);
1226 }
1227
1228
Encode(Object * o,bool * serialized)1229 Address Serializer::Encode(Object* o, bool* serialized) {
1230 *serialized = false;
1231 if (o->IsSmi()) {
1232 return reinterpret_cast<Address>(o);
1233 } else {
1234 HeapObject* obj = HeapObject::cast(o);
1235 if (IsVisited(obj)) {
1236 return GetSavedAddress(obj);
1237 } else {
1238 // First visit: serialize the object.
1239 *serialized = true;
1240 return PutObject(obj);
1241 }
1242 }
1243 }
1244
1245
// Write one heap object (and, recursively, every not-yet-visited
// object it references) to the stream.  Returns the simulated address
// at which the object will live after deserialization.
Address Serializer::PutObject(HeapObject* obj) {
  Map* map = obj->map();
  InstanceType type = map->instance_type();
  int size = obj->SizeFromMap(map);

  // Simulate the allocation of obj to predict where it will be
  // allocated during deserialization.
  Address addr = Allocate(obj).Encode();

  // Record the address before recursing, so cycles back to obj become
  // back-references rather than infinite recursion.
  SaveAddress(obj, addr);

  if (type == CODE_TYPE) {
    Code* code = Code::cast(obj);
    // Ensure Code objects contain Object pointers, not Addresses.
    code->ConvertICTargetsFromAddressToObject();
    LOG(CodeMoveEvent(code->address(), addr));
  }

  // Write out the object prologue: type, size, and simulated address of obj.
  writer_->PutC('[');
  CHECK_EQ(0, static_cast<int>(size & kObjectAlignmentMask));
  writer_->PutInt(type);
  writer_->PutInt(size >> kObjectAlignmentBits);
  PutEncodedAddress(addr);  // encodes AllocationSpace

  // Visit all the pointers in the object other than the map. This
  // will recursively serialize any as-yet-unvisited objects.
  obj->Iterate(this);

  // Mark end of recursively embedded objects, start of object body.
  writer_->PutC('|');
  // Write out the raw contents of the object. No compression, but
  // fast to deserialize.
  writer_->PutBytes(obj->address(), size);
  // Update pointers and external references in the written object.
  ReferenceUpdater updater(obj, this);
  obj->Iterate(&updater);
  updater.Update(writer_->position() - size);

#ifdef DEBUG
  if (FLAG_debug_serialization) {
    // Write out the object epilogue to catch synchronization errors.
    PutEncodedAddress(addr);
    writer_->PutC(']');
  }
#endif

  if (type == CODE_TYPE) {
    Code* code = Code::cast(obj);
    // Convert relocations from Object* back to Address in Code objects.
    code->ConvertICTargetsFromObjectToAddress();
  }

  objects_++;
  return addr;
}
1302
1303
Allocate(HeapObject * obj)1304 RelativeAddress Serializer::Allocate(HeapObject* obj) {
1305 // Find out which AllocationSpace 'obj' is in.
1306 AllocationSpace s;
1307 bool found = false;
1308 for (int i = FIRST_SPACE; !found && i <= LAST_SPACE; i++) {
1309 s = static_cast<AllocationSpace>(i);
1310 found = Heap::InSpace(obj, s);
1311 }
1312 CHECK(found);
1313 int size = obj->Size();
1314 if (s == NEW_SPACE) {
1315 if (size > Heap::MaxObjectSizeInPagedSpace()) {
1316 s = LO_SPACE;
1317 } else {
1318 OldSpace* space = Heap::TargetSpace(obj);
1319 ASSERT(space == Heap::old_pointer_space() ||
1320 space == Heap::old_data_space());
1321 s = (space == Heap::old_pointer_space()) ?
1322 OLD_POINTER_SPACE :
1323 OLD_DATA_SPACE;
1324 }
1325 }
1326 GCTreatment gc_treatment = DataObject;
1327 if (obj->IsFixedArray()) gc_treatment = PointerObject;
1328 else if (obj->IsCode()) gc_treatment = CodeObject;
1329 return allocator_[s]->Allocate(size, gc_treatment);
1330 }
1331
1332
1333 //------------------------------------------------------------------------------
1334 // Implementation of Deserializer
1335
1336
1337 static const int kInitArraySize = 32;
1338
1339
// Prepare to deserialize the snapshot in str (len bytes).  The page
// lists cache per-space Page pointers for use by Resolve().
Deserializer::Deserializer(const byte* str, int len)
    : reader_(str, len),
      map_pages_(kInitArraySize),
      cell_pages_(kInitArraySize),
      old_pointer_pages_(kInitArraySize),
      old_data_pages_(kInitArraySize),
      code_pages_(kInitArraySize),
      large_objects_(kInitArraySize),
      global_handles_(4) {
  root_ = true;               // The next VisitPointers call is at root level.
  roots_ = 0;
  objects_ = 0;
  reference_decoder_ = NULL;  // Created in Deserialize().
#ifdef DEBUG
  expect_debug_information_ = false;
#endif
}
1357
1358
~Deserializer()1359 Deserializer::~Deserializer() {
1360 if (reference_decoder_) delete reference_decoder_;
1361 }
1362
1363
ExpectEncodedAddress(Address expected)1364 void Deserializer::ExpectEncodedAddress(Address expected) {
1365 Address a = GetEncodedAddress();
1366 USE(a);
1367 ASSERT(a == expected);
1368 }
1369
1370
1371 #ifdef DEBUG
Synchronize(const char * tag)1372 void Deserializer::Synchronize(const char* tag) {
1373 if (expect_debug_information_) {
1374 char buf[kMaxTagLength];
1375 reader_.ExpectC('S');
1376 int length = reader_.GetInt();
1377 ASSERT(length <= kMaxTagLength);
1378 reader_.GetBytes(reinterpret_cast<Address>(buf), length);
1379 ASSERT_EQ(strlen(tag), length);
1380 ASSERT(strncmp(tag, buf, length) == 0);
1381 }
1382 }
1383 #endif
1384
1385
// Reconstruct the heap from the snapshot stream.  Mirrors the phases
// of Serializer::Serialize().
void Deserializer::Deserialize() {
  // No active threads.
  ASSERT_EQ(NULL, ThreadState::FirstInUse());
  // No active handles.
  ASSERT(HandleScopeImplementer::instance()->Blocks()->is_empty());
  reference_decoder_ = new ExternalReferenceDecoder();
  // By setting linear allocation only, we forbid the use of free list
  // allocation which is not predicted by SimulatedAddress.
  GetHeader();
  Heap::IterateRoots(this);  // Reads every root object from the stream.
  GetContextStack();
}
1398
1399
// Fill in a range of pointer slots.  At root level each value comes
// from the stream ('[' starts an embedded object, 'P' introduces a
// back-reference); below root level the slot already contains an
// encoded address read in GetObject(), which only needs resolving.
void Deserializer::VisitPointers(Object** start, Object** end) {
  bool root = root_;
  root_ = false;
  for (Object** p = start; p < end; ++p) {
    if (root) {
      roots_++;
      // Read the next object or pointer from the stream
      // pointer in the stream.
      int c = reader_.GetC();
      if (c == '[') {
        *p = GetObject();  // embedded object
      } else {
        ASSERT(c == 'P');  // pointer to previously serialized object
        *p = Resolve(reader_.GetAddress());
      }
    } else {
      // A pointer internal to a HeapObject that we've already
      // read: resolve it to a true address (or Smi)
      *p = Resolve(reinterpret_cast<Address>(*p));
    }
  }
  root_ = root;
}
1423
1424
VisitExternalReferences(Address * start,Address * end)1425 void Deserializer::VisitExternalReferences(Address* start, Address* end) {
1426 for (Address* p = start; p < end; ++p) {
1427 uint32_t code = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*p));
1428 *p = reference_decoder_->Decode(code);
1429 }
1430 }
1431
1432
// Decode a runtime-entry reference in relocation info.  The serializer
// stored a 32-bit external-reference encoding in the target-address
// slot; read it back and install the real target address.
// NOTE(review): reads the slot through a uint32_t* — assumes the
// encoding occupies exactly 32 bits in the instruction stream.
void Deserializer::VisitRuntimeEntry(RelocInfo* rinfo) {
  uint32_t* pc = reinterpret_cast<uint32_t*>(rinfo->target_address_address());
  uint32_t encoding = *pc;
  Address target = reference_decoder_->Decode(encoding);
  rinfo->set_target_address(target);
}
1439
1440
// Read the flag list written by PutFlags() and honor the few flags
// that must match for deserialization; everything else is discarded.
void Deserializer::GetFlags() {
  reader_.ExpectC('F');
  int argc = reader_.GetInt() + 1;  // Slot 0 is reserved (argv-style layout).
  char** argv = NewArray<char*>(argc);
  reader_.ExpectC('[');
  for (int i = 1; i < argc; i++) {
    if (i > 1) reader_.ExpectC('|');
    argv[i] = reader_.GetString();
  }
  reader_.ExpectC(']');
  has_log_ = false;
  for (int i = 1; i < argc; i++) {
    if (strcmp("--log_code", argv[i]) == 0) {
      has_log_ = true;  // A log section follows the header (see GetLog).
    } else if (strcmp("--nouse_ic", argv[i]) == 0) {
      FLAG_use_ic = false;
    } else if (strcmp("--debug_code", argv[i]) == 0) {
      FLAG_debug_code = true;
    } else if (strcmp("--nolazy", argv[i]) == 0) {
      FLAG_lazy = false;
    }
    DeleteArray(argv[i]);  // Strings were allocated by GetString().
  }

  DeleteArray(argv);
}
1467
1468
// Read the code log spliced in by Serializer::PutLog() (present only
// when the snapshot was created with --log_code) and replay it as the
// preamble of the current log.
void Deserializer::GetLog() {
  if (has_log_) {
    reader_.ExpectC('L');
    char* snapshot_log = reader_.GetString();
#ifdef ENABLE_LOGGING_AND_PROFILING
    if (FLAG_log_code) {
      LOG(Preamble(snapshot_log));
    }
#endif
    DeleteArray(snapshot_log);
  }
}
1481
1482
InitPagedSpace(PagedSpace * space,int capacity,List<Page * > * page_list)1483 static void InitPagedSpace(PagedSpace* space,
1484 int capacity,
1485 List<Page*>* page_list) {
1486 if (!space->EnsureCapacity(capacity)) {
1487 V8::FatalProcessOutOfMemory("InitPagedSpace");
1488 }
1489 PageIterator it(space, PageIterator::ALL_PAGES);
1490 while (it.has_next()) page_list->Add(it.next());
1491 }
1492
1493
// Read the snapshot header written by Serializer::PutHeader(): build
// configuration, space sizes, and global-handle placeholders.
void Deserializer::GetHeader() {
  reader_.ExpectC('D');
#ifdef DEBUG
  expect_debug_information_ = reader_.GetC() == '1';
#else
  // In release mode, don't attempt to read a snapshot containing
  // synchronization tags.
  if (reader_.GetC() != '0') FATAL("Snapshot contains synchronization tags.");
#endif
  // The regexp-engine marker must match this binary's configuration.
#ifdef V8_NATIVE_REGEXP
  reader_.ExpectC('N');
#else  // Interpreted regexp.
  reader_.ExpectC('I');
#endif
  // Ensure sufficient capacity in paged memory spaces to avoid growth
  // during deserialization.
  reader_.ExpectC('S');
  reader_.ExpectC('[');
  InitPagedSpace(Heap::old_pointer_space(),
                 reader_.GetInt(),
                 &old_pointer_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::old_data_space(), reader_.GetInt(), &old_data_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::code_space(), reader_.GetInt(), &code_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::map_space(), reader_.GetInt(), &map_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::cell_space(), reader_.GetInt(), &cell_pages_);
  reader_.ExpectC(']');
  // Create placeholders for global handles, to be filled in during
  // IterateRoots: one fresh handle per 'N' in the header.
  reader_.ExpectC('G');
  reader_.ExpectC('[');
  int c = reader_.GetC();
  while (c != ']') {
    ASSERT(c == 'N');
    global_handles_.Add(GlobalHandles::Create(NULL).location());
    c = reader_.GetC();
  }
}
1535
1536
GetGlobalHandleStack(List<Handle<Object>> * stack)1537 void Deserializer::GetGlobalHandleStack(List<Handle<Object> >* stack) {
1538 reader_.ExpectC('[');
1539 int length = reader_.GetInt();
1540 for (int i = 0; i < length; i++) {
1541 reader_.ExpectC('|');
1542 int gh_index = reader_.GetInt();
1543 stack->Add(global_handles_[gh_index]);
1544 }
1545 reader_.ExpectC(']');
1546 }
1547
1548
// Read the context stack written by Serializer::PutContextStack() and
// re-save each context through the HandleScopeImplementer.
void Deserializer::GetContextStack() {
  reader_.ExpectC('C');
  CHECK_EQ(reader_.GetC(), '[');
  int count = reader_.GetInt();
  List<Context*> entered_contexts(count);
  if (count > 0) {
    // Read the context pointers straight into the list's backing store.
    Object** start = reinterpret_cast<Object**>(&entered_contexts.first());
    VisitPointers(start, start + count);
  }
  reader_.ExpectC(']');
  for (int i = 0; i < count; i++) {
    HandleScopeImplementer::instance()->SaveContext(entered_contexts[i]);
  }
}
1563
1564
GetEncodedAddress()1565 Address Deserializer::GetEncodedAddress() {
1566 reader_.ExpectC('P');
1567 return reader_.GetAddress();
1568 }
1569
1570
// Read one object (plus any recursively embedded objects) from the
// stream, allocating it at the location the serializer's allocation
// simulation predicted.
Object* Deserializer::GetObject() {
  // Read the prologue: type, size and encoded address.
  InstanceType type = static_cast<InstanceType>(reader_.GetInt());
  int size = reader_.GetInt() << kObjectAlignmentBits;
  Address a = GetEncodedAddress();

  // Get a raw object of the right size in the right space.
  AllocationSpace space = GetSpace(a);
  Object* o;
  if (IsLargeExecutableObject(a)) {
    o = Heap::lo_space()->AllocateRawCode(size);
  } else if (IsLargeFixedArray(a)) {
    o = Heap::lo_space()->AllocateRawFixedArray(size);
  } else {
    // New-space objects were promoted by the serializer's simulation,
    // so allow retry in the corresponding target space.
    AllocationSpace retry_space = (space == NEW_SPACE)
        ? Heap::TargetSpaceId(type)
        : space;
    o = Heap::AllocateRaw(size, space, retry_space);
  }
  ASSERT(!o->IsFailure());
  // Check that the simulation of heap allocation was correct.
  ASSERT(o == Resolve(a));

  // Read any recursively embedded objects.
  int c = reader_.GetC();
  while (c == '[') {
    GetObject();
    c = reader_.GetC();
  }
  ASSERT(c == '|');

  HeapObject* obj = reinterpret_cast<HeapObject*>(o);
  // Read the uninterpreted contents of the object after the map.
  reader_.GetBytes(obj->address(), size);
#ifdef DEBUG
  if (expect_debug_information_) {
    // Read in the epilogue to check that we're still synchronized.
    ExpectEncodedAddress(a);
    reader_.ExpectC(']');
  }
#endif

  // Resolve the encoded pointers we just read in.
  // Same as obj->Iterate(this), but doesn't rely on the map pointer being set.
  VisitPointer(reinterpret_cast<Object**>(obj->address()));
  obj->IterateBody(type, size, this);

  if (type == CODE_TYPE) {
    Code* code = Code::cast(obj);
    // Convert relocations from Object* to Address in Code objects.
    code->ConvertICTargetsFromObjectToAddress();
    LOG(CodeMoveEvent(a, code->address()));
  }
  objects_++;
  return o;
}
1627
1628
ResolvePaged(int page_index,int page_offset,PagedSpace * space,List<Page * > * page_list)1629 static inline Object* ResolvePaged(int page_index,
1630 int page_offset,
1631 PagedSpace* space,
1632 List<Page*>* page_list) {
1633 ASSERT(page_index < page_list->length());
1634 Address address = (*page_list)[page_index]->OffsetToAddress(page_offset);
1635 return HeapObject::FromAddress(address);
1636 }
1637
1638
1639 template<typename T>
ConcatReversed(List<T> * target,const List<T> & source)1640 void ConcatReversed(List<T>* target, const List<T>& source) {
1641 for (int i = source.length() - 1; i >= 0; i--) {
1642 target->Add(source[i]);
1643 }
1644 }
1645
1646
// Map an encoded snapshot address back to the real heap object it now
// refers to.  Smis pass through unchanged; heap objects are dispatched
// on their encoded AllocationSpace.
Object* Deserializer::Resolve(Address encoded) {
  Object* o = reinterpret_cast<Object*>(encoded);
  if (o->IsSmi()) return o;

  // Encoded addresses of HeapObjects always have 'HeapObject' tags.
  ASSERT(o->IsHeapObject());

  switch (GetSpace(encoded)) {
    // For Map space and Old space, we cache the known Pages in map_pages,
    // old_pointer_pages and old_data_pages. Even though MapSpace keeps a list
    // of page addresses, we don't rely on it since GetObject uses AllocateRaw,
    // and that appears not to update the page list.
    case MAP_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::map_space(), &map_pages_);
    case CELL_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::cell_space(), &cell_pages_);
    case OLD_POINTER_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::old_pointer_space(), &old_pointer_pages_);
    case OLD_DATA_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::old_data_space(), &old_data_pages_);
    case CODE_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::code_space(), &code_pages_);
    case NEW_SPACE:
      // New-space encodings are plain offsets from the space start.
      return HeapObject::FromAddress(Heap::NewSpaceStart() +
                                     NewSpaceOffset(encoded));
    case LO_SPACE:
      // Cache the known large_objects, allocated one per 'page'.
      int index = LargeObjectIndex(encoded);
      if (index >= large_objects_.length()) {
        // New large objects have appeared since the last lookup; the
        // iterator yields newest first, so collect and append reversed.
        int new_object_count =
            Heap::lo_space()->PageCount() - large_objects_.length();
        List<Object*> new_objects(new_object_count);
        LargeObjectIterator it(Heap::lo_space());
        for (int i = 0; i < new_object_count; i++) {
          new_objects.Add(it.next());
        }
#ifdef DEBUG
        // The remaining iterator entries must match the cached list.
        for (int i = large_objects_.length() - 1; i >= 0; i--) {
          ASSERT(it.next() == large_objects_[i]);
        }
#endif
        ConcatReversed(&large_objects_, new_objects);
        ASSERT(index < large_objects_.length());
      }
      return large_objects_[index];  // s.page_offset() is ignored.
  }
  UNREACHABLE();
  return NULL;
}
1701
1702
1703 } } // namespace v8::internal
1704