// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/accessors.h"
#include "src/api.h"
#include "src/bootstrapper.h"
#include "src/deoptimizer.h"
#include "src/execution.h"
#include "src/global-handles.h"
#include "src/ic-inl.h"
#include "src/natives.h"
#include "src/platform.h"
#include "src/runtime.h"
#include "src/serialize.h"
#include "src/snapshot.h"
#include "src/stub-cache.h"
#include "src/v8threads.h"

namespace v8 {
namespace internal {


// -----------------------------------------------------------------------------
// Coding of external references.

// The encoding of an external reference. The type is in the high word.
// The id is in the low word.
static uint32_t EncodeExternal(TypeCode type, uint16_t id) {
  return static_cast<uint32_t>(type) << 16 | id;
}
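
// Illustrative only (not part of the build): decoding the code produced by
// EncodeExternal() above is the mirror image of the shift, recovering the
// type from the high 16 bits and the id from the low 16:
//
//   TypeCode type = static_cast<TypeCode>(code >> 16);
//   uint16_t id = static_cast<uint16_t>(code & 0xFFFF);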


static int* GetInternalPointer(StatsCounter* counter) {
  // All counters refer to dummy_counter if deserializing happens without
  // setting up counters.
  static int dummy_counter = 0;
  return counter->Enabled() ? counter->GetInternalPointer() : &dummy_counter;
}


ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
  ExternalReferenceTable* external_reference_table =
      isolate->external_reference_table();
  if (external_reference_table == NULL) {
    external_reference_table = new ExternalReferenceTable(isolate);
    isolate->set_external_reference_table(external_reference_table);
  }
  return external_reference_table;
}


void ExternalReferenceTable::AddFromId(TypeCode type,
                                       uint16_t id,
                                       const char* name,
                                       Isolate* isolate) {
  Address address;
  switch (type) {
    case C_BUILTIN: {
      ExternalReference ref(static_cast<Builtins::CFunctionId>(id), isolate);
      address = ref.address();
      break;
    }
    case BUILTIN: {
      ExternalReference ref(static_cast<Builtins::Name>(id), isolate);
      address = ref.address();
      break;
    }
    case RUNTIME_FUNCTION: {
      ExternalReference ref(static_cast<Runtime::FunctionId>(id), isolate);
      address = ref.address();
      break;
    }
    case IC_UTILITY: {
      ExternalReference ref(IC_Utility(static_cast<IC::UtilityId>(id)),
                            isolate);
      address = ref.address();
      break;
    }
    default:
      UNREACHABLE();
      return;
  }
  Add(address, type, id, name);
}


void ExternalReferenceTable::Add(Address address,
                                 TypeCode type,
                                 uint16_t id,
                                 const char* name) {
  ASSERT_NE(NULL, address);
  ExternalReferenceEntry entry;
  entry.address = address;
  entry.code = EncodeExternal(type, id);
  entry.name = name;
  ASSERT_NE(0, entry.code);
  refs_.Add(entry);
  if (id > max_id_[type]) max_id_[type] = id;
}


void ExternalReferenceTable::PopulateTable(Isolate* isolate) {
  for (int type_code = 0; type_code < kTypeCodeCount; type_code++) {
    max_id_[type_code] = 0;
  }

  // The following populates all of the different types of external references
  // into the ExternalReferenceTable.
  //
  // NOTE: This function was originally 100k of code. It has since been
  // rewritten to be mostly table driven, as the callback macro style tends to
  // cause code bloat very easily. Please be careful in the future when adding
  // new references.

  struct RefTableEntry {
    TypeCode type;
    uint16_t id;
    const char* name;
  };

  static const RefTableEntry ref_table[] = {
  // Builtins
#define DEF_ENTRY_C(name, ignored) \
  { C_BUILTIN, \
    Builtins::c_##name, \
    "Builtins::" #name },

  BUILTIN_LIST_C(DEF_ENTRY_C)
#undef DEF_ENTRY_C

#define DEF_ENTRY_C(name, ignored) \
  { BUILTIN, \
    Builtins::k##name, \
    "Builtins::" #name },
#define DEF_ENTRY_A(name, kind, state, extra) DEF_ENTRY_C(name, ignored)

  BUILTIN_LIST_C(DEF_ENTRY_C)
  BUILTIN_LIST_A(DEF_ENTRY_A)
  BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A)
#undef DEF_ENTRY_C
#undef DEF_ENTRY_A

  // Runtime functions
#define RUNTIME_ENTRY(name, nargs, ressize) \
  { RUNTIME_FUNCTION, \
    Runtime::k##name, \
    "Runtime::" #name },

  RUNTIME_FUNCTION_LIST(RUNTIME_ENTRY)
  INLINE_OPTIMIZED_FUNCTION_LIST(RUNTIME_ENTRY)
#undef RUNTIME_ENTRY

#define RUNTIME_HIDDEN_ENTRY(name, nargs, ressize) \
  { RUNTIME_FUNCTION, \
    Runtime::kHidden##name, \
    "Runtime::Hidden" #name },

  RUNTIME_HIDDEN_FUNCTION_LIST(RUNTIME_HIDDEN_ENTRY)
#undef RUNTIME_HIDDEN_ENTRY

#define INLINE_OPTIMIZED_ENTRY(name, nargs, ressize) \
  { RUNTIME_FUNCTION, \
    Runtime::kInlineOptimized##name, \
    "Runtime::" #name },

  INLINE_OPTIMIZED_FUNCTION_LIST(INLINE_OPTIMIZED_ENTRY)
#undef INLINE_OPTIMIZED_ENTRY

  // IC utilities
#define IC_ENTRY(name) \
  { IC_UTILITY, \
    IC::k##name, \
    "IC::" #name },

  IC_UTIL_LIST(IC_ENTRY)
#undef IC_ENTRY
  };  // end of ref_table[].

  for (size_t i = 0; i < ARRAY_SIZE(ref_table); ++i) {
    AddFromId(ref_table[i].type,
              ref_table[i].id,
              ref_table[i].name,
              isolate);
  }

  // Stat counters
  struct StatsRefTableEntry {
    StatsCounter* (Counters::*counter)();
    uint16_t id;
    const char* name;
  };

  const StatsRefTableEntry stats_ref_table[] = {
#define COUNTER_ENTRY(name, caption) \
  { &Counters::name, \
    Counters::k_##name, \
    "Counters::" #name },

  STATS_COUNTER_LIST_1(COUNTER_ENTRY)
  STATS_COUNTER_LIST_2(COUNTER_ENTRY)
#undef COUNTER_ENTRY
  };  // end of stats_ref_table[].

  Counters* counters = isolate->counters();
  for (size_t i = 0; i < ARRAY_SIZE(stats_ref_table); ++i) {
    Add(reinterpret_cast<Address>(GetInternalPointer(
            (counters->*(stats_ref_table[i].counter))())),
        STATS_COUNTER,
        stats_ref_table[i].id,
        stats_ref_table[i].name);
  }

  // Top addresses

  const char* AddressNames[] = {
#define BUILD_NAME_LITERAL(CamelName, hacker_name) \
    "Isolate::" #hacker_name "_address",
    FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL)
    NULL
#undef BUILD_NAME_LITERAL
  };

  for (uint16_t i = 0; i < Isolate::kIsolateAddressCount; ++i) {
    Add(isolate->get_address_from_id((Isolate::AddressId)i),
        TOP_ADDRESS, i, AddressNames[i]);
  }

  // Accessors
#define ACCESSOR_INFO_DECLARATION(name) \
  Add(FUNCTION_ADDR(&Accessors::name##Getter), \
      ACCESSOR, \
      Accessors::k##name##Getter, \
      "Accessors::" #name "Getter"); \
  Add(FUNCTION_ADDR(&Accessors::name##Setter), \
      ACCESSOR, \
      Accessors::k##name##Setter, \
      "Accessors::" #name "Setter");
  ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION)
#undef ACCESSOR_INFO_DECLARATION

  StubCache* stub_cache = isolate->stub_cache();

  // Stub cache tables
  Add(stub_cache->key_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      1,
      "StubCache::primary_->key");
  Add(stub_cache->value_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      2,
      "StubCache::primary_->value");
  Add(stub_cache->map_reference(StubCache::kPrimary).address(),
      STUB_CACHE_TABLE,
      3,
      "StubCache::primary_->map");
  Add(stub_cache->key_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      4,
      "StubCache::secondary_->key");
  Add(stub_cache->value_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      5,
      "StubCache::secondary_->value");
  Add(stub_cache->map_reference(StubCache::kSecondary).address(),
      STUB_CACHE_TABLE,
      6,
      "StubCache::secondary_->map");

  // Runtime entries
  Add(ExternalReference::delete_handle_scope_extensions(isolate).address(),
      RUNTIME_ENTRY,
      4,
      "HandleScope::DeleteExtensions");
  Add(ExternalReference::
          incremental_marking_record_write_function(isolate).address(),
      RUNTIME_ENTRY,
      5,
      "IncrementalMarking::RecordWrite");
  Add(ExternalReference::store_buffer_overflow_function(isolate).address(),
      RUNTIME_ENTRY,
      6,
      "StoreBuffer::StoreBufferOverflow");

  // Miscellaneous
  Add(ExternalReference::roots_array_start(isolate).address(),
      UNCLASSIFIED,
      3,
      "Heap::roots_array_start()");
  Add(ExternalReference::address_of_stack_limit(isolate).address(),
      UNCLASSIFIED,
      4,
      "StackGuard::address_of_jslimit()");
  Add(ExternalReference::address_of_real_stack_limit(isolate).address(),
      UNCLASSIFIED,
      5,
      "StackGuard::address_of_real_jslimit()");
#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(),
      UNCLASSIFIED,
      6,
      "RegExpStack::limit_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_address(
          isolate).address(),
      UNCLASSIFIED,
      7,
      "RegExpStack::memory_address()");
  Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(),
      UNCLASSIFIED,
      8,
      "RegExpStack::memory_size()");
  Add(ExternalReference::address_of_static_offsets_vector(isolate).address(),
      UNCLASSIFIED,
      9,
      "OffsetsVector::static_offsets_vector");
#endif  // V8_INTERPRETED_REGEXP
  Add(ExternalReference::new_space_start(isolate).address(),
      UNCLASSIFIED,
      10,
      "Heap::NewSpaceStart()");
  Add(ExternalReference::new_space_mask(isolate).address(),
      UNCLASSIFIED,
      11,
      "Heap::NewSpaceMask()");
  Add(ExternalReference::new_space_allocation_limit_address(isolate).address(),
      UNCLASSIFIED,
      14,
      "Heap::NewSpaceAllocationLimitAddress()");
  Add(ExternalReference::new_space_allocation_top_address(isolate).address(),
      UNCLASSIFIED,
      15,
      "Heap::NewSpaceAllocationTopAddress()");
  Add(ExternalReference::debug_break(isolate).address(),
      UNCLASSIFIED,
      16,
      "Debug::Break()");
  Add(ExternalReference::debug_step_in_fp_address(isolate).address(),
      UNCLASSIFIED,
      17,
      "Debug::step_in_fp_addr()");
  Add(ExternalReference::mod_two_doubles_operation(isolate).address(),
      UNCLASSIFIED,
      22,
      "mod_two_doubles");
#ifndef V8_INTERPRETED_REGEXP
  Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(),
      UNCLASSIFIED,
      24,
      "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()");
  Add(ExternalReference::re_check_stack_guard_state(isolate).address(),
      UNCLASSIFIED,
      25,
      "RegExpMacroAssembler*::CheckStackGuardState()");
  Add(ExternalReference::re_grow_stack(isolate).address(),
      UNCLASSIFIED,
      26,
      "NativeRegExpMacroAssembler::GrowStack()");
  Add(ExternalReference::re_word_character_map().address(),
      UNCLASSIFIED,
      27,
      "NativeRegExpMacroAssembler::word_character_map");
#endif  // V8_INTERPRETED_REGEXP
  // Keyed lookup cache.
  Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(),
      UNCLASSIFIED,
      28,
      "KeyedLookupCache::keys()");
  Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(),
      UNCLASSIFIED,
      29,
      "KeyedLookupCache::field_offsets()");
  Add(ExternalReference::handle_scope_next_address(isolate).address(),
      UNCLASSIFIED,
      31,
      "HandleScope::next");
  Add(ExternalReference::handle_scope_limit_address(isolate).address(),
      UNCLASSIFIED,
      32,
      "HandleScope::limit");
  Add(ExternalReference::handle_scope_level_address(isolate).address(),
      UNCLASSIFIED,
      33,
      "HandleScope::level");
  Add(ExternalReference::new_deoptimizer_function(isolate).address(),
      UNCLASSIFIED,
      34,
      "Deoptimizer::New()");
  Add(ExternalReference::compute_output_frames_function(isolate).address(),
      UNCLASSIFIED,
      35,
      "Deoptimizer::ComputeOutputFrames()");
  Add(ExternalReference::address_of_min_int().address(),
      UNCLASSIFIED,
      36,
      "LDoubleConstant::min_int");
  Add(ExternalReference::address_of_one_half().address(),
      UNCLASSIFIED,
      37,
      "LDoubleConstant::one_half");
  Add(ExternalReference::isolate_address(isolate).address(),
      UNCLASSIFIED,
      38,
      "isolate");
  Add(ExternalReference::address_of_minus_zero().address(),
      UNCLASSIFIED,
      39,
      "LDoubleConstant::minus_zero");
  Add(ExternalReference::address_of_negative_infinity().address(),
      UNCLASSIFIED,
      40,
      "LDoubleConstant::negative_infinity");
  Add(ExternalReference::power_double_double_function(isolate).address(),
      UNCLASSIFIED,
      41,
      "power_double_double_function");
  Add(ExternalReference::power_double_int_function(isolate).address(),
      UNCLASSIFIED,
      42,
      "power_double_int_function");
  Add(ExternalReference::store_buffer_top(isolate).address(),
      UNCLASSIFIED,
      43,
      "store_buffer_top");
  Add(ExternalReference::address_of_canonical_non_hole_nan().address(),
      UNCLASSIFIED,
      44,
      "canonical_nan");
  Add(ExternalReference::address_of_the_hole_nan().address(),
      UNCLASSIFIED,
      45,
      "the_hole_nan");
  Add(ExternalReference::get_date_field_function(isolate).address(),
      UNCLASSIFIED,
      46,
      "JSDate::GetField");
  Add(ExternalReference::date_cache_stamp(isolate).address(),
      UNCLASSIFIED,
      47,
      "date_cache_stamp");
  Add(ExternalReference::address_of_pending_message_obj(isolate).address(),
      UNCLASSIFIED,
      48,
      "address_of_pending_message_obj");
  Add(ExternalReference::address_of_has_pending_message(isolate).address(),
      UNCLASSIFIED,
      49,
      "address_of_has_pending_message");
  Add(ExternalReference::address_of_pending_message_script(isolate).address(),
      UNCLASSIFIED,
      50,
      "pending_message_script");
  Add(ExternalReference::get_make_code_young_function(isolate).address(),
      UNCLASSIFIED,
      51,
      "Code::MakeCodeYoung");
  Add(ExternalReference::cpu_features().address(),
      UNCLASSIFIED,
      52,
      "cpu_features");
  Add(ExternalReference(Runtime::kHiddenAllocateInNewSpace, isolate).address(),
      UNCLASSIFIED,
      53,
      "Runtime::AllocateInNewSpace");
  Add(ExternalReference(
          Runtime::kHiddenAllocateInTargetSpace, isolate).address(),
      UNCLASSIFIED,
      54,
      "Runtime::AllocateInTargetSpace");
  Add(ExternalReference::old_pointer_space_allocation_top_address(
          isolate).address(),
      UNCLASSIFIED,
      55,
      "Heap::OldPointerSpaceAllocationTopAddress");
  Add(ExternalReference::old_pointer_space_allocation_limit_address(
          isolate).address(),
      UNCLASSIFIED,
      56,
      "Heap::OldPointerSpaceAllocationLimitAddress");
  Add(ExternalReference::old_data_space_allocation_top_address(
          isolate).address(),
      UNCLASSIFIED,
      57,
      "Heap::OldDataSpaceAllocationTopAddress");
  Add(ExternalReference::old_data_space_allocation_limit_address(
          isolate).address(),
      UNCLASSIFIED,
      58,
      "Heap::OldDataSpaceAllocationLimitAddress");
  Add(ExternalReference::allocation_sites_list_address(isolate).address(),
      UNCLASSIFIED,
      59,
      "Heap::allocation_sites_list_address()");
  Add(ExternalReference::address_of_uint32_bias().address(),
      UNCLASSIFIED,
      60,
      "uint32_bias");
  Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(),
      UNCLASSIFIED,
      61,
      "Code::MarkCodeAsExecuted");

  Add(ExternalReference::is_profiling_address(isolate).address(),
      UNCLASSIFIED,
      62,
      "CpuProfiler::is_profiling");

  Add(ExternalReference::scheduled_exception_address(isolate).address(),
      UNCLASSIFIED,
      63,
      "Isolate::scheduled_exception");

  Add(ExternalReference::invoke_function_callback(isolate).address(),
      UNCLASSIFIED,
      64,
      "InvokeFunctionCallback");

  Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(),
      UNCLASSIFIED,
      65,
      "InvokeAccessorGetterCallback");

  // Debug addresses
  Add(ExternalReference::debug_after_break_target_address(isolate).address(),
      UNCLASSIFIED,
      66,
      "Debug::after_break_target_address()");

  Add(ExternalReference::debug_restarter_frame_function_pointer_address(
          isolate).address(),
      UNCLASSIFIED,
      67,
      "Debug::restarter_frame_function_pointer_address()");

  // Add a small set of deopt entry addresses to the encoder without generating
  // the deopt table code, which isn't possible at deserialization time.
  HandleScope scope(isolate);
  for (int entry = 0; entry < kDeoptTableSerializeEntryCount; ++entry) {
    Address address = Deoptimizer::GetDeoptimizationEntry(
        isolate,
        entry,
        Deoptimizer::LAZY,
        Deoptimizer::CALCULATE_ENTRY_ADDRESS);
    Add(address, LAZY_DEOPTIMIZATION, entry, "lazy_deopt");
  }
}


ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate)
    : encodings_(HashMap::PointersMatch),
      isolate_(isolate) {
  ExternalReferenceTable* external_references =
      ExternalReferenceTable::instance(isolate_);
  for (int i = 0; i < external_references->size(); ++i) {
    Put(external_references->address(i), i);
  }
}


uint32_t ExternalReferenceEncoder::Encode(Address key) const {
  int index = IndexOf(key);
  ASSERT(key == NULL || index >= 0);
  return index >= 0 ?
      ExternalReferenceTable::instance(isolate_)->code(index) : 0;
}


const char* ExternalReferenceEncoder::NameOfAddress(Address key) const {
  int index = IndexOf(key);
  return index >= 0 ?
      ExternalReferenceTable::instance(isolate_)->name(index) : NULL;
}


int ExternalReferenceEncoder::IndexOf(Address key) const {
  if (key == NULL) return -1;
  HashMap::Entry* entry =
      const_cast<HashMap&>(encodings_).Lookup(key, Hash(key), false);
  return entry == NULL
      ? -1
      : static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
}


void ExternalReferenceEncoder::Put(Address key, int index) {
  HashMap::Entry* entry = encodings_.Lookup(key, Hash(key), true);
  entry->value = reinterpret_cast<void*>(index);
}


ExternalReferenceDecoder::ExternalReferenceDecoder(Isolate* isolate)
    : encodings_(NewArray<Address*>(kTypeCodeCount)),
      isolate_(isolate) {
  ExternalReferenceTable* external_references =
      ExternalReferenceTable::instance(isolate_);
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    int max = external_references->max_id(type) + 1;
    encodings_[type] = NewArray<Address>(max + 1);
  }
  for (int i = 0; i < external_references->size(); ++i) {
    Put(external_references->code(i), external_references->address(i));
  }
}
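
// Illustrative only: given the per-type rows allocated in the constructor
// above, Put() (declared in serialize.h) stores each address under the two
// halves of its code, roughly
//
//   encodings_[code >> 16][code & 0xFFFF] = address;
//
// so that Decode() amounts to two array lookups.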


ExternalReferenceDecoder::~ExternalReferenceDecoder() {
  for (int type = kFirstTypeCode; type < kTypeCodeCount; ++type) {
    DeleteArray(encodings_[type]);
  }
  DeleteArray(encodings_);
}


class CodeAddressMap: public CodeEventLogger {
 public:
  explicit CodeAddressMap(Isolate* isolate)
      : isolate_(isolate) {
    isolate->logger()->addCodeEventListener(this);
  }

  virtual ~CodeAddressMap() {
    isolate_->logger()->removeCodeEventListener(this);
  }

  virtual void CodeMoveEvent(Address from, Address to) {
    address_to_name_map_.Move(from, to);
  }

  virtual void CodeDisableOptEvent(Code* code, SharedFunctionInfo* shared) {
  }

  virtual void CodeDeleteEvent(Address from) {
    address_to_name_map_.Remove(from);
  }

  const char* Lookup(Address address) {
    return address_to_name_map_.Lookup(address);
  }

 private:
  class NameMap {
   public:
    NameMap() : impl_(HashMap::PointersMatch) {}

    ~NameMap() {
      for (HashMap::Entry* p = impl_.Start(); p != NULL; p = impl_.Next(p)) {
        DeleteArray(static_cast<const char*>(p->value));
      }
    }

    void Insert(Address code_address, const char* name, int name_size) {
      HashMap::Entry* entry = FindOrCreateEntry(code_address);
      if (entry->value == NULL) {
        entry->value = CopyName(name, name_size);
      }
    }

    const char* Lookup(Address code_address) {
      HashMap::Entry* entry = FindEntry(code_address);
      return (entry != NULL) ? static_cast<const char*>(entry->value) : NULL;
    }

    void Remove(Address code_address) {
      HashMap::Entry* entry = FindEntry(code_address);
      if (entry != NULL) {
        DeleteArray(static_cast<char*>(entry->value));
        RemoveEntry(entry);
      }
    }

    void Move(Address from, Address to) {
      if (from == to) return;
      HashMap::Entry* from_entry = FindEntry(from);
      ASSERT(from_entry != NULL);
      void* value = from_entry->value;
      RemoveEntry(from_entry);
      HashMap::Entry* to_entry = FindOrCreateEntry(to);
      ASSERT(to_entry->value == NULL);
      to_entry->value = value;
    }

   private:
    static char* CopyName(const char* name, int name_size) {
      char* result = NewArray<char>(name_size + 1);
      for (int i = 0; i < name_size; ++i) {
        char c = name[i];
        if (c == '\0') c = ' ';
        result[i] = c;
      }
      result[name_size] = '\0';
      return result;
    }

    HashMap::Entry* FindOrCreateEntry(Address code_address) {
      return impl_.Lookup(code_address, ComputePointerHash(code_address), true);
    }

    HashMap::Entry* FindEntry(Address code_address) {
      return impl_.Lookup(code_address,
                          ComputePointerHash(code_address),
                          false);
    }

    void RemoveEntry(HashMap::Entry* entry) {
      impl_.Remove(entry->key, entry->hash);
    }

    HashMap impl_;

    DISALLOW_COPY_AND_ASSIGN(NameMap);
  };

  virtual void LogRecordedBuffer(Code* code,
                                 SharedFunctionInfo*,
                                 const char* name,
                                 int length) {
    address_to_name_map_.Insert(code->address(), name, length);
  }

  NameMap address_to_name_map_;
  Isolate* isolate_;
};


Deserializer::Deserializer(SnapshotByteSource* source)
    : isolate_(NULL),
      source_(source),
      external_reference_decoder_(NULL) {
  for (int i = 0; i < LAST_SPACE + 1; i++) {
    reservations_[i] = kUninitializedReservation;
  }
}


void Deserializer::FlushICacheForNewCodeObjects() {
  PageIterator it(isolate_->heap()->code_space());
  while (it.has_next()) {
    Page* p = it.next();
    CPU::FlushICache(p->area_start(), p->area_end() - p->area_start());
  }
}


void Deserializer::Deserialize(Isolate* isolate) {
  isolate_ = isolate;
  ASSERT(isolate_ != NULL);
  isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
  // No active threads.
  ASSERT_EQ(NULL, isolate_->thread_manager()->FirstThreadStateInUse());
  // No active handles.
  ASSERT(isolate_->handle_scope_implementer()->blocks()->is_empty());
  ASSERT_EQ(NULL, external_reference_decoder_);
  external_reference_decoder_ = new ExternalReferenceDecoder(isolate);
  isolate_->heap()->IterateSmiRoots(this);
  isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
  isolate_->heap()->RepairFreeListsAfterBoot();
  isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);

  isolate_->heap()->set_native_contexts_list(
      isolate_->heap()->undefined_value());
  isolate_->heap()->set_array_buffers_list(
      isolate_->heap()->undefined_value());

  // The allocation site list is built during root iteration, but if no sites
  // were encountered then it needs to be initialized to undefined.
  if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
    isolate_->heap()->set_allocation_sites_list(
        isolate_->heap()->undefined_value());
  }

  isolate_->heap()->InitializeWeakObjectToCodeTable();

  // Update data pointers to the external strings containing natives sources.
  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
    Object* source = isolate_->heap()->natives_source_cache()->get(i);
    if (!source->IsUndefined()) {
      ExternalAsciiString::cast(source)->update_data_cache();
    }
  }

  FlushICacheForNewCodeObjects();

  // Issue code events for newly deserialized code objects.
  LOG_CODE_EVENT(isolate_, LogCodeObjects());
  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
}


void Deserializer::DeserializePartial(Isolate* isolate, Object** root) {
  isolate_ = isolate;
  for (int i = NEW_SPACE; i < kNumberOfSpaces; i++) {
    ASSERT(reservations_[i] != kUninitializedReservation);
  }
  isolate_->heap()->ReserveSpace(reservations_, &high_water_[0]);
  if (external_reference_decoder_ == NULL) {
    external_reference_decoder_ = new ExternalReferenceDecoder(isolate);
  }

  // Keep track of the code space start and end pointers in case new
  // code objects were deserialized.
  OldSpace* code_space = isolate_->heap()->code_space();
  Address start_address = code_space->top();
  VisitPointer(root);

  // No code is deserialized here. If this assert fires, that has changed
  // and logging should be added to notify the profiler et al. of the new
  // code.
  CHECK_EQ(start_address, code_space->top());
}


Deserializer::~Deserializer() {
  // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed.
  // ASSERT(source_->AtEOF());
  if (external_reference_decoder_) {
    delete external_reference_decoder_;
    external_reference_decoder_ = NULL;
  }
}


// This is called on the roots. It is the driver of the deserialization
// process. It is also called on the body of each function.
void Deserializer::VisitPointers(Object** start, Object** end) {
  // The space must be new space. Any other space would cause ReadChunk to try
  // to update the remembered set using NULL as the address.
  ReadChunk(start, end, NEW_SPACE, NULL);
}


void Deserializer::RelinkAllocationSite(AllocationSite* site) {
  if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) {
    site->set_weak_next(isolate_->heap()->undefined_value());
  } else {
    site->set_weak_next(isolate_->heap()->allocation_sites_list());
  }
  isolate_->heap()->set_allocation_sites_list(site);
}


// This routine writes the new object into the pointer provided. The reason
// for this strange interface is that otherwise the object is written very
// late, which means the FreeSpace map is not set up by the time we need to
// use it to mark the space at the end of a page free.
void Deserializer::ReadObject(int space_number,
                              Object** write_back) {
  int size = source_->GetInt() << kObjectAlignmentBits;
  Address address = Allocate(space_number, size);
  HeapObject* obj = HeapObject::FromAddress(address);
  *write_back = obj;
  Object** current = reinterpret_cast<Object**>(address);
  Object** limit = current + (size >> kPointerSizeLog2);
  if (FLAG_log_snapshot_positions) {
    LOG(isolate_, SnapshotPositionEvent(address, source_->position()));
  }
  ReadChunk(current, limit, space_number, address);

  // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
  // as a (weak) root. If this root is relocated correctly,
  // RelinkAllocationSite() isn't necessary.
  if (obj->IsAllocationSite()) {
    RelinkAllocationSite(AllocationSite::cast(obj));
  }

#ifdef DEBUG
  bool is_codespace = (space_number == CODE_SPACE);
  ASSERT(obj->IsCode() == is_codespace);
#endif
}

void Deserializer::ReadChunk(Object** current,
                             Object** limit,
                             int source_space,
                             Address current_object_address) {
  Isolate* const isolate = isolate_;
  // Write barrier support costs around 1% in startup time. In fact there
  // are no new space objects in current boot snapshots, so it's not needed,
  // but that may change.
  bool write_barrier_needed = (current_object_address != NULL &&
                               source_space != NEW_SPACE &&
                               source_space != CELL_SPACE &&
                               source_space != PROPERTY_CELL_SPACE &&
                               source_space != CODE_SPACE &&
                               source_space != OLD_DATA_SPACE);
  while (current < limit) {
    int data = source_->Get();
    switch (data) {
#define CASE_STATEMENT(where, how, within, space_number)                      \
      case where + how + within + space_number:                               \
        ASSERT((where & ~kPointedToMask) == 0);                               \
        ASSERT((how & ~kHowToCodeMask) == 0);                                 \
        ASSERT((within & ~kWhereToPointMask) == 0);                           \
        ASSERT((space_number & ~kSpaceMask) == 0);

#define CASE_BODY(where, how, within, space_number_if_any)                    \
      {                                                                       \
        bool emit_write_barrier = false;                                      \
        bool current_was_incremented = false;                                 \
        int space_number = space_number_if_any == kAnyOldSpace ?              \
                           (data & kSpaceMask) : space_number_if_any;         \
        if (where == kNewObject && how == kPlain && within == kStartOfObject) {\
          ReadObject(space_number, current);                                  \
          emit_write_barrier = (space_number == NEW_SPACE);                   \
        } else {                                                              \
          Object* new_object = NULL;  /* May not be a real Object pointer. */ \
          if (where == kNewObject) {                                          \
            ReadObject(space_number, &new_object);                            \
          } else if (where == kRootArray) {                                   \
            int root_id = source_->GetInt();                                  \
            new_object = isolate->heap()->roots_array_start()[root_id];       \
            emit_write_barrier = isolate->heap()->InNewSpace(new_object);     \
          } else if (where == kPartialSnapshotCache) {                        \
            int cache_index = source_->GetInt();                              \
            new_object = isolate->serialize_partial_snapshot_cache()          \
                [cache_index];                                                \
            emit_write_barrier = isolate->heap()->InNewSpace(new_object);     \
          } else if (where == kExternalReference) {                           \
            int skip = source_->GetInt();                                     \
            current = reinterpret_cast<Object**>(reinterpret_cast<Address>(   \
                current) + skip);                                             \
            int reference_id = source_->GetInt();                             \
            Address address = external_reference_decoder_->                   \
                Decode(reference_id);                                         \
            new_object = reinterpret_cast<Object*>(address);                  \
          } else if (where == kBackref) {                                     \
            emit_write_barrier = (space_number == NEW_SPACE);                 \
            new_object = GetAddressFromEnd(data & kSpaceMask);                \
          } else {                                                            \
            ASSERT(where == kBackrefWithSkip);                                \
            int skip = source_->GetInt();                                     \
            current = reinterpret_cast<Object**>(                             \
                reinterpret_cast<Address>(current) + skip);                   \
            emit_write_barrier = (space_number == NEW_SPACE);                 \
            new_object = GetAddressFromEnd(data & kSpaceMask);                \
          }                                                                   \
          if (within == kInnerPointer) {                                      \
            if (space_number != CODE_SPACE || new_object->IsCode()) {         \
              Code* new_code_object = reinterpret_cast<Code*>(new_object);    \
              new_object = reinterpret_cast<Object*>(                         \
                  new_code_object->instruction_start());                      \
            } else {                                                          \
              ASSERT(space_number == CODE_SPACE);                             \
              Cell* cell = Cell::cast(new_object);                            \
              new_object = reinterpret_cast<Object*>(                         \
                  cell->ValueAddress());                                      \
            }                                                                 \
          }                                                                   \
          if (how == kFromCode) {                                             \
            Address location_of_branch_data =                                 \
                reinterpret_cast<Address>(current);                           \
            Assembler::deserialization_set_special_target_at(                 \
                location_of_branch_data,                                      \
                Code::cast(HeapObject::FromAddress(current_object_address)),  \
                reinterpret_cast<Address>(new_object));                       \
            location_of_branch_data += Assembler::kSpecialTargetSize;         \
            current = reinterpret_cast<Object**>(location_of_branch_data);    \
            current_was_incremented = true;                                   \
          } else {                                                            \
            *current = new_object;                                            \
          }                                                                   \
        }                                                                     \
        if (emit_write_barrier && write_barrier_needed) {                     \
          Address current_address = reinterpret_cast<Address>(current);       \
          isolate->heap()->RecordWrite(                                       \
              current_object_address,                                         \
              static_cast<int>(current_address - current_object_address));    \
        }                                                                     \
        if (!current_was_incremented) {                                       \
          current++;                                                          \
        }                                                                     \
        break;                                                                \
      }

// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with six fall-through
// cases and one body.
#define ALL_SPACES(where, how, within)                                        \
      CASE_STATEMENT(where, how, within, NEW_SPACE)                           \
      CASE_BODY(where, how, within, NEW_SPACE)                                \
      CASE_STATEMENT(where, how, within, OLD_DATA_SPACE)                      \
      CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE)                   \
      CASE_STATEMENT(where, how, within, CODE_SPACE)                          \
      CASE_STATEMENT(where, how, within, CELL_SPACE)                          \
      CASE_STATEMENT(where, how, within, PROPERTY_CELL_SPACE)                 \
      CASE_STATEMENT(where, how, within, MAP_SPACE)                           \
      CASE_BODY(where, how, within, kAnyOldSpace)

#define FOUR_CASES(byte_code) \
      case byte_code: \
      case byte_code + 1: \
      case byte_code + 2: \
      case byte_code + 3:

#define SIXTEEN_CASES(byte_code) \
      FOUR_CASES(byte_code) \
      FOUR_CASES(byte_code + 4) \
      FOUR_CASES(byte_code + 8) \
      FOUR_CASES(byte_code + 12)

#define COMMON_RAW_LENGTHS(f) \
      f(1) \
      f(2) \
      f(3) \
      f(4) \
      f(5) \
      f(6) \
      f(7) \
      f(8) \
      f(9) \
      f(10) \
      f(11) \
      f(12) \
      f(13) \
      f(14) \
      f(15) \
      f(16) \
      f(17) \
      f(18) \
      f(19) \
      f(20) \
      f(21) \
      f(22) \
      f(23) \
      f(24) \
      f(25) \
      f(26) \
      f(27) \
      f(28) \
      f(29) \
      f(30) \
      f(31)

      // We generate 31 cases and bodies that process special tags that combine
      // the raw data tag and the length into one byte.
#define RAW_CASE(index)                                                       \
      case kRawData + index: {                                                \
        byte* raw_data_out = reinterpret_cast<byte*>(current);                \
        source_->CopyRaw(raw_data_out, index * kPointerSize);                 \
        current =                                                             \
            reinterpret_cast<Object**>(raw_data_out + index * kPointerSize);  \
        break;                                                                \
      }
      COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE

      // Deserialize a chunk of raw data that doesn't have one of the popular
      // lengths.
      case kRawData: {
        int size = source_->GetInt();
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        source_->CopyRaw(raw_data_out, size);
        break;
      }

      SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance)
      SIXTEEN_CASES(kRootArrayConstants + kNoSkipDistance + 16) {
        int root_id = RootArrayConstantFromByteCode(data);
        Object* object = isolate->heap()->roots_array_start()[root_id];
        ASSERT(!isolate->heap()->InNewSpace(object));
        *current++ = object;
        break;
      }

      SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance)
      SIXTEEN_CASES(kRootArrayConstants + kHasSkipDistance + 16) {
        int root_id = RootArrayConstantFromByteCode(data);
        int skip = source_->GetInt();
        current = reinterpret_cast<Object**>(
            reinterpret_cast<intptr_t>(current) + skip);
        Object* object = isolate->heap()->roots_array_start()[root_id];
        ASSERT(!isolate->heap()->InNewSpace(object));
        *current++ = object;
        break;
      }

      case kRepeat: {
        int repeats = source_->GetInt();
        Object* object = current[-1];
        ASSERT(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) current[i] = object;
        current += repeats;
        break;
      }

      STATIC_ASSERT(kRootArrayNumberOfConstantEncodings ==
                    Heap::kOldSpaceRoots);
      STATIC_ASSERT(kMaxRepeats == 13);
      case kConstantRepeat:
      FOUR_CASES(kConstantRepeat + 1)
      FOUR_CASES(kConstantRepeat + 5)
      FOUR_CASES(kConstantRepeat + 9) {
        int repeats = RepeatsForCode(data);
        Object* object = current[-1];
        ASSERT(!isolate->heap()->InNewSpace(object));
        for (int i = 0; i < repeats; i++) current[i] = object;
        current += repeats;
        break;
      }

      // Deserialize a new object and write a pointer to it to the current
      // object.
      ALL_SPACES(kNewObject, kPlain, kStartOfObject)
      // Support for direct instruction pointers in functions. It's an inner
      // pointer because it points at the entry point, not at the start of the
      // code object.
      CASE_STATEMENT(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
      CASE_BODY(kNewObject, kPlain, kInnerPointer, CODE_SPACE)
      // Deserialize a new code object and write a pointer to its first
      // instruction to the current code object.
      ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
      // Find a recently deserialized object using its offset from the current
      // allocation point and write a pointer to it to the current object.
      ALL_SPACES(kBackref, kPlain, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL
      // Deserialize a new object from pointer found in code and write
      // a pointer to it to the current object. Required only for MIPS or ARM
      // with ool constant pool, and omitted on the other architectures because
      // it is fully unrolled and would cause bloat.
      ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to it to the current
      // object. Required only for MIPS or ARM with ool constant pool.
      ALL_SPACES(kBackref, kFromCode, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
#endif
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to its first instruction
      // to the current code object or the instruction pointer in a function
      // object.
      ALL_SPACES(kBackref, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
      ALL_SPACES(kBackref, kPlain, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kPlain, kInnerPointer)
      // Find an object in the roots array and write a pointer to it to the
      // current object.
      CASE_STATEMENT(kRootArray, kPlain, kStartOfObject, 0)
      CASE_BODY(kRootArray, kPlain, kStartOfObject, 0)
      // Find an object in the partial snapshots cache and write a pointer to
      // it to the current object.
      CASE_STATEMENT(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
      CASE_BODY(kPartialSnapshotCache,
                kPlain,
                kStartOfObject,
                0)
      // Find a code entry in the partial snapshots cache and write a pointer
      // to it to the current object.
      CASE_STATEMENT(kPartialSnapshotCache, kPlain, kInnerPointer, 0)
      CASE_BODY(kPartialSnapshotCache,
                kPlain,
                kInnerPointer,
                0)
      // Find an external reference and write a pointer to it to the current
      // object.
      CASE_STATEMENT(kExternalReference, kPlain, kStartOfObject, 0)
      CASE_BODY(kExternalReference,
                kPlain,
                kStartOfObject,
                0)
      // Find an external reference and write a pointer to it in the current
      // code object.
      CASE_STATEMENT(kExternalReference, kFromCode, kStartOfObject, 0)
      CASE_BODY(kExternalReference,
                kFromCode,
                kStartOfObject,
                0)

#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES

      case kSkip: {
        int size = source_->GetInt();
        current = reinterpret_cast<Object**>(
            reinterpret_cast<intptr_t>(current) + size);
        break;
      }

      case kNativesStringResource: {
        int index = source_->Get();
        Vector<const char> source_vector = Natives::GetRawScriptSource(index);
        NativesExternalStringResource* resource =
            new NativesExternalStringResource(isolate->bootstrapper(),
                                              source_vector.start(),
                                              source_vector.length());
        *current++ = reinterpret_cast<Object*>(resource);
        break;
      }

      case kSynchronize: {
        // Getting here indicates a mismatch between the number of GC roots
        // when serializing and deserializing.
        UNREACHABLE();
      }

      default:
        UNREACHABLE();
    }
  }
  ASSERT_EQ(limit, current);
}


void SnapshotByteSink::PutInt(uintptr_t integer, const char* description) {
  ASSERT(integer < 1 << 22);
  integer <<= 2;
  int bytes = 1;
  if (integer > 0xff) bytes = 2;
  if (integer > 0xffff) bytes = 3;
  integer |= bytes;
  Put(static_cast<int>(integer & 0xff), "IntPart1");
  if (bytes > 1) Put(static_cast<int>((integer >> 8) & 0xff), "IntPart2");
  if (bytes > 2) Put(static_cast<int>((integer >> 16) & 0xff), "IntPart3");
}
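
// Illustrative only: PutInt() emits a little-endian variable-length encoding
// in which the value is shifted left by two and the low two bits of the first
// byte carry the total byte count (1 to 3). A decoder matching this format
// would look roughly like:
//
//   int b0 = Get();
//   int bytes = b0 & 3;
//   uintptr_t result = b0;
//   if (bytes > 1) result |= static_cast<uintptr_t>(Get()) << 8;
//   if (bytes > 2) result |= static_cast<uintptr_t>(Get()) << 16;
//   return result >> 2;
//
// The matching decoder used throughout this file is
// SnapshotByteSource::GetInt().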


Serializer::Serializer(Isolate* isolate, SnapshotByteSink* sink)
    : isolate_(isolate),
      sink_(sink),
      external_reference_encoder_(new ExternalReferenceEncoder(isolate)),
      root_index_wave_front_(0),
      code_address_map_(NULL) {
  // The serializer is meant to be used only to generate initial heap images
  // from a context in which there is only one isolate.
  for (int i = 0; i <= LAST_SPACE; i++) {
    fullness_[i] = 0;
  }
}


Serializer::~Serializer() {
  delete external_reference_encoder_;
  if (code_address_map_ != NULL) delete code_address_map_;
}


void StartupSerializer::SerializeStrongReferences() {
  Isolate* isolate = this->isolate();
  // No active threads.
  CHECK_EQ(NULL, isolate->thread_manager()->FirstThreadStateInUse());
  // No active or weak handles.
  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
  CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
  CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
  // We don't support serializing installed extensions.
  CHECK(!isolate->has_installed_extensions());
  isolate->heap()->IterateSmiRoots(this);
  isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
}


void PartialSerializer::Serialize(Object** object) {
  this->VisitPointer(object);
  Pad();
}


bool Serializer::ShouldBeSkipped(Object** current) {
  Object** roots = isolate()->heap()->roots_array_start();
  return current == &roots[Heap::kStoreBufferTopRootIndex]
      || current == &roots[Heap::kStackLimitRootIndex]
      || current == &roots[Heap::kRealStackLimitRootIndex];
}


void Serializer::VisitPointers(Object** start, Object** end) {
  Isolate* isolate = this->isolate();

  for (Object** current = start; current < end; current++) {
    if (start == isolate->heap()->roots_array_start()) {
      root_index_wave_front_ =
          Max(root_index_wave_front_, static_cast<intptr_t>(current - start));
    }
    if (ShouldBeSkipped(current)) {
      sink_->Put(kSkip, "Skip");
      sink_->PutInt(kPointerSize, "SkipOneWord");
    } else if ((*current)->IsSmi()) {
      sink_->Put(kRawData + 1, "Smi");
      for (int i = 0; i < kPointerSize; i++) {
        sink_->Put(reinterpret_cast<byte*>(current)[i], "Byte");
      }
    } else {
      SerializeObject(*current, kPlain, kStartOfObject, 0);
    }
  }
}


// This ensures that the partial snapshot cache keeps things alive during GC and
// tracks their movement. When it is called during serialization of the startup
// snapshot nothing happens. When the partial (context) snapshot is created,
// this array is populated with the pointers that the partial snapshot will
// need. As that happens we emit serialized objects to the startup snapshot
// that correspond to the elements of this cache array. On deserialization we
// therefore need to visit the cache array. This fills it up with pointers to
// deserialized objects.
void SerializerDeserializer::Iterate(Isolate* isolate,
                                     ObjectVisitor* visitor) {
  if (isolate->serializer_enabled()) return;
  for (int i = 0; ; i++) {
    if (isolate->serialize_partial_snapshot_cache_length() <= i) {
      // Extend the array ready to get a value from the visitor when
      // deserializing.
      isolate->PushToPartialSnapshotCache(Smi::FromInt(0));
    }
    Object** cache = isolate->serialize_partial_snapshot_cache();
    visitor->VisitPointers(&cache[i], &cache[i + 1]);
    // Sentinel is the undefined object, which is a root so it will not
    // normally be found in the cache.
    if (cache[i] == isolate->heap()->undefined_value()) {
      break;
    }
  }
}


int PartialSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
  Isolate* isolate = this->isolate();

  for (int i = 0;
       i < isolate->serialize_partial_snapshot_cache_length();
       i++) {
    Object* entry = isolate->serialize_partial_snapshot_cache()[i];
    if (entry == heap_object) return i;
  }

  // We didn't find the object in the cache. So we add it to the cache and
  // then visit the pointer so that it becomes part of the startup snapshot
  // and we can refer to it from the partial snapshot.
  int length = isolate->serialize_partial_snapshot_cache_length();
  isolate->PushToPartialSnapshotCache(heap_object);
  startup_serializer_->VisitPointer(reinterpret_cast<Object**>(&heap_object));
  // We don't recurse from the startup snapshot generator into the partial
  // snapshot generator.
  ASSERT(length == isolate->serialize_partial_snapshot_cache_length() - 1);
  return length;
}


int Serializer::RootIndex(HeapObject* heap_object, HowToCode from) {
  Heap* heap = isolate()->heap();
  if (heap->InNewSpace(heap_object)) return kInvalidRootIndex;
  for (int i = 0; i < root_index_wave_front_; i++) {
    Object* root = heap->roots_array_start()[i];
    if (!root->IsSmi() && root == heap_object) {
#if defined(V8_TARGET_ARCH_MIPS) || V8_OOL_CONSTANT_POOL
      if (from == kFromCode) {
        // In order to avoid code bloat in the deserializer we don't have
        // support for the encoding that specifies a particular root should
        // be written from within code.
        return kInvalidRootIndex;
      }
#endif
      return i;
    }
  }
  return kInvalidRootIndex;
}


// Encode the location of an already deserialized object in order to write its
// location into a later object. We can encode the location as an offset from
// the start of the deserialized objects or as an offset backwards from the
// current allocation pointer.
void Serializer::SerializeReferenceToPreviousObject(
    int space,
    int address,
    HowToCode how_to_code,
    WhereToPoint where_to_point,
    int skip) {
  int offset = CurrentAllocationAddress(space) - address;
  // Shift out the bits that are always 0.
  offset >>= kObjectAlignmentBits;
  if (skip == 0) {
    sink_->Put(kBackref + how_to_code + where_to_point + space, "BackRefSer");
  } else {
    sink_->Put(kBackrefWithSkip + how_to_code + where_to_point + space,
               "BackRefSerWithSkip");
    sink_->PutInt(skip, "BackRefSkipDistance");
  }
  sink_->PutInt(offset, "offset");
}
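
// Illustrative only (values invented for the example): if 0x40 bytes have
// been allocated in |space| since some object was serialized, a back
// reference to that object is emitted as 0x40 >> kObjectAlignmentBits. The
// deserializer's kBackref cases undo this with GetAddressFromEnd(), counting
// the same distance back from its own allocation point in that space.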


void StartupSerializer::SerializeObject(
    Object* o,
    HowToCode how_to_code,
    WhereToPoint where_to_point,
    int skip) {
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);

  int root_index;
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }

  if (address_mapper_.IsMapped(heap_object)) {
    int space = SpaceOfObject(heap_object);
    int address = address_mapper_.MappedTo(heap_object);
    SerializeReferenceToPreviousObject(space,
                                       address,
                                       how_to_code,
                                       where_to_point,
                                       skip);
  } else {
    if (skip != 0) {
      sink_->Put(kSkip, "FlushPendingSkip");
      sink_->PutInt(skip, "SkipDistance");
    }

    // Object has not yet been serialized. Serialize it here.
    ObjectSerializer object_serializer(this,
                                       heap_object,
                                       sink_,
                                       how_to_code,
                                       where_to_point);
    object_serializer.Serialize();
  }
}


void StartupSerializer::SerializeWeakReferences() {
  // This phase comes right after the partial serialization (of the snapshot).
  // After we have done the partial serialization the partial snapshot cache
  // will contain some references needed to decode the partial snapshot. We
  // add one entry with 'undefined' which is the sentinel that the deserializer
  // uses to know it is done deserializing the array.
  Object* undefined = isolate()->heap()->undefined_value();
  VisitPointer(&undefined);
  isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
  Pad();
}


void Serializer::PutRoot(int root_index,
                         HeapObject* object,
                         SerializerDeserializer::HowToCode how_to_code,
                         SerializerDeserializer::WhereToPoint where_to_point,
                         int skip) {
  if (how_to_code == kPlain &&
      where_to_point == kStartOfObject &&
      root_index < kRootArrayNumberOfConstantEncodings &&
      !isolate()->heap()->InNewSpace(object)) {
    if (skip == 0) {
      sink_->Put(kRootArrayConstants + kNoSkipDistance + root_index,
                 "RootConstant");
    } else {
      sink_->Put(kRootArrayConstants + kHasSkipDistance + root_index,
                 "RootConstant");
      sink_->PutInt(skip, "SkipInPutRoot");
    }
  } else {
    if (skip != 0) {
      sink_->Put(kSkip, "SkipFromPutRoot");
      sink_->PutInt(skip, "SkipFromPutRootDistance");
    }
    sink_->Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
    sink_->PutInt(root_index, "root_index");
  }
}
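
// Illustrative only (root index invented for the example): an old-space root
// such as roots_array_start()[3] is emitted as the single opcode byte
// kRootArrayConstants + kNoSkipDistance + 3, which the deserializer's
// kRootArrayConstants cases turn back into a plain root-array load; anything
// that doesn't qualify takes the two-part kRootArray encoding instead.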


void PartialSerializer::SerializeObject(
    Object* o,
    HowToCode how_to_code,
    WhereToPoint where_to_point,
    int skip) {
  CHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);

  if (heap_object->IsMap()) {
    // The code-caches link to context-specific code objects, which
    // the startup and context serializers cannot currently handle.
    ASSERT(Map::cast(heap_object)->code_cache() ==
           heap_object->GetHeap()->empty_fixed_array());
  }

  int root_index;
  if ((root_index = RootIndex(heap_object, how_to_code)) != kInvalidRootIndex) {
    PutRoot(root_index, heap_object, how_to_code, where_to_point, skip);
    return;
  }

  if (ShouldBeInThePartialSnapshotCache(heap_object)) {
    if (skip != 0) {
      sink_->Put(kSkip, "SkipFromSerializeObject");
      sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
    }

    int cache_index = PartialSnapshotCacheIndex(heap_object);
    sink_->Put(kPartialSnapshotCache + how_to_code + where_to_point,
               "PartialSnapshotCache");
    sink_->PutInt(cache_index, "partial_snapshot_cache_index");
    return;
  }

  // Pointers from the partial snapshot to the objects in the startup snapshot
  // should go through the root array or through the partial snapshot cache.
  // If this is not the case you may have to add something to the root array.
  ASSERT(!startup_serializer_->address_mapper()->IsMapped(heap_object));
  // All the internalized strings that the partial snapshot needs should be
  // either in the root table or in the partial snapshot cache.
  ASSERT(!heap_object->IsInternalizedString());

  if (address_mapper_.IsMapped(heap_object)) {
    int space = SpaceOfObject(heap_object);
    int address = address_mapper_.MappedTo(heap_object);
    SerializeReferenceToPreviousObject(space,
                                       address,
                                       how_to_code,
                                       where_to_point,
                                       skip);
  } else {
    if (skip != 0) {
      sink_->Put(kSkip, "SkipFromSerializeObject");
      sink_->PutInt(skip, "SkipDistanceFromSerializeObject");
    }
    // Object has not yet been serialized. Serialize it here.
    ObjectSerializer serializer(this,
                                heap_object,
                                sink_,
                                how_to_code,
                                where_to_point);
    serializer.Serialize();
  }
}


void Serializer::ObjectSerializer::Serialize() {
  int space = Serializer::SpaceOfObject(object_);
  int size = object_->Size();

  sink_->Put(kNewObject + reference_representation_ + space,
             "ObjectSerialization");
  sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");

  if (serializer_->code_address_map_) {
    const char* code_name =
        serializer_->code_address_map_->Lookup(object_->address());
    LOG(serializer_->isolate_,
        CodeNameEvent(object_->address(), sink_->Position(), code_name));
    LOG(serializer_->isolate_,
        SnapshotPositionEvent(object_->address(), sink_->Position()));
  }

  // Mark this object as already serialized.
  int offset = serializer_->Allocate(space, size);
  serializer_->address_mapper()->AddMapping(object_, offset);

  // Serialize the map (first word of the object).
  serializer_->SerializeObject(object_->map(), kPlain, kStartOfObject, 0);

  // Serialize the rest of the object.
  CHECK_EQ(0, bytes_processed_so_far_);
  bytes_processed_so_far_ = kPointerSize;
  object_->IterateBody(object_->map()->instance_type(), size, this);
  OutputRawData(object_->address() + size);
}


void Serializer::ObjectSerializer::VisitPointers(Object** start,
                                                 Object** end) {
  Object** current = start;
  while (current < end) {
    while (current < end && (*current)->IsSmi()) current++;
    if (current < end) OutputRawData(reinterpret_cast<Address>(current));

    while (current < end && !(*current)->IsSmi()) {
      HeapObject* current_contents = HeapObject::cast(*current);
      int root_index = serializer_->RootIndex(current_contents, kPlain);
      // Repeats are not subject to the write barrier so there are only some
      // objects that can be used in a repeat encoding. These are the early
      // ones in the root array that are never in new space.
      if (current != start &&
          root_index != kInvalidRootIndex &&
          root_index < kRootArrayNumberOfConstantEncodings &&
          current_contents == current[-1]) {
        ASSERT(!serializer_->isolate()->heap()->InNewSpace(current_contents));
        int repeat_count = 1;
        while (current < end - 1 &&
               current[repeat_count] == current_contents) {
          repeat_count++;
        }
        current += repeat_count;
        bytes_processed_so_far_ += repeat_count * kPointerSize;
        if (repeat_count > kMaxRepeats) {
          sink_->Put(kRepeat, "SerializeRepeats");
          sink_->PutInt(repeat_count, "SerializeRepeats");
        } else {
          sink_->Put(CodeForRepeats(repeat_count), "SerializeRepeats");
        }
      } else {
        serializer_->SerializeObject(
            current_contents, kPlain, kStartOfObject, 0);
        bytes_processed_so_far_ += kPointerSize;
        current++;
      }
    }
  }
}
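
// Illustrative only: with the repeat encoding above, a FixedArray freshly
// filled with a constant root such as undefined_value() serializes the first
// slot normally and then covers the rest of the run with one kConstantRepeat
// opcode (or kRepeat plus an explicit count once the run exceeds kMaxRepeats)
// instead of one root reference per slot.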


void Serializer::ObjectSerializer::VisitEmbeddedPointer(RelocInfo* rinfo) {
  // Out-of-line constant pool entries will be visited by the
  // ConstantPoolArray.
  if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;

  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  Object* object = rinfo->target_object();
  serializer_->SerializeObject(object, how_to_code, kStartOfObject, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}


void Serializer::ObjectSerializer::VisitExternalReference(Address* p) {
  int skip = OutputRawData(reinterpret_cast<Address>(p),
                           kCanReturnSkipInsteadOfSkipping);
  sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address target = *p;
  sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
  bytes_processed_so_far_ += kPointerSize;
}


void Serializer::ObjectSerializer::VisitExternalReference(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address target = rinfo->target_reference();
  sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
  bytes_processed_so_far_ += rinfo->target_address_size();
}


void Serializer::ObjectSerializer::VisitRuntimeEntry(RelocInfo* rinfo) {
  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  Address target = rinfo->target_address();
  sink_->PutInt(serializer_->EncodeExternalReference(target), "reference id");
  bytes_processed_so_far_ += rinfo->target_address_size();
}
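
// The three visitors above all emit the same record shape for addresses
// that point outside the heap; only where the target comes from differs.
// A rough sketch of the stream (byte values are illustrative):
//
//   [kExternalReference + how_to_code + kStartOfObject]  one tag byte
//   [skip]          distance the deserializer advances its write cursor
//                   before patching in the reference
//   [reference id]  table index produced by EncodeExternalReference
//
// Because only the id lands in the snapshot, the deserializer can rebuild
// the absolute address from the target isolate's ExternalReferenceTable.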


void Serializer::ObjectSerializer::VisitCodeTarget(RelocInfo* rinfo) {
  // Out-of-line constant pool entries will be visited by the
  // ConstantPoolArray.
  if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;

  int skip = OutputRawData(rinfo->target_address_address(),
                           kCanReturnSkipInsteadOfSkipping);
  Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address());
  serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}


void Serializer::ObjectSerializer::VisitCodeEntry(Address entry_address) {
  int skip = OutputRawData(entry_address, kCanReturnSkipInsteadOfSkipping);
  Code* object = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  serializer_->SerializeObject(object, kPlain, kInnerPointer, skip);
  bytes_processed_so_far_ += kPointerSize;
}


void Serializer::ObjectSerializer::VisitCell(RelocInfo* rinfo) {
  // Out-of-line constant pool entries will be visited by the
  // ConstantPoolArray.
  if (FLAG_enable_ool_constant_pool && rinfo->IsInConstantPool()) return;

  int skip = OutputRawData(rinfo->pc(), kCanReturnSkipInsteadOfSkipping);
  Cell* object = Cell::cast(rinfo->target_cell());
  serializer_->SerializeObject(object, kPlain, kInnerPointer, skip);
}
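
// Note that code targets, code entries and cells are serialized with
// kInnerPointer: the relocated slot points into the target object (at its
// first instruction, or at the cell's value field) rather than at the map
// word, so the deserializer has to adjust the address it writes back.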


void Serializer::ObjectSerializer::VisitExternalAsciiString(
    v8::String::ExternalAsciiStringResource** resource_pointer) {
  Address references_start = reinterpret_cast<Address>(resource_pointer);
  OutputRawData(references_start);
  for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
    Object* source =
        serializer_->isolate()->heap()->natives_source_cache()->get(i);
    if (!source->IsUndefined()) {
      ExternalAsciiString* string = ExternalAsciiString::cast(source);
      typedef v8::String::ExternalAsciiStringResource Resource;
      const Resource* resource = string->resource();
      if (resource == *resource_pointer) {
        sink_->Put(kNativesStringResource, "NativesStringResource");
        sink_->PutSection(i, "NativesStringResourceEnd");
        // Account for the resource pointer field itself (one pointer).
        bytes_processed_so_far_ += sizeof(resource);
        return;
      }
    }
  }
  // One of the strings in the natives cache should match the resource. We
  // can't serialize any other kinds of external strings.
  UNREACHABLE();
}


static Code* CloneCodeObject(HeapObject* code) {
  // The clone lives in a plain byte array outside the heap; the caller owns
  // it and must delete[] the underlying storage (see OutputRawData).
  Address copy = new byte[code->Size()];
  MemCopy(copy, code->address(), code->Size());
  return Code::cast(HeapObject::FromAddress(copy));
}


static void WipeOutRelocations(Code* code) {
  int mode_mask =
      RelocInfo::kCodeTargetMask |
      RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
      RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
      RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    if (!(FLAG_enable_ool_constant_pool && it.rinfo()->IsInConstantPool())) {
      it.rinfo()->WipeOut();
    }
  }
}
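
// Code objects embed absolute addresses (code targets, heap objects,
// external references, runtime entries) that vary from run to run, so two
// identical builds would otherwise produce different snapshot bytes. Wiping
// those slots in a heap-allocated clone, rather than in the live code
// object, keeps the running VM intact while making the raw output
// deterministic; the wiped slots are re-emitted as relocatable records by
// the Visit* methods above.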


int Serializer::ObjectSerializer::OutputRawData(
    Address up_to, Serializer::ObjectSerializer::ReturnSkip return_skip) {
  Address object_start = object_->address();
  int base = bytes_processed_so_far_;
  int up_to_offset = static_cast<int>(up_to - object_start);
  int to_skip = up_to_offset - bytes_processed_so_far_;
  int bytes_to_output = to_skip;
  bytes_processed_so_far_ += to_skip;
  // This assert will fail if the reloc info gives us the
  // target_address_address locations in a non-ascending order. Luckily that
  // doesn't happen.
  ASSERT(to_skip >= 0);
  bool outputting_code = false;
  if (to_skip != 0 && code_object_ && !code_has_been_output_) {
    // Output the code all at once and fix later.
    bytes_to_output = object_->Size() + to_skip - bytes_processed_so_far_;
    outputting_code = true;
    code_has_been_output_ = true;
  }
  if (bytes_to_output != 0 &&
      (!code_object_ || outputting_code)) {
#define RAW_CASE(index)                                                      \
    if (!outputting_code && bytes_to_output == index * kPointerSize &&       \
        index * kPointerSize == to_skip) {                                   \
      sink_->PutSection(kRawData + index, "RawDataFixed");                   \
      to_skip = 0; /* This insn already skips. */                            \
    } else /* NOLINT */
    COMMON_RAW_LENGTHS(RAW_CASE)
#undef RAW_CASE
    { /* NOLINT */
      // We always end up here if we are outputting the code of a code object.
      sink_->Put(kRawData, "RawData");
      sink_->PutInt(bytes_to_output, "length");
    }

    // To make snapshots reproducible, we need to wipe out all pointers in
    // code.
    if (code_object_) {
      Code* code = CloneCodeObject(object_);
      WipeOutRelocations(code);
      // We need to wipe out the header fields *after* wiping out the
      // relocations, because some of these fields are needed for the latter.
      code->WipeOutHeader();
      object_start = code->address();
    }

    const char* description = code_object_ ? "Code" : "Byte";
    for (int i = 0; i < bytes_to_output; i++) {
      sink_->PutSection(object_start[base + i], description);
    }
    if (code_object_) delete[] object_start;
  }
  if (to_skip != 0 && return_skip == kIgnoringReturn) {
    sink_->Put(kSkip, "Skip");
    sink_->PutInt(to_skip, "SkipDistance");
    to_skip = 0;
  }
  return to_skip;
}
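
// Each call, in effect, flushes the raw bytes between the last pointer
// field and up_to. Pointer-aligned stretches of a common length fit in a
// single kRawData + length tag byte; everything else is kRawData followed
// by an explicit length. A rough picture for an object with a two-word raw
// header and some trailing raw data (illustrative only):
//
//   [kRawData + 2]  <2 words of header bytes>
//   <pointer records...>
//   [kRawData] [n]  <n trailing raw bytes>
//
// For a code object the whole body is emitted on the first call instead,
// and subsequent calls hand the caller a skip distance so that pointer
// records can say how far to advance into the already-copied code before
// patching.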


int Serializer::SpaceOfObject(HeapObject* object) {
  for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
    AllocationSpace s = static_cast<AllocationSpace>(i);
    if (object->GetHeap()->InSpace(object, s)) {
      ASSERT(i < kNumberOfSpaces);
      return i;
    }
  }
  UNREACHABLE();
  return 0;
}


int Serializer::Allocate(int space, int size) {
  CHECK(space >= 0 && space < kNumberOfSpaces);
  int allocation_address = fullness_[space];
  fullness_[space] = allocation_address + size;
  return allocation_address;
}
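
// Nothing is allocated on the real heap here: fullness_[space] is a shadow
// bump pointer that mirrors what the deserializer's allocator will do when
// it replays the stream. For example (assuming the space starts out empty),
// serializing a 24-byte object and then a 40-byte object hands out offsets
// 0 and 24, and deserialization reproduces exactly those offsets.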


int Serializer::SpaceAreaSize(int space) {
  if (space == CODE_SPACE) {
    return isolate_->memory_allocator()->CodePageAreaSize();
  } else {
    return Page::kPageSize - Page::kObjectStartOffset;
  }
}


void Serializer::Pad() {
  // The non-branching GetInt will read up to 3 bytes too far, so we need
  // to pad the snapshot to make sure we don't read over the end.
  for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
    sink_->Put(kNop, "Padding");
  }
}
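
// GetInt is branch-free: it unconditionally loads four bytes and masks off
// the part it needs instead of testing a continuation bit per byte. A
// one-byte integer encoded at the very end of the data would therefore pull
// in up to three bytes past the buffer; the kNop padding above makes that
// overread harmless, and AtEOF below recognizes the trailing nops as
// padding rather than data.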


void Serializer::InitializeCodeAddressMap() {
  isolate_->InitializeLoggingAndCounters();
  code_address_map_ = new CodeAddressMap(isolate_);
}


bool SnapshotByteSource::AtEOF() {
  // We are at the end only if fewer than 2 * sizeof(uint32_t) bytes remain
  // and every remaining byte is padding (see Serializer::Pad above).
  if (0u + length_ - position_ > 2 * sizeof(uint32_t)) return false;
  for (int x = position_; x < length_; x++) {
    if (data_[x] != SerializerDeserializer::nop()) return false;
  }
  return true;
}

} }  // namespace v8::internal