// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/deserializer.h"

#include "src/assembler-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/isolate.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/hash-table.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/maybe-object.h"
#include "src/objects/string.h"
#include "src/snapshot/builtin-deserializer-allocator.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/snapshot.h"

namespace v8 {
namespace internal {

template <class AllocatorT>
void Deserializer<AllocatorT>::Initialize(Isolate* isolate) {
  DCHECK_NULL(isolate_);
  DCHECK_NOT_NULL(isolate);
  isolate_ = isolate;
  DCHECK_NULL(external_reference_table_);
  external_reference_table_ = isolate->heap()->external_reference_table();
#ifdef DEBUG
  // Count the number of external references registered through the API.
  num_api_references_ = 0;
  if (isolate_->api_external_references() != nullptr) {
    while (isolate_->api_external_references()[num_api_references_] != 0) {
      num_api_references_++;
    }
  }
#endif  // DEBUG
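  // The magic number is derived from the external reference table, so this
  // check rejects snapshot data produced against a different set of external
  // references than the current build provides.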
  CHECK_EQ(magic_number_,
           SerializedData::ComputeMagicNumber(external_reference_table_));
}

template <class AllocatorT>
bool Deserializer<AllocatorT>::IsLazyDeserializationEnabled() const {
  return FLAG_lazy_deserialization && !isolate()->serializer_enabled();
}

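// Recompute the hashes of all objects that were queued for rehashing while
// they were deserialized.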
template <class AllocatorT>
void Deserializer<AllocatorT>::Rehash() {
  DCHECK(can_rehash() || deserializing_user_code());
  for (const auto& item : to_rehash_) item->RehashBasedOnMap(isolate());
}

template <class AllocatorT>
Deserializer<AllocatorT>::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that we've fully used all reserved space.
  DCHECK(allocator()->ReservationsAreFullyUsed());
#endif  // DEBUG
}

// This is called on the roots. It is the driver of the deserialization
// process. It is also called on the body of each function.
template <class AllocatorT>
void Deserializer<AllocatorT>::VisitRootPointers(Root root,
                                                 const char* description,
                                                 Object** start, Object** end) {
  // Builtins and bytecode handlers are deserialized in a separate pass by the
  // BuiltinDeserializer.
  if (root == Root::kBuiltins || root == Root::kDispatchTable) return;

  // The space must be new space. Any other space would cause ReadChunk to try
  // to update the remembered set using nullptr as the address.
  ReadData(reinterpret_cast<MaybeObject**>(start),
           reinterpret_cast<MaybeObject**>(end), NEW_SPACE, kNullAddress);
}

template <class AllocatorT>
void Deserializer<AllocatorT>::Synchronize(
    VisitorSynchronization::SyncTag tag) {
  static const byte expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
}

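// Read back the bodies of objects whose contents were deferred during
// serialization. Each entry is a back reference to an already-allocated
// object followed by its field data.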
template <class AllocatorT>
void Deserializer<AllocatorT>::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    switch (code) {
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = code - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }
      default: {
        int space = code & kSpaceMask;
        DCHECK_LE(space, kNumberOfSpaces);
        DCHECK_EQ(code - space, kNewObject);
        HeapObject* object = GetBackReferencedObject(space);
        int size = source_.GetInt() << kPointerSizeLog2;
        Address obj_address = object->address();
        MaybeObject** start =
            reinterpret_cast<MaybeObject**>(obj_address + kPointerSize);
        MaybeObject** end = reinterpret_cast<MaybeObject**>(obj_address + size);
        bool filled = ReadData(start, end, space, obj_address);
        CHECK(filled);
        DCHECK(CanBeDeferred(object));
        PostProcessNewObject(object, space);
      }
    }
  }
}

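// Key used to canonicalize deserialized internalized strings against the
// existing string table.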
StringTableInsertionKey::StringTableInsertionKey(String* string)
    : StringTableKey(ComputeHashField(string)), string_(string) {
  DCHECK(string->IsInternalizedString());
}

bool StringTableInsertionKey::IsMatch(Object* string) {
  // We know that all entries in a hash table had their hash keys created.
  // Use that knowledge to have fast failure.
  if (Hash() != String::cast(string)->Hash()) return false;
  // We want to compare the content of two internalized strings here.
  return string_->SlowEquals(String::cast(string));
}

Handle<String> StringTableInsertionKey::AsHandle(Isolate* isolate) {
  return handle(string_, isolate);
}

uint32_t StringTableInsertionKey::ComputeHashField(String* string) {
  // Make sure hash_field() is computed.
  string->Hash();
  return string->hash_field();
}

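// Perform type-specific fix-ups on a freshly deserialized object: reset or
// recompute string hashes, canonicalize internalized strings, link allocation
// sites, restore external pointers, and so on. Returns the (possibly
// canonicalized) object to use in place of |obj|.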
template <class AllocatorT>
HeapObject* Deserializer<AllocatorT>::PostProcessNewObject(HeapObject* obj,
                                                           int space) {
  if ((FLAG_rehash_snapshot && can_rehash_) || deserializing_user_code()) {
    if (obj->IsString()) {
      // Uninitialize hash field as we need to recompute the hash.
      String* string = String::cast(obj);
      string->set_hash_field(String::kEmptyHashField);
    } else if (obj->NeedsRehashing()) {
      to_rehash_.push_back(obj);
    }
  }

  if (deserializing_user_code()) {
    if (obj->IsString()) {
      String* string = String::cast(obj);
      if (string->IsInternalizedString()) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set it to forward to the existing one.
        StringTableInsertionKey key(string);
        String* canonical =
            StringTable::ForwardStringIfExists(isolate_, &key, string);

        if (canonical != nullptr) return canonical;

        new_internalized_strings_.push_back(handle(string, isolate_));
        return string;
      }
    } else if (obj->IsScript()) {
      new_scripts_.push_back(handle(Script::cast(obj), isolate_));
    } else {
      DCHECK(CanBeDeferred(obj));
    }
  } else if (obj->IsScript()) {
    LOG(isolate_, ScriptEvent(Logger::ScriptEventType::kDeserialize,
                              Script::cast(obj)->id()));
    LOG(isolate_, ScriptDetails(Script::cast(obj)));
  }

  if (obj->IsAllocationSite()) {
    // Allocation sites are present in the snapshot, and must be linked into
    // a list at deserialization time.
    AllocationSite* site = AllocationSite::cast(obj);
    // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
    // as a (weak) root. If this root is relocated correctly, this becomes
    // unnecessary.
    if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
      site->set_weak_next(ReadOnlyRoots(isolate_).undefined_value());
    } else {
      site->set_weak_next(isolate_->heap()->allocation_sites_list());
    }
    isolate_->heap()->set_allocation_sites_list(site);
  } else if (obj->IsCode()) {
    // We flush all code pages after deserializing the startup snapshot. In
    // that case, we only need to remember code objects in the large object
    // space. When deserializing user code, remember each individual code
    // object.
    if (deserializing_user_code() || space == LO_SPACE) {
      new_code_objects_.push_back(Code::cast(obj));
    }
  } else if (obj->IsAccessorInfo()) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(AccessorInfo::cast(obj));
#endif
  } else if (obj->IsCallHandlerInfo()) {
#ifdef USE_SIMULATOR
    call_handler_infos_.push_back(CallHandlerInfo::cast(obj));
#endif
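  // External strings encode their resource as an index or encoded pointer in
  // the snapshot; restore the real resource pointer here.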
  } else if (obj->IsExternalString()) {
    if (obj->map() == ReadOnlyRoots(isolate_).native_source_string_map()) {
      ExternalOneByteString* string = ExternalOneByteString::cast(obj);
      DCHECK(string->is_short());
      string->SetResource(
          isolate_, NativesExternalStringResource::DecodeForDeserialization(
                        string->resource()));
    } else {
      ExternalString* string = ExternalString::cast(obj);
      uint32_t index = string->resource_as_uint32();
      Address address =
          static_cast<Address>(isolate_->api_external_references()[index]);
      string->set_address_as_resource(address);
      isolate_->heap()->UpdateExternalString(string, 0,
                                             string->ExternalPayloadSize());
    }
    isolate_->heap()->RegisterExternalString(String::cast(obj));
  } else if (obj->IsJSTypedArray()) {
    JSTypedArray* typed_array = JSTypedArray::cast(obj);
    CHECK(typed_array->byte_offset()->IsSmi());
    int32_t byte_offset = NumberToInt32(typed_array->byte_offset());
    if (byte_offset > 0) {
      FixedTypedArrayBase* elements =
          FixedTypedArrayBase::cast(typed_array->elements());
      // Must be off-heap layout.
      DCHECK(!typed_array->is_on_heap());

      void* pointer_with_offset = reinterpret_cast<void*>(
          reinterpret_cast<intptr_t>(elements->external_pointer()) +
          byte_offset);
      elements->set_external_pointer(pointer_with_offset);
    }
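  // Off-heap array buffer backing stores were serialized as an index into
  // off_heap_backing_stores_; swap the index back for the actual allocation.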
  } else if (obj->IsJSArrayBuffer()) {
    JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
    // Only fixup for the off-heap case.
    if (buffer->backing_store() != nullptr) {
      Smi* store_index = reinterpret_cast<Smi*>(buffer->backing_store());
      void* backing_store = off_heap_backing_stores_[store_index->value()];

      buffer->set_backing_store(backing_store);
      isolate_->heap()->RegisterNewArrayBuffer(buffer);
    }
  } else if (obj->IsFixedTypedArrayBase()) {
    FixedTypedArrayBase* fta = FixedTypedArrayBase::cast(obj);
    // Only fixup for the off-heap case.
    if (fta->base_pointer() == nullptr) {
      Smi* store_index = reinterpret_cast<Smi*>(fta->external_pointer());
      void* backing_store = off_heap_backing_stores_[store_index->value()];
      fta->set_external_pointer(backing_store);
    }
  } else if (obj->IsBytecodeArray()) {
    // TODO(mythria): Remove these once we store the default values for these
    // fields in the serializer.
    BytecodeArray* bytecode_array = BytecodeArray::cast(obj);
    bytecode_array->set_interrupt_budget(
        interpreter::Interpreter::InterruptBudget());
    bytecode_array->set_osr_loop_nesting_level(0);
  }

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(obj->map())));
  return obj;
}

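// When lazy deserialization is enabled, lazy builtins are not deserialized
// eagerly; references to them are redirected to the DeserializeLazy builtin
// instead.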
template <class AllocatorT>
int Deserializer<AllocatorT>::MaybeReplaceWithDeserializeLazy(int builtin_id) {
  DCHECK(Builtins::IsBuiltinId(builtin_id));
  return IsLazyDeserializationEnabled() && Builtins::IsLazy(builtin_id)
             ? Builtins::kDeserializeLazy
             : builtin_id;
}

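// Resolve a back reference to an object that has already been deserialized.
// Back references are encoded per space: large objects and maps by index,
// paged-space objects by chunk index and offset.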
template <class AllocatorT>
HeapObject* Deserializer<AllocatorT>::GetBackReferencedObject(int space) {
  HeapObject* obj;
  switch (space) {
    case LO_SPACE:
      obj = allocator()->GetLargeObject(source_.GetInt());
      break;
    case MAP_SPACE:
      obj = allocator()->GetMap(source_.GetInt());
      break;
    case RO_SPACE: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      if (isolate()->heap()->deserialization_complete()) {
        PagedSpace* read_only_space = isolate()->heap()->read_only_space();
        Page* page = read_only_space->first_page();
        for (uint32_t i = 0; i < chunk_index; ++i) {
          page = page->next_page();
        }
        Address address = page->OffsetToAddress(chunk_offset);
        obj = HeapObject::FromAddress(address);
      } else {
        obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                     chunk_index, chunk_offset);
      }
      break;
    }
    default: {
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();
      obj = allocator()->GetObject(static_cast<AllocationSpace>(space),
                                   chunk_index, chunk_offset);
      break;
    }
  }

  if (deserializing_user_code() && obj->IsThinString()) {
    obj = ThinString::cast(obj)->actual();
  }

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(obj));
  return obj;
}

// This routine writes the new object into the pointer provided.
// The reason for this strange interface is that otherwise the object is
// written very late, which means the FreeSpace map is not set up by the
// time we need to use it to mark the space at the end of a page free.
template <class AllocatorT>
void Deserializer<AllocatorT>::ReadObject(
    int space_number, MaybeObject** write_back,
    HeapObjectReferenceType reference_type) {
  const int size = source_.GetInt() << kObjectAlignmentBits;

  Address address =
      allocator()->Allocate(static_cast<AllocationSpace>(space_number), size);
  HeapObject* obj = HeapObject::FromAddress(address);

  isolate_->heap()->OnAllocationEvent(obj, size);
  MaybeObject** current = reinterpret_cast<MaybeObject**>(address);
  MaybeObject** limit = current + (size >> kPointerSizeLog2);

  if (ReadData(current, limit, space_number, address)) {
    // Only post process if object content has not been deferred.
    obj = PostProcessNewObject(obj, space_number);
  }

  MaybeObject* write_back_obj =
      reference_type == HeapObjectReferenceType::STRONG
          ? HeapObjectReference::Strong(obj)
          : HeapObjectReference::Weak(obj);
  UnalignedCopy(write_back, &write_back_obj);
#ifdef DEBUG
  if (obj->IsCode()) {
    DCHECK(space_number == CODE_SPACE || space_number == LO_SPACE);
  } else {
    DCHECK(space_number != CODE_SPACE);
  }
#endif  // DEBUG
}

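// Read a single object from the stream, using a temporary stack slot as the
// write target, and return it as a strong reference.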
template <class AllocatorT>
Object* Deserializer<AllocatorT>::ReadDataSingle() {
  MaybeObject* o;
  MaybeObject** start = &o;
  MaybeObject** end = start + 1;
  int source_space = NEW_SPACE;
  Address current_object = kNullAddress;

  CHECK(ReadData(start, end, source_space, current_object));
  HeapObject* heap_object;
  bool success = o->ToStrongHeapObject(&heap_object);
  DCHECK(success);
  USE(success);
  return heap_object;
}

static void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions has been deserialized from
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  CHECK_WITH_MSG(false, "No external references provided via API");
}

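// The main deserialization loop: reads bytecodes from the stream and fills
// the slots between |current| and |limit|. Returns false if the object's
// contents were deferred, true otherwise.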
template <class AllocatorT>
bool Deserializer<AllocatorT>::ReadData(MaybeObject** current,
                                        MaybeObject** limit, int source_space,
                                        Address current_object_address) {
  Isolate* const isolate = isolate_;
  // Write barrier support costs around 1% in startup time. In fact there
  // are no new space objects in current boot snapshots, so it's not needed,
  // but that may change.
  bool write_barrier_needed =
      (current_object_address != kNullAddress && source_space != NEW_SPACE &&
       source_space != CODE_SPACE);
  while (current < limit) {
    byte data = source_.Get();
    switch (data) {
#define CASE_STATEMENT(where, how, within, space_number) \
  case where + how + within + space_number:              \
    STATIC_ASSERT((where & ~kWhereMask) == 0);           \
    STATIC_ASSERT((how & ~kHowToCodeMask) == 0);         \
    STATIC_ASSERT((within & ~kWhereToPointMask) == 0);   \
    STATIC_ASSERT((space_number & ~kSpaceMask) == 0);

#define CASE_BODY(where, how, within, space_number_if_any)                   \
  current = ReadDataCase<where, how, within, space_number_if_any>(           \
      isolate, current, current_object_address, data, write_barrier_needed); \
  break;

// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with fall-through cases
// and one body.
#define ALL_SPACES(where, how, within)           \
  CASE_STATEMENT(where, how, within, NEW_SPACE)  \
  CASE_BODY(where, how, within, NEW_SPACE)       \
  CASE_STATEMENT(where, how, within, OLD_SPACE)  \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, CODE_SPACE) \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, MAP_SPACE)  \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, LO_SPACE)   \
  V8_FALLTHROUGH;                                \
  CASE_STATEMENT(where, how, within, RO_SPACE)   \
  CASE_BODY(where, how, within, kAnyOldSpace)

#define FOUR_CASES(byte_code) \
  case byte_code:             \
  case byte_code + 1:         \
  case byte_code + 2:         \
  case byte_code + 3:

#define SIXTEEN_CASES(byte_code) \
  FOUR_CASES(byte_code)          \
  FOUR_CASES(byte_code + 4)      \
  FOUR_CASES(byte_code + 8)      \
  FOUR_CASES(byte_code + 12)

#define SINGLE_CASE(where, how, within, space) \
  CASE_STATEMENT(where, how, within, space)    \
  CASE_BODY(where, how, within, space)

      // Deserialize a new object and write a pointer to it to the current
      // object.
      ALL_SPACES(kNewObject, kPlain, kStartOfObject)
      // Deserialize a new code object and write a pointer to its first
      // instruction to the current code object.
      ALL_SPACES(kNewObject, kFromCode, kInnerPointer)
      // Find a recently deserialized object using its offset from the current
      // allocation point and write a pointer to it to the current object.
      ALL_SPACES(kBackref, kPlain, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kPlain, kStartOfObject)
#if V8_CODE_EMBEDS_OBJECT_POINTER
      // Deserialize a new object from pointer found in code and write
      // a pointer to it to the current object. Required only for MIPS, PPC,
      // ARM or S390 with embedded constant pool, and omitted on the other
      // architectures because it is fully unrolled and would cause bloat.
      ALL_SPACES(kNewObject, kFromCode, kStartOfObject)
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to it to the current
      // object. Required only for MIPS, PPC, ARM or S390 with embedded
      // constant pool.
      ALL_SPACES(kBackref, kFromCode, kStartOfObject)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kStartOfObject)
#endif
      // Find a recently deserialized code object using its offset from the
      // current allocation point and write a pointer to its first instruction
      // to the current code object or the instruction pointer in a function
      // object.
      ALL_SPACES(kBackref, kFromCode, kInnerPointer)
      ALL_SPACES(kBackrefWithSkip, kFromCode, kInnerPointer)
      // Find an object in the roots array and write a pointer to it to the
      // current object.
      SINGLE_CASE(kRootArray, kPlain, kStartOfObject, 0)
#if V8_CODE_EMBEDS_OBJECT_POINTER
      // Find an object in the roots array and write a pointer to it in code.
      SINGLE_CASE(kRootArray, kFromCode, kStartOfObject, 0)
#endif
      // Find an object in the partial snapshot cache and write a pointer to
      // it to the current object.
      SINGLE_CASE(kPartialSnapshotCache, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kPartialSnapshotCache, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kPartialSnapshotCache, kFromCode, kInnerPointer, 0)
      // Find an object in the attached references and write a pointer to it
      // to the current object.
      SINGLE_CASE(kAttachedReference, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kAttachedReference, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kAttachedReference, kFromCode, kInnerPointer, 0)
      // Find a builtin and write a pointer to it to the current object.
      SINGLE_CASE(kBuiltin, kPlain, kStartOfObject, 0)
      SINGLE_CASE(kBuiltin, kFromCode, kStartOfObject, 0)
      SINGLE_CASE(kBuiltin, kFromCode, kInnerPointer, 0)

#undef CASE_STATEMENT
#undef CASE_BODY
#undef ALL_SPACES

      case kSkip: {
        int size = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + size);
        break;
      }

      // Find an external reference and write a pointer to it to the current
      // object.
      case kExternalReference + kPlain + kStartOfObject:
        current = reinterpret_cast<MaybeObject**>(ReadExternalReferenceCase(
            kPlain, reinterpret_cast<void**>(current), current_object_address));
        break;
      // Find an external reference and write a pointer to it in the current
      // code object.
      case kExternalReference + kFromCode + kStartOfObject:
        current = reinterpret_cast<MaybeObject**>(ReadExternalReferenceCase(
            kFromCode, reinterpret_cast<void**>(current),
            current_object_address));
        break;

      case kInternalReferenceEncoded:
      case kInternalReference: {
        // Internal reference address is not encoded via skip, but by offset
        // from code entry.
        int pc_offset = source_.GetInt();
        int target_offset = source_.GetInt();
        Code* code =
            Code::cast(HeapObject::FromAddress(current_object_address));
        DCHECK(0 <= pc_offset && pc_offset <= code->raw_instruction_size());
        DCHECK(0 <= target_offset &&
               target_offset <= code->raw_instruction_size());
        Address pc = code->entry() + pc_offset;
        Address target = code->entry() + target_offset;
        Assembler::deserialization_set_target_internal_reference_at(
            pc, target,
            data == kInternalReference ? RelocInfo::INTERNAL_REFERENCE
                                       : RelocInfo::INTERNAL_REFERENCE_ENCODED);
        break;
      }

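      // kOffHeapTarget: the referenced code lives in the embedded builtins
      // blob rather than on the heap; patch the slot (or branch) to point at
      // its off-heap instruction start.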
      case kOffHeapTarget: {
        DCHECK(FLAG_embedded_builtins);
        int skip = source_.GetInt();
        int builtin_index = source_.GetInt();
        DCHECK(Builtins::IsBuiltinId(builtin_index));

        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + skip);

        CHECK_NOT_NULL(isolate->embedded_blob());
        EmbeddedData d = EmbeddedData::FromBlob();
        Address address = d.InstructionStartOfBuiltin(builtin_index);
        CHECK_NE(kNullAddress, address);

        if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
          Address location_of_branch_data = reinterpret_cast<Address>(current);
          int skip = Assembler::deserialization_special_target_size(
              location_of_branch_data);
          Assembler::deserialization_set_special_target_at(
              location_of_branch_data,
              Code::cast(HeapObject::FromAddress(current_object_address)),
              address);
          location_of_branch_data += skip;
          current = reinterpret_cast<MaybeObject**>(location_of_branch_data);
        } else {
          MaybeObject* o = reinterpret_cast<MaybeObject*>(address);
          UnalignedCopy(current, &o);
          current++;
        }
        break;
      }

      case kNop:
        break;

      case kNextChunk: {
        int space = source_.Get();
        allocator()->MoveToNextChunk(static_cast<AllocationSpace>(space));
        break;
      }

      case kDeferred: {
        // Deferred can only occur right after the heap object header.
        DCHECK_EQ(current, reinterpret_cast<MaybeObject**>(
                               current_object_address + kPointerSize));
        HeapObject* obj = HeapObject::FromAddress(current_object_address);
        // If the deferred object is a map, its instance type may be used
        // during deserialization. Initialize it with a temporary value.
        if (obj->IsMap()) Map::cast(obj)->set_instance_type(FILLER_TYPE);
        current = limit;
        return false;
      }

      case kSynchronize:
        // Getting here indicates a mismatch between the number of GC roots
        // when serializing and deserializing.
        UNREACHABLE();

      // Deserialize raw data of variable length.
      case kVariableRawData: {
        int size_in_bytes = source_.GetInt();
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        source_.CopyRaw(raw_data_out, size_in_bytes);
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<intptr_t>(current) + size_in_bytes);
        break;
      }

      // Deserialize raw code directly into the body of the code object.
      // Do not move current.
      case kVariableRawCode: {
        int size_in_bytes = source_.GetInt();
        source_.CopyRaw(
            reinterpret_cast<byte*>(current_object_address + Code::kDataStart),
            size_in_bytes);
        break;
      }

      case kVariableRepeat: {
        int repeats = source_.GetInt();
        MaybeObject* object = current[-1];
        DCHECK(!Heap::InNewSpace(object));
        DCHECK(!allocator()->next_reference_is_weak());
        for (int i = 0; i < repeats; i++) UnalignedCopy(current++, &object);
        break;
      }

      case kOffHeapBackingStore: {
        int byte_length = source_.GetInt();
        byte* backing_store = static_cast<byte*>(
            isolate->array_buffer_allocator()->AllocateUninitialized(
                byte_length));
        CHECK_NOT_NULL(backing_store);
        source_.CopyRaw(backing_store, byte_length);
        off_heap_backing_stores_.push_back(backing_store);
        break;
      }

      case kApiReference: {
        int skip = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + skip);
        uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
        Address address;
        if (isolate->api_external_references()) {
          DCHECK_WITH_MSG(
              reference_id < num_api_references_,
              "too few external references provided through the API");
          address = static_cast<Address>(
              isolate->api_external_references()[reference_id]);
        } else {
          address = reinterpret_cast<Address>(NoExternalReferencesCallback);
        }
        memcpy(current, &address, kPointerSize);
        current++;
        break;
      }

      case kWeakPrefix:
        DCHECK(!allocator()->next_reference_is_weak());
        allocator()->set_next_reference_is_weak(true);
        break;

      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2: {
        int alignment = data - (SerializerDeserializer::kAlignmentPrefix - 1);
        allocator()->SetAlignment(static_cast<AllocationAlignment>(alignment));
        break;
      }

      STATIC_ASSERT(kNumberOfRootArrayConstants == Heap::kOldSpaceRoots);
      STATIC_ASSERT(kNumberOfRootArrayConstants == 32);
      SIXTEEN_CASES(kRootArrayConstantsWithSkip)
      SIXTEEN_CASES(kRootArrayConstantsWithSkip + 16) {
        int skip = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<intptr_t>(current) + skip);
        V8_FALLTHROUGH;
      }

      SIXTEEN_CASES(kRootArrayConstants)
      SIXTEEN_CASES(kRootArrayConstants + 16) {
        int id = data & kRootArrayConstantsMask;
        Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
        MaybeObject* object =
            MaybeObject::FromObject(isolate->heap()->root(root_index));
        DCHECK(!Heap::InNewSpace(object));
        DCHECK(!allocator()->next_reference_is_weak());
        UnalignedCopy(current++, &object);
        break;
      }

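      // Hot objects are a small ring buffer of the most recently used
      // objects, addressable with a single byte code.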
      STATIC_ASSERT(kNumberOfHotObjects == 8);
      FOUR_CASES(kHotObjectWithSkip)
      FOUR_CASES(kHotObjectWithSkip + 4) {
        int skip = source_.GetInt();
        current = reinterpret_cast<MaybeObject**>(
            reinterpret_cast<Address>(current) + skip);
        V8_FALLTHROUGH;
      }

      FOUR_CASES(kHotObject)
      FOUR_CASES(kHotObject + 4) {
        int index = data & kHotObjectMask;
        Object* hot_object = hot_objects_.Get(index);
        MaybeObject* hot_maybe_object = MaybeObject::FromObject(hot_object);
        if (allocator()->GetAndClearNextReferenceIsWeak()) {
          hot_maybe_object = MaybeObject::MakeWeak(hot_maybe_object);
        }

        UnalignedCopy(current, &hot_maybe_object);
        if (write_barrier_needed && Heap::InNewSpace(hot_object)) {
          Address current_address = reinterpret_cast<Address>(current);
          GenerationalBarrier(HeapObject::FromAddress(current_object_address),
                              reinterpret_cast<MaybeObject**>(current_address),
                              hot_maybe_object);
        }
        current++;
        break;
      }

      // Deserialize raw data of fixed length from 1 to 32 words.
      STATIC_ASSERT(kNumberOfFixedRawData == 32);
      SIXTEEN_CASES(kFixedRawData)
      SIXTEEN_CASES(kFixedRawData + 16) {
        byte* raw_data_out = reinterpret_cast<byte*>(current);
        int size_in_bytes = (data - kFixedRawDataStart) << kPointerSizeLog2;
        source_.CopyRaw(raw_data_out, size_in_bytes);
        current = reinterpret_cast<MaybeObject**>(raw_data_out + size_in_bytes);
        break;
      }

      STATIC_ASSERT(kNumberOfFixedRepeat == 16);
      SIXTEEN_CASES(kFixedRepeat) {
        int repeats = data - kFixedRepeatStart;
        MaybeObject* object;
        DCHECK(!allocator()->next_reference_is_weak());
        UnalignedCopy(&object, current - 1);
        DCHECK(!Heap::InNewSpace(object));
        for (int i = 0; i < repeats; i++) UnalignedCopy(current++, &object);
        break;
      }

#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
  case byte_code:              \
    UNREACHABLE();
      UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE

#undef SIXTEEN_CASES
#undef FOUR_CASES
#undef SINGLE_CASE
    }
  }
  CHECK_EQ(limit, current);
  return true;
}

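// Read an external reference id from the stream and write the corresponding
// address either as raw data or as a specially coded target in a code object.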
template <class AllocatorT>
void** Deserializer<AllocatorT>::ReadExternalReferenceCase(
    HowToCode how, void** current, Address current_object_address) {
  int skip = source_.GetInt();
  current = reinterpret_cast<void**>(reinterpret_cast<Address>(current) + skip);
  uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
  Address address = external_reference_table_->address(reference_id);

  if (how == kFromCode) {
    Address location_of_branch_data = reinterpret_cast<Address>(current);
    int skip =
        Assembler::deserialization_special_target_size(location_of_branch_data);
    Assembler::deserialization_set_special_target_at(
        location_of_branch_data,
        Code::cast(HeapObject::FromAddress(current_object_address)), address);
    location_of_branch_data += skip;
    current = reinterpret_cast<void**>(location_of_branch_data);
  } else {
    void* new_current = reinterpret_cast<void**>(address);
    UnalignedCopy(current, &new_current);
    ++current;
  }
  return current;
}

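// Handles all cases generated by CASE_STATEMENT/CASE_BODY above. The template
// parameters encode where the referenced object comes from, how the pointer
// is written, whether it points to the object start or an inner pointer, and
// which space is involved, so most branches below fold away at compile time.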
template <class AllocatorT>
template <int where, int how, int within, int space_number_if_any>
MaybeObject** Deserializer<AllocatorT>::ReadDataCase(
    Isolate* isolate, MaybeObject** current, Address current_object_address,
    byte data, bool write_barrier_needed) {
  bool emit_write_barrier = false;
  bool current_was_incremented = false;
  int space_number = space_number_if_any == kAnyOldSpace ? (data & kSpaceMask)
                                                         : space_number_if_any;
  HeapObjectReferenceType reference_type = HeapObjectReferenceType::STRONG;
  if (where == kNewObject && how == kPlain && within == kStartOfObject) {
    if (allocator()->GetAndClearNextReferenceIsWeak()) {
      reference_type = HeapObjectReferenceType::WEAK;
    }
    ReadObject(space_number, current, reference_type);
    emit_write_barrier = (space_number == NEW_SPACE);
  } else {
    Object* new_object = nullptr; /* May not be a real Object pointer. */
    if (where == kNewObject) {
      ReadObject(space_number, reinterpret_cast<MaybeObject**>(&new_object),
                 HeapObjectReferenceType::STRONG);
    } else if (where == kBackref) {
      emit_write_barrier = (space_number == NEW_SPACE);
      new_object = GetBackReferencedObject(data & kSpaceMask);
    } else if (where == kBackrefWithSkip) {
      int skip = source_.GetInt();
      current = reinterpret_cast<MaybeObject**>(
          reinterpret_cast<Address>(current) + skip);
      emit_write_barrier = (space_number == NEW_SPACE);
      new_object = GetBackReferencedObject(data & kSpaceMask);
    } else if (where == kRootArray) {
      int id = source_.GetInt();
      Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(id);
      new_object = isolate->heap()->root(root_index);
      emit_write_barrier = Heap::InNewSpace(new_object);
      hot_objects_.Add(HeapObject::cast(new_object));
    } else if (where == kPartialSnapshotCache) {
      int cache_index = source_.GetInt();
      new_object = isolate->partial_snapshot_cache()->at(cache_index);
      emit_write_barrier = Heap::InNewSpace(new_object);
    } else if (where == kAttachedReference) {
      int index = source_.GetInt();
      new_object = *attached_objects_[index];
      emit_write_barrier = Heap::InNewSpace(new_object);
    } else {
      DCHECK_EQ(where, kBuiltin);
      int builtin_id = MaybeReplaceWithDeserializeLazy(source_.GetInt());
      new_object = isolate->builtins()->builtin(builtin_id);
      emit_write_barrier = false;
    }
    if (within == kInnerPointer) {
      DCHECK_EQ(how, kFromCode);
      if (where == kBuiltin) {
        // At this point, new_object may still be uninitialized, thus the
        // unchecked Code cast.
        new_object = reinterpret_cast<Object*>(
            reinterpret_cast<Code*>(new_object)->raw_instruction_start());
      } else if (new_object->IsCode()) {
        new_object = reinterpret_cast<Object*>(
            Code::cast(new_object)->raw_instruction_start());
      } else {
        Cell* cell = Cell::cast(new_object);
        new_object = reinterpret_cast<Object*>(cell->ValueAddress());
      }
    }
    if (how == kFromCode) {
      DCHECK(!allocator()->next_reference_is_weak());
      Address location_of_branch_data = reinterpret_cast<Address>(current);
      int skip = Assembler::deserialization_special_target_size(
          location_of_branch_data);
      Assembler::deserialization_set_special_target_at(
          location_of_branch_data,
          Code::cast(HeapObject::FromAddress(current_object_address)),
          reinterpret_cast<Address>(new_object));
      location_of_branch_data += skip;
      current = reinterpret_cast<MaybeObject**>(location_of_branch_data);
      current_was_incremented = true;
    } else {
      MaybeObject* new_maybe_object = MaybeObject::FromObject(new_object);
      if (allocator()->GetAndClearNextReferenceIsWeak()) {
        new_maybe_object = MaybeObject::MakeWeak(new_maybe_object);
      }
      UnalignedCopy(current, &new_maybe_object);
    }
  }
  if (emit_write_barrier && write_barrier_needed) {
    Address current_address = reinterpret_cast<Address>(current);
    SLOW_DCHECK(isolate->heap()->ContainsSlow(current_object_address));
    GenerationalBarrier(HeapObject::FromAddress(current_object_address),
                        reinterpret_cast<MaybeObject**>(current_address),
                        *reinterpret_cast<MaybeObject**>(current_address));
  }
  if (!current_was_incremented) {
    current++;
  }

  return current;
}

// Explicit instantiation.
template class Deserializer<BuiltinDeserializerAllocator>;
template class Deserializer<DefaultDeserializerAllocator>;

}  // namespace internal
}  // namespace v8