// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/deserializer.h"

#include "src/base/logging.h"
#include "src/base/platform/wrappers.h"
#include "src/codegen/assembler-inl.h"
#include "src/common/assert-scope.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/heap-write-barrier.h"
#include "src/heap/heap.h"
#include "src/heap/local-heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/interpreter/interpreter.h"
#include "src/logging/local-logger.h"
#include "src/logging/log.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/backing-store.h"
#include "src/objects/cell-inl.h"
#include "src/objects/embedder-data-array-inl.h"
#include "src/objects/hash-table.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/maybe-object.h"
#include "src/objects/objects-body-descriptors-inl.h"
#include "src/objects/objects.h"
#include "src/objects/slots.h"
#include "src/objects/string.h"
#include "src/roots/roots.h"
#include "src/sandbox/external-pointer.h"
#include "src/snapshot/embedded/embedded-data-inl.h"
#include "src/snapshot/references.h"
#include "src/snapshot/serializer-deserializer.h"
#include "src/snapshot/shared-heap-serializer.h"
#include "src/snapshot/snapshot-data.h"
#include "src/snapshot/snapshot.h"
#include "src/tracing/trace-event.h"
#include "src/tracing/traced-value.h"
#include "src/utils/memcopy.h"

namespace v8 {
namespace internal {

// A SlotAccessor for a slot in a HeapObject, which abstracts the slot
// operations done by the deserializer in a way which is GC-safe. In particular,
// rather than an absolute slot address, this accessor holds a Handle to the
// HeapObject, which is updated if the HeapObject moves.
class SlotAccessorForHeapObject {
 public:
  static SlotAccessorForHeapObject ForSlotIndex(Handle<HeapObject> object,
                                                int index) {
    return SlotAccessorForHeapObject(object, index * kTaggedSize);
  }
  static SlotAccessorForHeapObject ForSlotOffset(Handle<HeapObject> object,
                                                 int offset) {
    return SlotAccessorForHeapObject(object, offset);
  }

  MaybeObjectSlot slot() const { return object_->RawMaybeWeakField(offset_); }
  Handle<HeapObject> object() const { return object_; }
  int offset() const { return offset_; }

  // Writes the given value to this slot, optionally with an offset (e.g. for
  // repeat writes). Returns the number of slots written (which is one).
  int Write(MaybeObject value, int slot_offset = 0) {
    MaybeObjectSlot current_slot = slot() + slot_offset;
    current_slot.Relaxed_Store(value);
    WriteBarrier::Marking(*object_, current_slot, value);
    // No need for a generational write barrier.
    DCHECK(!Heap::InYoungGeneration(value));
    return 1;
  }
  int Write(HeapObject value, HeapObjectReferenceType ref_type,
            int slot_offset = 0) {
    return Write(HeapObjectReference::From(value, ref_type), slot_offset);
  }
  int Write(Handle<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset = 0) {
    return Write(*value, ref_type, slot_offset);
  }

  // Same as Write, but additionally with a generational barrier.
  int WriteWithGenerationalBarrier(MaybeObject value) {
    MaybeObjectSlot current_slot = slot();
    current_slot.Relaxed_Store(value);
    WriteBarrier::Marking(*object_, current_slot, value);
    if (Heap::InYoungGeneration(value)) {
      GenerationalBarrier(*object_, current_slot, value);
    }
    return 1;
  }
  int WriteWithGenerationalBarrier(HeapObject value,
                                   HeapObjectReferenceType ref_type) {
    return WriteWithGenerationalBarrier(
        HeapObjectReference::From(value, ref_type));
  }
  int WriteWithGenerationalBarrier(Handle<HeapObject> value,
                                   HeapObjectReferenceType ref_type) {
    return WriteWithGenerationalBarrier(*value, ref_type);
  }

 private:
  SlotAccessorForHeapObject(Handle<HeapObject> object, int offset)
      : object_(object), offset_(offset) {}

  const Handle<HeapObject> object_;
  const int offset_;
};
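
// Illustrative usage sketch (not part of the original source): this is the
// pattern the deserializer uses further down, in ReadData() and
// ReadSingleBytecodeData(), when filling in an object's fields. Here
// `some_value` is a placeholder for a deserialized HeapObject.
//
//   Handle<HeapObject> obj = ...;  // object currently being deserialized
//   // Write a strong reference into slot index 1 (the slot after the map).
//   // This stays GC-safe across moves because the accessor holds a Handle
//   // rather than a raw slot address.
//   SlotAccessorForHeapObject::ForSlotIndex(obj, 1)
//       .Write(some_value, HeapObjectReferenceType::STRONG);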

// A SlotAccessor for absolute full slot addresses.
class SlotAccessorForRootSlots {
 public:
  explicit SlotAccessorForRootSlots(FullMaybeObjectSlot slot) : slot_(slot) {}

  FullMaybeObjectSlot slot() const { return slot_; }
  Handle<HeapObject> object() const { UNREACHABLE(); }
  int offset() const { UNREACHABLE(); }

  // Writes the given value to this slot, optionally with an offset (e.g. for
  // repeat writes). Returns the number of slots written (which is one).
  int Write(MaybeObject value, int slot_offset = 0) {
    FullMaybeObjectSlot current_slot = slot() + slot_offset;
    current_slot.Relaxed_Store(value);
    return 1;
  }
  int Write(HeapObject value, HeapObjectReferenceType ref_type,
            int slot_offset = 0) {
    return Write(HeapObjectReference::From(value, ref_type), slot_offset);
  }
  int Write(Handle<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset = 0) {
    return Write(*value, ref_type, slot_offset);
  }

  int WriteWithGenerationalBarrier(MaybeObject value) { return Write(value); }
  int WriteWithGenerationalBarrier(HeapObject value,
                                   HeapObjectReferenceType ref_type) {
    return WriteWithGenerationalBarrier(
        HeapObjectReference::From(value, ref_type));
  }
  int WriteWithGenerationalBarrier(Handle<HeapObject> value,
                                   HeapObjectReferenceType ref_type) {
    return WriteWithGenerationalBarrier(*value, ref_type);
  }

 private:
  const FullMaybeObjectSlot slot_;
};

// A SlotAccessor for creating a Handle, which saves a Handle allocation when
// a Handle already exists.
template <typename IsolateT>
class SlotAccessorForHandle {
 public:
  SlotAccessorForHandle(Handle<HeapObject>* handle, IsolateT* isolate)
      : handle_(handle), isolate_(isolate) {}

  MaybeObjectSlot slot() const { UNREACHABLE(); }
  Handle<HeapObject> object() const { UNREACHABLE(); }
  int offset() const { UNREACHABLE(); }

  int Write(MaybeObject value, int slot_offset = 0) { UNREACHABLE(); }
  int Write(HeapObject value, HeapObjectReferenceType ref_type,
            int slot_offset = 0) {
    DCHECK_EQ(slot_offset, 0);
    DCHECK_EQ(ref_type, HeapObjectReferenceType::STRONG);
    *handle_ = handle(value, isolate_);
    return 1;
  }
  int Write(Handle<HeapObject> value, HeapObjectReferenceType ref_type,
            int slot_offset = 0) {
    DCHECK_EQ(slot_offset, 0);
    DCHECK_EQ(ref_type, HeapObjectReferenceType::STRONG);
    *handle_ = value;
    return 1;
  }

  int WriteWithGenerationalBarrier(HeapObject value,
                                   HeapObjectReferenceType ref_type) {
    return Write(value, ref_type);
  }
  int WriteWithGenerationalBarrier(Handle<HeapObject> value,
                                   HeapObjectReferenceType ref_type) {
    return Write(value, ref_type);
  }

 private:
  Handle<HeapObject>* handle_;
  IsolateT* isolate_;
};
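
// Illustrative note (not part of the original source): the three accessors
// above intentionally share one duck-typed interface (slot(), object(),
// offset(), Write(), WriteWithGenerationalBarrier()), so the templated
// bytecode handlers below can be instantiated against any of them.
// ReadObject(), for instance, deserializes straight into a Handle:
//
//   Handle<HeapObject> ret;
//   ReadSingleBytecodeData(source_.Get(),
//                          SlotAccessorForHandle<IsolateT>(&ret, isolate()));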

template <typename IsolateT>
template <typename TSlot>
int Deserializer<IsolateT>::WriteAddress(TSlot dest, Address value) {
  DCHECK(!next_reference_is_weak_);
  memcpy(dest.ToVoidPtr(), &value, kSystemPointerSize);
  STATIC_ASSERT(IsAligned(kSystemPointerSize, TSlot::kSlotDataSize));
  return (kSystemPointerSize / TSlot::kSlotDataSize);
}

template <typename IsolateT>
template <typename TSlot>
int Deserializer<IsolateT>::WriteExternalPointer(TSlot dest, Address value,
                                                 ExternalPointerTag tag) {
  DCHECK(!next_reference_is_weak_);
  DCHECK(IsAligned(kExternalPointerSize, TSlot::kSlotDataSize));
  InitExternalPointerField(dest.address(), main_thread_isolate(), value, tag);
  return (kExternalPointerSize / TSlot::kSlotDataSize);
}

namespace {
#ifdef DEBUG
int GetNumApiReferences(Isolate* isolate) {
  int num_api_references = 0;
  // The read-only deserializer is run by read-only heap set-up before the
  // heap is fully set up. The external reference table relies on a few parts
  // of this set-up (like old-space), so it may be uninitialized at this point.
  if (isolate->isolate_data()->external_reference_table()->is_initialized()) {
    // Count the number of external references registered through the API.
    if (isolate->api_external_references() != nullptr) {
      while (isolate->api_external_references()[num_api_references] != 0) {
        num_api_references++;
      }
    }
  }
  return num_api_references;
}
int GetNumApiReferences(LocalIsolate* isolate) { return 0; }
#endif
}  // namespace

template <typename IsolateT>
Deserializer<IsolateT>::Deserializer(IsolateT* isolate,
                                     base::Vector<const byte> payload,
                                     uint32_t magic_number,
                                     bool deserializing_user_code,
                                     bool can_rehash)
    : isolate_(isolate),
      source_(payload),
      magic_number_(magic_number),
      deserializing_user_code_(deserializing_user_code),
      should_rehash_((FLAG_rehash_snapshot && can_rehash) ||
                     deserializing_user_code) {
  DCHECK_NOT_NULL(isolate);
  isolate->RegisterDeserializerStarted();

  // We start the indices here at 1, so that we can distinguish between an
  // actual index and an empty backing store (serialized as
  // kEmptyBackingStoreRefSentinel) in a deserialized object requiring fix-up.
  STATIC_ASSERT(kEmptyBackingStoreRefSentinel == 0);
  backing_stores_.push_back({});
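
  // Illustrative consequence (not part of the original source): because of
  // the dummy entry pushed above, a serialized ref equal to
  // kEmptyBackingStoreRefSentinel (0) never aliases a real store, and a ref
  // of N >= 1 indexes the N-th backing store deserialized into
  // backing_stores_.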

#ifdef DEBUG
  num_api_references_ = GetNumApiReferences(isolate);
#endif  // DEBUG
  CHECK_EQ(magic_number_, SerializedData::kMagicNumber);
}

template <typename IsolateT>
void Deserializer<IsolateT>::Rehash() {
  DCHECK(should_rehash());
  for (Handle<HeapObject> item : to_rehash_) {
    item->RehashBasedOnMap(isolate());
  }
}

template <typename IsolateT>
Deserializer<IsolateT>::~Deserializer() {
#ifdef DEBUG
  // Do not perform checks if we aborted deserialization.
  if (source_.position() == 0) return;
  // Check that we only have padding bytes remaining.
  while (source_.HasMore()) DCHECK_EQ(kNop, source_.Get());
  // Check that there are no remaining forward refs.
  DCHECK_EQ(num_unresolved_forward_refs_, 0);
  DCHECK(unresolved_forward_refs_.empty());
#endif  // DEBUG
  isolate_->RegisterDeserializerFinished();
}

// This is called on the roots.  It is the driver of the deserialization
// process.  It is also called on the body of each function.
template <typename IsolateT>
void Deserializer<IsolateT>::VisitRootPointers(Root root,
                                               const char* description,
                                               FullObjectSlot start,
                                               FullObjectSlot end) {
  ReadData(FullMaybeObjectSlot(start), FullMaybeObjectSlot(end));
}

template <typename IsolateT>
void Deserializer<IsolateT>::Synchronize(VisitorSynchronization::SyncTag tag) {
  static const byte expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
}

template <typename IsolateT>
void Deserializer<IsolateT>::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    SnapshotSpace space = NewObject::Decode(code);
    ReadObject(space);
  }
}

template <typename IsolateT>
void Deserializer<IsolateT>::LogNewMapEvents() {
  if (V8_LIKELY(!FLAG_log_maps)) return;
  DisallowGarbageCollection no_gc;
  for (Handle<Map> map : new_maps_) {
    DCHECK(FLAG_log_maps);
    LOG(isolate(), MapCreate(*map));
    LOG(isolate(), MapDetails(*map));
  }
}

template <typename IsolateT>
void Deserializer<IsolateT>::WeakenDescriptorArrays() {
  DisallowGarbageCollection no_gc;
  Map descriptor_array_map = ReadOnlyRoots(isolate()).descriptor_array_map();
  for (Handle<DescriptorArray> descriptor_array : new_descriptor_arrays_) {
    DescriptorArray raw = *descriptor_array;
    DCHECK(raw.IsStrongDescriptorArray());
    raw.set_map_safe_transition(descriptor_array_map);
    WriteBarrier::Marking(raw, raw.number_of_descriptors());
  }
}

template <typename IsolateT>
void Deserializer<IsolateT>::LogScriptEvents(Script script) {
  DisallowGarbageCollection no_gc;
  LOG(isolate(),
      ScriptEvent(Logger::ScriptEventType::kDeserialize, script.id()));
  LOG(isolate(), ScriptDetails(script));
}

namespace {
template <typename IsolateT>
uint32_t ComputeRawHashField(IsolateT* isolate, String string) {
  // Make sure raw_hash_field() is computed.
  string.EnsureHash(SharedStringAccessGuardIfNeeded(isolate));
  return string.raw_hash_field();
}
}  // namespace

StringTableInsertionKey::StringTableInsertionKey(
    Isolate* isolate, Handle<String> string,
    DeserializingUserCodeOption deserializing_user_code)
    : StringTableKey(ComputeRawHashField(isolate, *string), string->length()),
      string_(string) {
#ifdef DEBUG
  deserializing_user_code_ = deserializing_user_code;
#endif
  DCHECK(string->IsInternalizedString());
}

StringTableInsertionKey::StringTableInsertionKey(
    LocalIsolate* isolate, Handle<String> string,
    DeserializingUserCodeOption deserializing_user_code)
    : StringTableKey(ComputeRawHashField(isolate, *string), string->length()),
      string_(string) {
#ifdef DEBUG
  deserializing_user_code_ = deserializing_user_code;
#endif
  DCHECK(string->IsInternalizedString());
}

template <typename IsolateT>
bool StringTableInsertionKey::IsMatch(IsolateT* isolate, String string) {
  // We want to compare the content of two strings here.
  return string_->SlowEquals(string, SharedStringAccessGuardIfNeeded(isolate));
}
template bool StringTableInsertionKey::IsMatch(Isolate* isolate, String string);
template bool StringTableInsertionKey::IsMatch(LocalIsolate* isolate,
                                               String string);

namespace {

void NoExternalReferencesCallback() {
  // The following check will trigger if a function or object template
  // with references to native functions has been deserialized from a
  // snapshot, but no actual external references were provided when the
  // isolate was created.
  FATAL("No external references provided via API");
}

void PostProcessExternalString(ExternalString string, Isolate* isolate) {
  DisallowGarbageCollection no_gc;
  uint32_t index = string.GetResourceRefForDeserialization();
  Address address =
      static_cast<Address>(isolate->api_external_references()[index]);
  string.AllocateExternalPointerEntries(isolate);
  string.set_address_as_resource(isolate, address);
  isolate->heap()->UpdateExternalString(string, 0,
                                        string.ExternalPayloadSize());
  isolate->heap()->RegisterExternalString(string);
}

}  // namespace

template <typename IsolateT>
void Deserializer<IsolateT>::PostProcessNewJSReceiver(
    Map map, Handle<JSReceiver> obj, JSReceiver raw_obj,
    InstanceType instance_type, SnapshotSpace space) {
  DisallowGarbageCollection no_gc;
  DCHECK_EQ(*obj, raw_obj);
  DCHECK_EQ(raw_obj.map(), map);
  DCHECK_EQ(map.instance_type(), instance_type);

  if (InstanceTypeChecker::IsJSDataView(instance_type)) {
    auto data_view = JSDataView::cast(raw_obj);
    auto buffer = JSArrayBuffer::cast(data_view.buffer());
    void* backing_store = EmptyBackingStoreBuffer();
    uint32_t store_index = buffer.GetBackingStoreRefForDeserialization();
    if (store_index != kEmptyBackingStoreRefSentinel) {
      // The backing store of the JSArrayBuffer has not been correctly restored
      // yet, as that may trigger GC. The backing_store field currently contains
      // a numbered reference to an already deserialized backing store.
      backing_store = backing_stores_[store_index]->buffer_start();
    }
    data_view.set_data_pointer(
        main_thread_isolate(),
        reinterpret_cast<uint8_t*>(backing_store) + data_view.byte_offset());
  } else if (InstanceTypeChecker::IsJSTypedArray(instance_type)) {
    auto typed_array = JSTypedArray::cast(raw_obj);
    // Fixup typed array pointers.
    if (typed_array.is_on_heap()) {
      typed_array.AddExternalPointerCompensationForDeserialization(
          main_thread_isolate());
    } else {
      // Serializer writes backing store ref as a DataPtr() value.
      uint32_t store_index =
          typed_array.GetExternalBackingStoreRefForDeserialization();
      auto backing_store = backing_stores_[store_index];
      void* start = backing_store ? backing_store->buffer_start()
                                  : EmptyBackingStoreBuffer();
      typed_array.SetOffHeapDataPtr(main_thread_isolate(), start,
                                    typed_array.byte_offset());
    }
  } else if (InstanceTypeChecker::IsJSArrayBuffer(instance_type)) {
    auto buffer = JSArrayBuffer::cast(raw_obj);
    // Postpone allocation of backing store to avoid triggering the GC.
    if (buffer.GetBackingStoreRefForDeserialization() !=
        kEmptyBackingStoreRefSentinel) {
      new_off_heap_array_buffers_.push_back(Handle<JSArrayBuffer>::cast(obj));
    } else {
      buffer.set_backing_store(main_thread_isolate(),
                               EmptyBackingStoreBuffer());
    }
  }

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(map)));
}

template <typename IsolateT>
void Deserializer<IsolateT>::PostProcessNewObject(Handle<Map> map,
                                                  Handle<HeapObject> obj,
                                                  SnapshotSpace space) {
  DisallowGarbageCollection no_gc;
  Map raw_map = *map;
  DCHECK_EQ(raw_map, obj->map(isolate_));
  InstanceType instance_type = raw_map.instance_type();

  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(),
                                    HeapObject::RequiredAlignment(raw_map)));
  HeapObject raw_obj = *obj;
  DCHECK_IMPLIES(deserializing_user_code(), should_rehash());
  if (should_rehash()) {
    if (InstanceTypeChecker::IsString(instance_type)) {
      // Uninitialize hash field as we need to recompute the hash.
      String string = String::cast(raw_obj);
      string.set_raw_hash_field(String::kEmptyHashField);
      // Rehash strings before read-only space is sealed. Strings outside
      // read-only space are rehashed lazily (e.g. when rehashing dictionaries).
      if (space == SnapshotSpace::kReadOnlyHeap) {
        to_rehash_.push_back(obj);
      }
    } else if (raw_obj.NeedsRehashing(instance_type)) {
      to_rehash_.push_back(obj);
    }

    if (deserializing_user_code()) {
      if (InstanceTypeChecker::IsInternalizedString(instance_type)) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set the string to point to the existing one and patch
        // the deserialized string handle to point to the existing one.
        // TODO(leszeks): This handle patching is ugly, consider adding an
        // explicit internalized string bytecode. Also, the new thin string
        // should be dead, try immediately freeing it.
        Handle<String> string = Handle<String>::cast(obj);

        StringTableInsertionKey key(
            isolate(), string,
            DeserializingUserCodeOption::kIsDeserializingUserCode);
        String result = *isolate()->string_table()->LookupKey(isolate(), &key);

        if (result != raw_obj) {
          String::cast(raw_obj).MakeThin(isolate(), result);
          // Mutate the given object handle so that the backreference entry is
          // also updated.
          obj.PatchValue(result);
        }
        return;
      } else if (InstanceTypeChecker::IsScript(instance_type)) {
        new_scripts_.push_back(Handle<Script>::cast(obj));
      } else if (InstanceTypeChecker::IsAllocationSite(instance_type)) {
        // We should link new allocation sites, but we can't do this immediately
        // because |AllocationSite::HasWeakNext()| internally accesses
        // |Heap::roots_| that may not have been initialized yet. So defer this
        // to |ObjectDeserializer::CommitPostProcessedObjects()|.
        new_allocation_sites_.push_back(Handle<AllocationSite>::cast(obj));
      } else {
        DCHECK(CanBeDeferred(*obj));
      }
    }
  }

  if (InstanceTypeChecker::IsCode(instance_type)) {
    // We flush all code pages after deserializing the startup snapshot.
    // Hence we only remember each individual code object when deserializing
    // user code.
    if (deserializing_user_code()) {
      new_code_objects_.push_back(Handle<Code>::cast(obj));
    }
  } else if (V8_EXTERNAL_CODE_SPACE_BOOL &&
             InstanceTypeChecker::IsCodeDataContainer(instance_type)) {
    auto code_data_container = CodeDataContainer::cast(raw_obj);
    code_data_container.set_code_cage_base(isolate()->code_cage_base());
    code_data_container.AllocateExternalPointerEntries(main_thread_isolate());
    code_data_container.UpdateCodeEntryPoint(main_thread_isolate(),
                                             code_data_container.code());
  } else if (InstanceTypeChecker::IsMap(instance_type)) {
    if (FLAG_log_maps) {
      // Keep track of all seen Maps to log them later since they might be only
      // partially initialized at this point.
      new_maps_.push_back(Handle<Map>::cast(obj));
    }
  } else if (InstanceTypeChecker::IsAccessorInfo(instance_type)) {
#ifdef USE_SIMULATOR
    accessor_infos_.push_back(Handle<AccessorInfo>::cast(obj));
#endif
  } else if (InstanceTypeChecker::IsCallHandlerInfo(instance_type)) {
#ifdef USE_SIMULATOR
    call_handler_infos_.push_back(Handle<CallHandlerInfo>::cast(obj));
#endif
  } else if (InstanceTypeChecker::IsExternalString(instance_type)) {
    PostProcessExternalString(ExternalString::cast(raw_obj),
                              main_thread_isolate());
  } else if (InstanceTypeChecker::IsJSReceiver(instance_type)) {
    return PostProcessNewJSReceiver(raw_map, Handle<JSReceiver>::cast(obj),
                                    JSReceiver::cast(raw_obj), instance_type,
                                    space);
  } else if (InstanceTypeChecker::IsBytecodeArray(instance_type)) {
    // TODO(mythria): Remove these once we store the default values for these
    // fields in the serializer.
    BytecodeArray::cast(raw_obj).reset_osr_urgency();
  } else if (InstanceTypeChecker::IsDescriptorArray(instance_type)) {
    DCHECK(InstanceTypeChecker::IsStrongDescriptorArray(instance_type));
    Handle<DescriptorArray> descriptors = Handle<DescriptorArray>::cast(obj);
    new_descriptor_arrays_.push_back(descriptors);
  } else if (InstanceTypeChecker::IsNativeContext(instance_type)) {
    NativeContext::cast(raw_obj).AllocateExternalPointerEntries(
        main_thread_isolate());
  } else if (InstanceTypeChecker::IsScript(instance_type)) {
    LogScriptEvents(Script::cast(*obj));
  }
}

template <typename IsolateT>
HeapObjectReferenceType Deserializer<IsolateT>::GetAndResetNextReferenceType() {
  HeapObjectReferenceType type = next_reference_is_weak_
                                     ? HeapObjectReferenceType::WEAK
                                     : HeapObjectReferenceType::STRONG;
  next_reference_is_weak_ = false;
  return type;
}
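
// Illustrative note (not part of the original source): next_reference_is_weak_
// is set by the kWeakPrefix bytecode handled below; the next reference-writing
// bytecode (e.g. kBackref or kNewObject) then consumes it through this
// accessor. A weak slot is thus serialized as kWeakPrefix followed by the
// ordinary reference bytecode.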

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::GetBackReferencedObject() {
  Handle<HeapObject> obj = back_refs_[source_.GetInt()];

  // We don't allow ThinStrings in backreferences -- if internalization produces
  // a thin string, then it should also update the backref handle.
  DCHECK(!obj->IsThinString(isolate()));

  hot_objects_.Add(obj);
  DCHECK(!HasWeakHeapObjectTag(*obj));
  return obj;
}

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::ReadObject() {
  Handle<HeapObject> ret;
  CHECK_EQ(ReadSingleBytecodeData(
               source_.Get(), SlotAccessorForHandle<IsolateT>(&ret, isolate())),
           1);
  return ret;
}

namespace {
AllocationType SpaceToAllocation(SnapshotSpace space) {
  switch (space) {
    case SnapshotSpace::kCode:
      return AllocationType::kCode;
    case SnapshotSpace::kMap:
      return AllocationType::kMap;
    case SnapshotSpace::kOld:
      return AllocationType::kOld;
    case SnapshotSpace::kReadOnlyHeap:
      return AllocationType::kReadOnly;
  }
}
}  // namespace

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::ReadObject(SnapshotSpace space) {
  const int size_in_tagged = source_.GetInt();
  const int size_in_bytes = size_in_tagged * kTaggedSize;

  // The map can't be a forward ref. If you want the map to be a forward ref,
  // then you're probably serializing the meta-map, in which case you want to
  // use the kNewMetaMap bytecode.
  DCHECK_NE(source()->Peek(), kRegisterPendingForwardRef);
  Handle<Map> map = Handle<Map>::cast(ReadObject());

  AllocationType allocation = SpaceToAllocation(space);

  // When sharing a string table, all in-place internalizable and internalized
  // strings are allocated in the shared heap.
  //
  // TODO(12007): When shipping, add a new SharedOld SnapshotSpace.
  if (FLAG_shared_string_table) {
    InstanceType instance_type = map->instance_type();
    if (InstanceTypeChecker::IsInternalizedString(instance_type) ||
        String::IsInPlaceInternalizable(instance_type)) {
      allocation = isolate()
                       ->factory()
                       ->RefineAllocationTypeForInPlaceInternalizableString(
                           allocation, *map);
    }
  }

  // Filling an object's fields can cause GCs and heap walks, so this object has
  // to be in a 'sufficiently initialised' state by the time the next allocation
  // can happen. For this to be the case, the object is carefully deserialized
  // as follows:
  //   * The space for the object is allocated.
  //   * The map is set on the object so that the GC knows what type the object
  //     has.
  //   * The rest of the object is filled with a fixed Smi value
  //     - This is a Smi so that tagged fields become initialized to a valid
  //       tagged value.
  //     - It's a fixed value, "Smi::uninitialized_deserialization_value()", so
  //       that we can DCHECK for it when reading objects that are assumed to be
  //       partially initialized objects.
  //   * The fields of the object are deserialized in order, under the
  //     assumption that objects are laid out in such a way that any fields
  //     required for object iteration (e.g. length fields) are deserialized
  //     before fields with objects.
  //     - We ensure this is the case by DCHECKing on object allocation that the
  //       previously allocated object has a valid size (see `Allocate`).
  HeapObject raw_obj =
      Allocate(allocation, size_in_bytes, HeapObject::RequiredAlignment(*map));
  raw_obj.set_map_after_allocation(*map);
  MemsetTagged(raw_obj.RawField(kTaggedSize),
               Smi::uninitialized_deserialization_value(), size_in_tagged - 1);

  // Make sure BytecodeArrays have a valid age, so that the marker doesn't
  // break when making them older.
  if (raw_obj.IsBytecodeArray(isolate())) {
    BytecodeArray::cast(raw_obj).set_bytecode_age(
        BytecodeArray::kFirstBytecodeAge);
  }

#ifdef DEBUG
  PtrComprCageBase cage_base(isolate());
  // We want to make sure that all embedder pointers are initialized to null.
  if (raw_obj.IsJSObject(cage_base) &&
      JSObject::cast(raw_obj).MayHaveEmbedderFields()) {
    JSObject js_obj = JSObject::cast(raw_obj);
    for (int i = 0; i < js_obj.GetEmbedderFieldCount(); ++i) {
      void* pointer;
      CHECK(EmbedderDataSlot(js_obj, i).ToAlignedPointer(main_thread_isolate(),
                                                         &pointer));
      CHECK_NULL(pointer);
    }
  } else if (raw_obj.IsEmbedderDataArray(cage_base)) {
    EmbedderDataArray array = EmbedderDataArray::cast(raw_obj);
    EmbedderDataSlot start(array, 0);
    EmbedderDataSlot end(array, array.length());
    for (EmbedderDataSlot slot = start; slot < end; ++slot) {
      void* pointer;
      CHECK(slot.ToAlignedPointer(main_thread_isolate(), &pointer));
      CHECK_NULL(pointer);
    }
  }
#endif

  Handle<HeapObject> obj = handle(raw_obj, isolate());
  back_refs_.push_back(obj);

  ReadData(obj, 1, size_in_tagged);
  PostProcessNewObject(map, obj, space);

#ifdef DEBUG
  if (obj->IsCode(cage_base)) {
    DCHECK(space == SnapshotSpace::kCode ||
           space == SnapshotSpace::kReadOnlyHeap);
  } else {
    DCHECK_NE(space, SnapshotSpace::kCode);
  }
#endif  // DEBUG

  return obj;
}

template <typename IsolateT>
Handle<HeapObject> Deserializer<IsolateT>::ReadMetaMap() {
  const SnapshotSpace space = SnapshotSpace::kReadOnlyHeap;
  const int size_in_bytes = Map::kSize;
  const int size_in_tagged = size_in_bytes / kTaggedSize;

  HeapObject raw_obj =
      Allocate(SpaceToAllocation(space), size_in_bytes, kTaggedAligned);
  raw_obj.set_map_after_allocation(Map::unchecked_cast(raw_obj));
  MemsetTagged(raw_obj.RawField(kTaggedSize),
               Smi::uninitialized_deserialization_value(), size_in_tagged - 1);

  Handle<HeapObject> obj = handle(raw_obj, isolate());
  back_refs_.push_back(obj);

  // Set the instance-type manually, to allow backrefs to read it.
  Map::unchecked_cast(*obj).set_instance_type(MAP_TYPE);

  ReadData(obj, 1, size_in_tagged);
  PostProcessNewObject(Handle<Map>::cast(obj), obj, space);

  return obj;
}

class DeserializerRelocInfoVisitor {
 public:
  DeserializerRelocInfoVisitor(Deserializer<Isolate>* deserializer,
                               const std::vector<Handle<HeapObject>>* objects)
      : deserializer_(deserializer), objects_(objects), current_object_(0) {}

  DeserializerRelocInfoVisitor(Deserializer<LocalIsolate>* deserializer,
                               const std::vector<Handle<HeapObject>>* objects) {
    UNREACHABLE();
  }

  ~DeserializerRelocInfoVisitor() {
    DCHECK_EQ(current_object_, objects_->size());
  }

  void VisitCodeTarget(Code host, RelocInfo* rinfo);
  void VisitEmbeddedPointer(Code host, RelocInfo* rinfo);
  void VisitRuntimeEntry(Code host, RelocInfo* rinfo);
  void VisitExternalReference(Code host, RelocInfo* rinfo);
  void VisitInternalReference(Code host, RelocInfo* rinfo);
  void VisitOffHeapTarget(Code host, RelocInfo* rinfo);

 private:
  Isolate* isolate() { return deserializer_->isolate(); }
  SnapshotByteSource& source() { return deserializer_->source_; }

  Deserializer<Isolate>* deserializer_;
  const std::vector<Handle<HeapObject>>* objects_;
  int current_object_;
};

void DeserializerRelocInfoVisitor::VisitCodeTarget(Code host,
                                                   RelocInfo* rinfo) {
  HeapObject object = *objects_->at(current_object_++);
  rinfo->set_target_address(Code::cast(object).raw_instruction_start());
}

void DeserializerRelocInfoVisitor::VisitEmbeddedPointer(Code host,
                                                        RelocInfo* rinfo) {
  HeapObject object = *objects_->at(current_object_++);
  // Embedded object reference must be a strong one.
  rinfo->set_target_object(isolate()->heap(), object);
}

void DeserializerRelocInfoVisitor::VisitRuntimeEntry(Code host,
                                                     RelocInfo* rinfo) {
  // We no longer serialize code that contains runtime entries.
  UNREACHABLE();
}

void DeserializerRelocInfoVisitor::VisitExternalReference(Code host,
                                                          RelocInfo* rinfo) {
  byte data = source().Get();
  CHECK_EQ(data, Deserializer<Isolate>::kExternalReference);

  Address address = deserializer_->ReadExternalReferenceCase();

  if (rinfo->IsCodedSpecially()) {
    Address location_of_branch_data = rinfo->pc();
    Assembler::deserialization_set_special_target_at(location_of_branch_data,
                                                     host, address);
  } else {
    WriteUnalignedValue(rinfo->target_address_address(), address);
  }
}

void DeserializerRelocInfoVisitor::VisitInternalReference(Code host,
                                                          RelocInfo* rinfo) {
  byte data = source().Get();
  CHECK_EQ(data, Deserializer<Isolate>::kInternalReference);

  // Internal reference target is encoded as an offset from code entry.
  int target_offset = source().GetInt();
  // TODO(jgruber,v8:11036): We are being permissive for this DCHECK, but
  // consider using raw_instruction_size() instead of raw_body_size() in the
  // future.
  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  DCHECK_LT(static_cast<unsigned>(target_offset),
            static_cast<unsigned>(host.raw_body_size()));
  Address target = host.entry() + target_offset;
  Assembler::deserialization_set_target_internal_reference_at(
      rinfo->pc(), target, rinfo->rmode());
}

void DeserializerRelocInfoVisitor::VisitOffHeapTarget(Code host,
                                                      RelocInfo* rinfo) {
  byte data = source().Get();
  CHECK_EQ(data, Deserializer<Isolate>::kOffHeapTarget);

  Builtin builtin = Builtins::FromInt(source().GetInt());

  CHECK_NOT_NULL(isolate()->embedded_blob_code());
  EmbeddedData d = EmbeddedData::FromBlob(isolate());
  Address address = d.InstructionStartOfBuiltin(builtin);
  CHECK_NE(kNullAddress, address);

  // TODO(ishell): implement RelocInfo::set_target_off_heap_target()
  if (RelocInfo::OffHeapTargetIsCodedSpecially()) {
    Address location_of_branch_data = rinfo->pc();
    Assembler::deserialization_set_special_target_at(location_of_branch_data,
                                                     host, address);
  } else {
    WriteUnalignedValue(rinfo->target_address_address(), address);
  }
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadRepeatedObject(SlotAccessor slot_accessor,
                                               int repeat_count) {
  CHECK_LE(2, repeat_count);

  Handle<HeapObject> heap_object = ReadObject();
  DCHECK(!Heap::InYoungGeneration(*heap_object));
  for (int i = 0; i < repeat_count; i++) {
    // TODO(leszeks): Use a ranged barrier here.
    slot_accessor.Write(heap_object, HeapObjectReferenceType::STRONG, i);
  }
  return repeat_count;
}

namespace {

// Template used by the below CASE_RANGE macro to statically verify that the
// given number of cases matches the number of expected cases for that bytecode.
template <int byte_code_count, int expected>
constexpr byte VerifyBytecodeCount(byte bytecode) {
  STATIC_ASSERT(byte_code_count == expected);
  return bytecode;
}

}  // namespace

// Helper macro (and its implementation detail) for specifying a range of cases.
// Use as "case CASE_RANGE(byte_code, num_bytecodes):"
#define CASE_RANGE(byte_code, num_bytecodes) \
  CASE_R##num_bytecodes(                     \
      (VerifyBytecodeCount<byte_code##Count, num_bytecodes>(byte_code)))
#define CASE_R1(byte_code) byte_code
#define CASE_R2(byte_code) CASE_R1(byte_code) : case CASE_R1(byte_code + 1)
#define CASE_R3(byte_code) CASE_R2(byte_code) : case CASE_R1(byte_code + 2)
#define CASE_R4(byte_code) CASE_R2(byte_code) : case CASE_R2(byte_code + 2)
#define CASE_R8(byte_code) CASE_R4(byte_code) : case CASE_R4(byte_code + 4)
#define CASE_R16(byte_code) CASE_R8(byte_code) : case CASE_R8(byte_code + 8)
#define CASE_R32(byte_code) CASE_R16(byte_code) : case CASE_R16(byte_code + 16)
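
// Illustrative expansion (not part of the original source): with the macros
// above, "case CASE_RANGE(kHotObject, 8):" first statically checks that
// kHotObjectCount == 8, then unfolds into the eight consecutive labels
//
//   case kHotObject: case kHotObject + 1: ... case kHotObject + 7:
//
// so one switch arm covers a whole contiguous bytecode range.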

// This generates a case range for all the spaces.
#define CASE_RANGE_ALL_SPACES(bytecode)                           \
  SpaceEncoder<bytecode>::Encode(SnapshotSpace::kOld)             \
      : case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kCode) \
      : case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kMap)  \
      : case SpaceEncoder<bytecode>::Encode(SnapshotSpace::kReadOnlyHeap)

template <typename IsolateT>
void Deserializer<IsolateT>::ReadData(Handle<HeapObject> object,
                                      int start_slot_index,
                                      int end_slot_index) {
  int current = start_slot_index;
  while (current < end_slot_index) {
    byte data = source_.Get();
    current += ReadSingleBytecodeData(
        data, SlotAccessorForHeapObject::ForSlotIndex(object, current));
  }
  CHECK_EQ(current, end_slot_index);
}

template <typename IsolateT>
void Deserializer<IsolateT>::ReadData(FullMaybeObjectSlot start,
                                      FullMaybeObjectSlot end) {
  FullMaybeObjectSlot current = start;
  while (current < end) {
    byte data = source_.Get();
    current += ReadSingleBytecodeData(data, SlotAccessorForRootSlots(current));
  }
  CHECK_EQ(current, end);
}

template <typename IsolateT>
template <typename SlotAccessor>
int Deserializer<IsolateT>::ReadSingleBytecodeData(byte data,
                                                   SlotAccessor slot_accessor) {
  using TSlot = decltype(slot_accessor.slot());

  switch (data) {
    // Deserialize a new object and write a pointer to it to the current
    // object.
    case CASE_RANGE_ALL_SPACES(kNewObject): {
      SnapshotSpace space = NewObject::Decode(data);
      // Save the reference type before recursing down into reading the object.
      HeapObjectReferenceType ref_type = GetAndResetNextReferenceType();
      Handle<HeapObject> heap_object = ReadObject(space);
      return slot_accessor.Write(heap_object, ref_type);
    }

    // Find a recently deserialized object using its offset from the current
    // allocation point and write a pointer to it to the current object.
    case kBackref: {
      Handle<HeapObject> heap_object = GetBackReferencedObject();
      return slot_accessor.Write(heap_object, GetAndResetNextReferenceType());
    }

    // Reference an object in the read-only heap. This should be used when an
    // object is read-only, but is not a root.
    case kReadOnlyHeapRef: {
      DCHECK(isolate()->heap()->deserialization_complete());
      uint32_t chunk_index = source_.GetInt();
      uint32_t chunk_offset = source_.GetInt();

      ReadOnlySpace* read_only_space = isolate()->heap()->read_only_space();
      ReadOnlyPage* page = read_only_space->pages()[chunk_index];
      Address address = page->OffsetToAddress(chunk_offset);
      HeapObject heap_object = HeapObject::FromAddress(address);

      return slot_accessor.Write(heap_object, GetAndResetNextReferenceType());
    }

    // Find an object in the roots array and write a pointer to it to the
    // current object.
    case kRootArray: {
      int id = source_.GetInt();
      RootIndex root_index = static_cast<RootIndex>(id);
      Handle<HeapObject> heap_object =
          Handle<HeapObject>::cast(isolate()->root_handle(root_index));
      hot_objects_.Add(heap_object);
      return slot_accessor.Write(heap_object, GetAndResetNextReferenceType());
    }

    // Find an object in the startup object cache and write a pointer to it to
    // the current object.
    case kStartupObjectCache: {
      int cache_index = source_.GetInt();
      // TODO(leszeks): Could we use the address of the startup_object_cache
      // entry as a Handle backing?
      HeapObject heap_object = HeapObject::cast(
          main_thread_isolate()->startup_object_cache()->at(cache_index));
      return slot_accessor.Write(heap_object, GetAndResetNextReferenceType());
    }

    // Find an object in the read-only object cache and write a pointer to it
    // to the current object.
    case kReadOnlyObjectCache: {
      int cache_index = source_.GetInt();
      // TODO(leszeks): Could we use the address of the cached_read_only_object
      // entry as a Handle backing?
      HeapObject heap_object = HeapObject::cast(
          isolate()->read_only_heap()->cached_read_only_object(cache_index));
      return slot_accessor.Write(heap_object, GetAndResetNextReferenceType());
    }

    // Find an object in the shared heap object cache and write a pointer to it
    // to the current object.
    case kSharedHeapObjectCache: {
      int cache_index = source_.GetInt();
      // TODO(leszeks): Could we use the address of the
      // shared_heap_object_cache entry as a Handle backing?
      HeapObject heap_object = HeapObject::cast(
          main_thread_isolate()->shared_heap_object_cache()->at(cache_index));
      DCHECK(
          SharedHeapSerializer::ShouldBeInSharedHeapObjectCache(heap_object));
      return slot_accessor.Write(heap_object, GetAndResetNextReferenceType());
    }

    // Deserialize a new meta-map and write a pointer to it to the current
    // object.
    case kNewMetaMap: {
      Handle<HeapObject> heap_object = ReadMetaMap();
      return slot_accessor.Write(heap_object, HeapObjectReferenceType::STRONG);
    }

    // Find an external reference and write a pointer to it to the current
    // object.
    case kSandboxedExternalReference:
    case kExternalReference: {
      Address address = ReadExternalReferenceCase();
      if (V8_SANDBOXED_EXTERNAL_POINTERS_BOOL &&
          data == kSandboxedExternalReference) {
        ExternalPointerTag tag = ReadExternalPointerTag();
        return WriteExternalPointer(slot_accessor.slot(), address, tag);
      } else {
        DCHECK(!V8_SANDBOXED_EXTERNAL_POINTERS_BOOL);
        return WriteAddress(slot_accessor.slot(), address);
      }
    }

    case kInternalReference:
    case kOffHeapTarget:
      // These bytecodes are expected only during RelocInfo iteration.
      UNREACHABLE();

    // Find an object in the attached references and write a pointer to it to
    // the current object.
    case kAttachedReference: {
      int index = source_.GetInt();
      Handle<HeapObject> heap_object = attached_objects_[index];

      // This is the only case where we might encounter new space objects, so
      // maybe emit a generational write barrier.
      return slot_accessor.WriteWithGenerationalBarrier(
          heap_object, GetAndResetNextReferenceType());
    }

    case kNop:
      return 0;

    case kRegisterPendingForwardRef: {
      HeapObjectReferenceType ref_type = GetAndResetNextReferenceType();
      unresolved_forward_refs_.emplace_back(slot_accessor.object(),
                                            slot_accessor.offset(), ref_type);
      num_unresolved_forward_refs_++;
      return 1;
    }

    case kResolvePendingForwardRef: {
      // Pending forward refs can only be resolved after the heap object's map
      // field is deserialized; currently they only appear immediately after
      // the map field.
      DCHECK_EQ(slot_accessor.offset(), HeapObject::kHeaderSize);
      Handle<HeapObject> obj = slot_accessor.object();
      int index = source_.GetInt();
      auto& forward_ref = unresolved_forward_refs_[index];
      SlotAccessorForHeapObject::ForSlotOffset(forward_ref.object,
                                               forward_ref.offset)
          .Write(*obj, forward_ref.ref_type);
      num_unresolved_forward_refs_--;
      if (num_unresolved_forward_refs_ == 0) {
        // If there are no more pending fields, clear the entire pending-field
        // vector.
        unresolved_forward_refs_.clear();
      } else {
        // Otherwise, at least clear the pending field.
        forward_ref.object = Handle<HeapObject>();
      }
      return 0;
    }

    case kSynchronize:
      // Getting here indicates a mismatch between the number of GC roots when
      // serializing and when deserializing.
      UNREACHABLE();

    // Deserialize raw data of variable length.
    case kVariableRawData: {
      // This operation is only supported for tagged-size slots, else we might
      // become misaligned.
      DCHECK_EQ(TSlot::kSlotDataSize, kTaggedSize);
      int size_in_tagged = source_.GetInt();
      // TODO(leszeks): Only copy slots when there are Smis in the serialized
      // data.
      source_.CopySlots(slot_accessor.slot().location(), size_in_tagged);
      return size_in_tagged;
    }

    // Deserialize raw code directly into the body of the code object.
    case kCodeBody: {
      // This operation is only supported for tagged-size slots, else we might
      // become misaligned.
      DCHECK_EQ(TSlot::kSlotDataSize, kTaggedSize);
      // CodeBody can only occur right after the heap object header.
      DCHECK_EQ(slot_accessor.offset(), HeapObject::kHeaderSize);

      int size_in_tagged = source_.GetInt();
      int size_in_bytes = size_in_tagged * kTaggedSize;

      {
        DisallowGarbageCollection no_gc;
        Code code = Code::cast(*slot_accessor.object());

        // First deserialize the code itself.
        source_.CopyRaw(
            reinterpret_cast<void*>(code.address() + Code::kDataStart),
            size_in_bytes);
      }

      // Then deserialize the code header
      ReadData(slot_accessor.object(), HeapObject::kHeaderSize / kTaggedSize,
               Code::kDataStart / kTaggedSize);

      // Then deserialize the pre-serialized RelocInfo objects.
      std::vector<Handle<HeapObject>> preserialized_objects;
      while (source_.Peek() != kSynchronize) {
        Handle<HeapObject> obj = ReadObject();
        preserialized_objects.push_back(obj);
      }
      // Skip the synchronize bytecode.
      source_.Advance(1);

      // Finally iterate RelocInfos (the same way it was done by the serializer)
      // and deserialize respective data into RelocInfos. The RelocIterator
      // holds a raw pointer to the code, so we have to disable garbage
      // collection here. It's ok though, any objects it would have needed are
      // in the preserialized_objects vector.
      {
        DisallowGarbageCollection no_gc;

        Code code = Code::cast(*slot_accessor.object());
        if (V8_EXTERNAL_CODE_SPACE_BOOL) {
          code.set_main_cage_base(isolate()->cage_base(), kRelaxedStore);
        }
        DeserializerRelocInfoVisitor visitor(this, &preserialized_objects);
        for (RelocIterator it(code, Code::BodyDescriptor::kRelocModeMask);
             !it.done(); it.next()) {
          it.rinfo()->Visit(&visitor);
        }
      }

      // Advance to the end of the code object.
      return (Code::kDataStart - HeapObject::kHeaderSize) / kTaggedSize +
             size_in_tagged;
    }

    case kVariableRepeat: {
      int repeats = VariableRepeatCount::Decode(source_.GetInt());
      return ReadRepeatedObject(slot_accessor, repeats);
    }

    case kOffHeapBackingStore:
    case kOffHeapResizableBackingStore: {
      int byte_length = source_.GetInt();
      std::unique_ptr<BackingStore> backing_store;
      if (data == kOffHeapBackingStore) {
        backing_store = BackingStore::Allocate(
            main_thread_isolate(), byte_length, SharedFlag::kNotShared,
            InitializedFlag::kUninitialized);
      } else {
        int max_byte_length = source_.GetInt();
        size_t page_size, initial_pages, max_pages;
        Maybe<bool> result =
            JSArrayBuffer::GetResizableBackingStorePageConfiguration(
                nullptr, byte_length, max_byte_length, kDontThrow, &page_size,
                &initial_pages, &max_pages);
        DCHECK(result.FromJust());
        USE(result);
        constexpr bool kIsWasmMemory = false;
        backing_store = BackingStore::TryAllocateAndPartiallyCommitMemory(
            main_thread_isolate(), byte_length, max_byte_length, page_size,
            initial_pages, max_pages, kIsWasmMemory, SharedFlag::kNotShared);
      }
      CHECK_NOT_NULL(backing_store);
      source_.CopyRaw(backing_store->buffer_start(), byte_length);
      backing_stores_.push_back(std::move(backing_store));
      return 0;
    }

    case kSandboxedApiReference:
    case kApiReference: {
      uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
      Address address;
      if (main_thread_isolate()->api_external_references()) {
        DCHECK_WITH_MSG(reference_id < num_api_references_,
                        "too few external references provided through the API");
        address = static_cast<Address>(
            main_thread_isolate()->api_external_references()[reference_id]);
      } else {
        address = reinterpret_cast<Address>(NoExternalReferencesCallback);
      }
      if (V8_SANDBOXED_EXTERNAL_POINTERS_BOOL &&
          data == kSandboxedApiReference) {
        ExternalPointerTag tag = ReadExternalPointerTag();
        return WriteExternalPointer(slot_accessor.slot(), address, tag);
      } else {
        DCHECK(!V8_SANDBOXED_EXTERNAL_POINTERS_BOOL);
        return WriteAddress(slot_accessor.slot(), address);
      }
    }

    case kClearedWeakReference:
      return slot_accessor.Write(HeapObjectReference::ClearedValue(isolate()));

    case kWeakPrefix: {
      // We shouldn't have two weak prefixes in a row.
      DCHECK(!next_reference_is_weak_);
      // We shouldn't have weak refs without a current object.
      DCHECK_NE(slot_accessor.object()->address(), kNullAddress);
      next_reference_is_weak_ = true;
      return 0;
    }

    case CASE_RANGE(kRootArrayConstants, 32): {
      // First kRootArrayConstantsCount roots are guaranteed to be in
      // the old space.
      STATIC_ASSERT(static_cast<int>(RootIndex::kFirstImmortalImmovableRoot) ==
                    0);
      STATIC_ASSERT(kRootArrayConstantsCount <=
                    static_cast<int>(RootIndex::kLastImmortalImmovableRoot));

      RootIndex root_index = RootArrayConstant::Decode(data);
      Handle<HeapObject> heap_object =
          Handle<HeapObject>::cast(isolate()->root_handle(root_index));
      return slot_accessor.Write(heap_object, HeapObjectReferenceType::STRONG);
    }

    case CASE_RANGE(kHotObject, 8): {
      int index = HotObject::Decode(data);
      Handle<HeapObject> hot_object = hot_objects_.Get(index);
      return slot_accessor.Write(hot_object, GetAndResetNextReferenceType());
    }

    case CASE_RANGE(kFixedRawData, 32): {
      // Deserialize raw data of fixed length from 1 to 32 times kTaggedSize.
      int size_in_tagged = FixedRawDataWithSize::Decode(data);
      STATIC_ASSERT(TSlot::kSlotDataSize == kTaggedSize ||
                    TSlot::kSlotDataSize == 2 * kTaggedSize);
      int size_in_slots = size_in_tagged / (TSlot::kSlotDataSize / kTaggedSize);
      // kFixedRawData can have kTaggedSize != TSlot::kSlotDataSize when
      // serializing Smi roots in pointer-compressed builds. In this case, the
      // size in bytes is unconditionally the (full) slot size.
      DCHECK_IMPLIES(kTaggedSize != TSlot::kSlotDataSize, size_in_slots == 1);
      // TODO(leszeks): Only copy slots when there are Smis in the serialized
      // data.
      source_.CopySlots(slot_accessor.slot().location(), size_in_slots);
      return size_in_slots;
    }

    case CASE_RANGE(kFixedRepeat, 16): {
      int repeats = FixedRepeatWithCount::Decode(data);
      return ReadRepeatedObject(slot_accessor, repeats);
    }

#ifdef DEBUG
#define UNUSED_CASE(byte_code) \
  case byte_code:              \
    UNREACHABLE();
      UNUSED_SERIALIZER_BYTE_CODES(UNUSED_CASE)
#endif
#undef UNUSED_CASE
  }

  // The above switch, including UNUSED_SERIALIZER_BYTE_CODES, covers all
  // possible bytecodes; but, clang doesn't realize this, so we have an explicit
  // UNREACHABLE here too.
  UNREACHABLE();
}

#undef CASE_RANGE_ALL_SPACES
#undef CASE_RANGE
#undef CASE_R32
#undef CASE_R16
#undef CASE_R8
#undef CASE_R4
#undef CASE_R3
#undef CASE_R2
#undef CASE_R1

template <typename IsolateT>
Address Deserializer<IsolateT>::ReadExternalReferenceCase() {
  uint32_t reference_id = static_cast<uint32_t>(source_.GetInt());
  return main_thread_isolate()->external_reference_table()->address(
      reference_id);
}

template <typename IsolateT>
ExternalPointerTag Deserializer<IsolateT>::ReadExternalPointerTag() {
  uint64_t shifted_tag = static_cast<uint64_t>(source_.GetInt());
  return static_cast<ExternalPointerTag>(shifted_tag
                                         << kExternalPointerTagShift);
}

template <typename IsolateT>
HeapObject Deserializer<IsolateT>::Allocate(AllocationType allocation, int size,
                                            AllocationAlignment alignment) {
#ifdef DEBUG
  if (!previous_allocation_obj_.is_null()) {
    // Make sure that the previous object is initialized sufficiently to
    // be iterated over by the GC.
    int object_size = previous_allocation_obj_->Size(isolate_);
    DCHECK_LE(object_size, previous_allocation_size_);
  }
#endif

  HeapObject obj = HeapObject::FromAddress(isolate()->heap()->AllocateRawOrFail(
      size, allocation, AllocationOrigin::kRuntime, alignment));

#ifdef DEBUG
  previous_allocation_obj_ = handle(obj, isolate());
  previous_allocation_size_ = size;
#endif

  return obj;
}

template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE) Deserializer<Isolate>;
template class EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Deserializer<LocalIsolate>;

}  // namespace internal
}  // namespace v8