// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/serializer.h"

#include "src/assembler-inl.h"
#include "src/heap/heap.h"
#include "src/interpreter/interpreter.h"
#include "src/objects/code.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/map.h"
#include "src/snapshot/builtin-serializer-allocator.h"
#include "src/snapshot/natives.h"
#include "src/snapshot/snapshot.h"

namespace v8 {
namespace internal {

template <class AllocatorT>
Serializer<AllocatorT>::Serializer(Isolate* isolate)
    : isolate_(isolate),
      external_reference_encoder_(isolate),
      root_index_map_(isolate),
      allocator_(this) {
#ifdef OBJECT_PRINT
  if (FLAG_serialization_statistics) {
    for (int space = 0; space < LAST_SPACE; ++space) {
      instance_type_count_[space] = NewArray<int>(kInstanceTypes);
      instance_type_size_[space] = NewArray<size_t>(kInstanceTypes);
      for (int i = 0; i < kInstanceTypes; i++) {
        instance_type_count_[space][i] = 0;
        instance_type_size_[space][i] = 0;
      }
    }
  } else {
    for (int space = 0; space < LAST_SPACE; ++space) {
      instance_type_count_[space] = nullptr;
      instance_type_size_[space] = nullptr;
    }
  }
#endif  // OBJECT_PRINT
}

template <class AllocatorT>
Serializer<AllocatorT>::~Serializer() {
  if (code_address_map_ != nullptr) delete code_address_map_;
#ifdef OBJECT_PRINT
  for (int space = 0; space < LAST_SPACE; ++space) {
    if (instance_type_count_[space] != nullptr) {
      DeleteArray(instance_type_count_[space]);
      DeleteArray(instance_type_size_[space]);
    }
  }
#endif  // OBJECT_PRINT
}

#ifdef OBJECT_PRINT
template <class AllocatorT>
void Serializer<AllocatorT>::CountInstanceType(Map* map, int size,
                                               AllocationSpace space) {
  int instance_type = map->instance_type();
  instance_type_count_[space][instance_type]++;
  instance_type_size_[space][instance_type] += size;
}
#endif  // OBJECT_PRINT

template <class AllocatorT>
void Serializer<AllocatorT>::OutputStatistics(const char* name) {
  if (!FLAG_serialization_statistics) return;

  PrintF("%s:\n", name);
  allocator()->OutputStatistics();

#ifdef OBJECT_PRINT
  PrintF(" Instance types (count and bytes):\n");
#define PRINT_INSTANCE_TYPE(Name)                                              \
  for (int space = 0; space < LAST_SPACE; ++space) {                           \
    if (instance_type_count_[space][Name]) {                                   \
      PrintF("%10d %10" PRIuS " %-10s %s\n",                                   \
             instance_type_count_[space][Name],                                \
             instance_type_size_[space][Name],                                 \
             AllocationSpaceName(static_cast<AllocationSpace>(space)), #Name); \
    }                                                                          \
  }
  INSTANCE_TYPE_LIST(PRINT_INSTANCE_TYPE)
#undef PRINT_INSTANCE_TYPE

  PrintF("\n");
#endif  // OBJECT_PRINT
}

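// Objects deferred during the main pass were emitted as a kDeferred
// placeholder; their contents are written here, after the main pass, with
// kSynchronize marking the end of the deferred section for the deserializer.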
template <class AllocatorT>
void Serializer<AllocatorT>::SerializeDeferredObjects() {
  while (!deferred_objects_.empty()) {
    HeapObject* obj = deferred_objects_.back();
    deferred_objects_.pop_back();
    ObjectSerializer obj_serializer(this, obj, &sink_, kPlain, kStartOfObject);
    obj_serializer.SerializeDeferred();
  }
  sink_.Put(kSynchronize, "Finished with deferred objects");
}

template <class AllocatorT>
bool Serializer<AllocatorT>::MustBeDeferred(HeapObject* object) {
  return false;
}

template <class AllocatorT>
void Serializer<AllocatorT>::VisitRootPointers(Root root,
                                               const char* description,
                                               Object** start, Object** end) {
  // Builtins and bytecode handlers are serialized in a separate pass by the
  // BuiltinSerializer.
  if (root == Root::kBuiltins || root == Root::kDispatchTable) return;

  for (Object** current = start; current < end; current++) {
    SerializeRootObject(*current);
  }
}

template <class AllocatorT>
void Serializer<AllocatorT>::SerializeRootObject(Object* object) {
  if (object->IsSmi()) {
    PutSmi(Smi::cast(object));
  } else {
    SerializeObject(HeapObject::cast(object), kPlain, kStartOfObject, 0);
  }
}

#ifdef DEBUG
template <class AllocatorT>
void Serializer<AllocatorT>::PrintStack() {
  for (const auto o : stack_) {
    o->Print();
    PrintF("\n");
  }
}
#endif  // DEBUG

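// Hot objects are a small working set of recently serialized objects that the
// deserializer maintains in the same way. A reference to one of them can be
// encoded in a single opcode byte (kHotObject + index) instead of a full back
// reference.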
template <class AllocatorT>
bool Serializer<AllocatorT>::SerializeHotObject(HeapObject* obj,
                                                HowToCode how_to_code,
                                                WhereToPoint where_to_point,
                                                int skip) {
  if (how_to_code != kPlain || where_to_point != kStartOfObject) return false;
  // Encode a reference to a hot object by its index in the working set.
  int index = hot_objects_.Find(obj);
  if (index == HotObjectsList::kNotFound) return false;
  DCHECK(index >= 0 && index < kNumberOfHotObjects);
  if (FLAG_trace_serializer) {
    PrintF(" Encoding hot object %d:", index);
    obj->ShortPrint();
    PrintF("\n");
  }
  if (skip != 0) {
    sink_.Put(kHotObjectWithSkip + index, "HotObjectWithSkip");
    sink_.PutInt(skip, "HotObjectSkipDistance");
  } else {
    sink_.Put(kHotObject + index, "HotObject");
  }
  return true;
}

template <class AllocatorT>
bool Serializer<AllocatorT>::SerializeBackReference(HeapObject* obj,
                                                    HowToCode how_to_code,
                                                    WhereToPoint where_to_point,
                                                    int skip) {
  SerializerReference reference = reference_map_.LookupReference(obj);
  if (!reference.is_valid()) return false;
  // Encode the location of an already deserialized object in order to write
  // its location into a later object. We can encode the location as an
  // offset from the start of the deserialized objects or as an offset
  // backwards from the current allocation pointer.
  if (reference.is_attached_reference()) {
    FlushSkip(skip);
    if (FLAG_trace_serializer) {
      PrintF(" Encoding attached reference %d\n",
             reference.attached_reference_index());
    }
    PutAttachedReference(reference, how_to_code, where_to_point);
  } else {
    DCHECK(reference.is_back_reference());
    if (FLAG_trace_serializer) {
      PrintF(" Encoding back reference to: ");
      obj->ShortPrint();
      PrintF("\n");
    }

    PutAlignmentPrefix(obj);
    AllocationSpace space = reference.space();
    if (skip == 0) {
      sink_.Put(kBackref + how_to_code + where_to_point + space, "BackRef");
    } else {
      sink_.Put(kBackrefWithSkip + how_to_code + where_to_point + space,
                "BackRefWithSkip");
      sink_.PutInt(skip, "BackRefSkipDistance");
    }
    PutBackReference(obj, reference);
  }
  return true;
}

template <class AllocatorT>
bool Serializer<AllocatorT>::SerializeBuiltinReference(
    HeapObject* obj, HowToCode how_to_code, WhereToPoint where_to_point,
    int skip) {
  if (!obj->IsCode()) return false;

  Code* code = Code::cast(obj);
  int builtin_index = code->builtin_index();
  if (builtin_index < 0) return false;

  DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
         (how_to_code == kFromCode));
  DCHECK_LT(builtin_index, Builtins::builtin_count);
  DCHECK_LE(0, builtin_index);

  if (FLAG_trace_serializer) {
    PrintF(" Encoding builtin reference: %s\n",
           isolate()->builtins()->name(builtin_index));
  }

  FlushSkip(skip);
  sink_.Put(kBuiltin + how_to_code + where_to_point, "Builtin");
  sink_.PutInt(builtin_index, "builtin_index");

  return true;
}

template <class AllocatorT>
bool Serializer<AllocatorT>::ObjectIsBytecodeHandler(HeapObject* obj) const {
  if (!obj->IsCode()) return false;
  Code* code = Code::cast(obj);
  if (isolate()->heap()->IsDeserializeLazyHandler(code)) return false;
  return (code->kind() == Code::BYTECODE_HANDLER);
}

template <class AllocatorT>
void Serializer<AllocatorT>::PutRoot(
    int root_index, HeapObject* object,
    SerializerDeserializer::HowToCode how_to_code,
    SerializerDeserializer::WhereToPoint where_to_point, int skip) {
  if (FLAG_trace_serializer) {
    PrintF(" Encoding root %d:", root_index);
    object->ShortPrint();
    PrintF("\n");
  }

  // Assert that the first 32 root array items are a conscious choice. They are
  // chosen so that the most common ones can be encoded more efficiently.
  STATIC_ASSERT(Heap::kArgumentsMarkerRootIndex ==
                kNumberOfRootArrayConstants - 1);

  if (how_to_code == kPlain && where_to_point == kStartOfObject &&
      root_index < kNumberOfRootArrayConstants && !Heap::InNewSpace(object)) {
    if (skip == 0) {
      sink_.Put(kRootArrayConstants + root_index, "RootConstant");
    } else {
      sink_.Put(kRootArrayConstantsWithSkip + root_index, "RootConstant");
      sink_.PutInt(skip, "SkipInPutRoot");
    }
  } else {
    FlushSkip(skip);
    sink_.Put(kRootArray + how_to_code + where_to_point, "RootSerialization");
    sink_.PutInt(root_index, "root_index");
    hot_objects_.Add(object);
  }
}

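// A Smi is an immediate, so its payload is the tagged value itself. Note that
// &smi below is the address of the local Smi* parameter; its pointer-sized
// bit pattern is what gets written, to be read back verbatim as one word.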
template <class AllocatorT>
void Serializer<AllocatorT>::PutSmi(Smi* smi) {
  sink_.Put(kOnePointerRawData, "Smi");
  byte* bytes = reinterpret_cast<byte*>(&smi);
  for (int i = 0; i < kPointerSize; i++) sink_.Put(bytes[i], "Byte");
}

template <class AllocatorT>
void Serializer<AllocatorT>::PutBackReference(HeapObject* object,
                                              SerializerReference reference) {
  DCHECK(allocator()->BackReferenceIsAlreadyAllocated(reference));
  switch (reference.space()) {
    case MAP_SPACE:
      sink_.PutInt(reference.map_index(), "BackRefMapIndex");
      break;

    case LO_SPACE:
      sink_.PutInt(reference.large_object_index(), "BackRefLargeObjectIndex");
      break;

    default:
      sink_.PutInt(reference.chunk_index(), "BackRefChunkIndex");
      sink_.PutInt(reference.chunk_offset(), "BackRefChunkOffset");
      break;
  }

  hot_objects_.Add(object);
}

template <class AllocatorT>
void Serializer<AllocatorT>::PutAttachedReference(SerializerReference reference,
                                                  HowToCode how_to_code,
                                                  WhereToPoint where_to_point) {
  DCHECK(reference.is_attached_reference());
  DCHECK((how_to_code == kPlain && where_to_point == kStartOfObject) ||
         (how_to_code == kFromCode && where_to_point == kStartOfObject) ||
         (how_to_code == kFromCode && where_to_point == kInnerPointer));
  sink_.Put(kAttachedReference + how_to_code + where_to_point, "AttachedRef");
  sink_.PutInt(reference.attached_reference_index(), "AttachedRefIndex");
}

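// If the object needs stronger alignment than the default word alignment
// (e.g. kDoubleAligned), emit an alignment prefix so the deserializer can
// insert filler, and return the maximum number of filler bytes it may need.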
template <class AllocatorT>
int Serializer<AllocatorT>::PutAlignmentPrefix(HeapObject* object) {
  AllocationAlignment alignment = HeapObject::RequiredAlignment(object->map());
  if (alignment != kWordAligned) {
    DCHECK(1 <= alignment && alignment <= 3);
    byte prefix = (kAlignmentPrefix - 1) + alignment;
    sink_.Put(prefix, "Alignment");
    return Heap::GetMaximumFillToAlign(alignment);
  }
  return 0;
}

template <class AllocatorT>
void Serializer<AllocatorT>::PutNextChunk(int space) {
  sink_.Put(kNextChunk, "NextChunk");
  sink_.Put(space, "NextChunkSpace");
}

template <class AllocatorT>
void Serializer<AllocatorT>::Pad() {
  // The non-branching GetInt will read up to 3 bytes too far, so we need
  // to pad the snapshot to make sure we don't read over the end.
  for (unsigned i = 0; i < sizeof(int32_t) - 1; i++) {
    sink_.Put(kNop, "Padding");
  }
  // Pad up to pointer size for checksum.
  while (!IsAligned(sink_.Position(), kPointerAlignment)) {
    sink_.Put(kNop, "Padding");
  }
}

template <class AllocatorT>
void Serializer<AllocatorT>::InitializeCodeAddressMap() {
  isolate_->InitializeLoggingAndCounters();
  code_address_map_ = new CodeAddressMap(isolate_);
}

template <class AllocatorT>
Code* Serializer<AllocatorT>::CopyCode(Code* code) {
  code_buffer_.clear();  // Clear buffer without deleting backing store.
  int size = code->CodeSize();
  code_buffer_.insert(code_buffer_.end(),
                      reinterpret_cast<byte*>(code->address()),
                      reinterpret_cast<byte*>(code->address() + size));
  return Code::cast(HeapObject::FromAddress(
      reinterpret_cast<Address>(&code_buffer_.front())));
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializePrologue(
    AllocationSpace space, int size, Map* map) {
  if (serializer_->code_address_map_) {
    const char* code_name =
        serializer_->code_address_map_->Lookup(object_->address());
    LOG(serializer_->isolate_,
        CodeNameEvent(object_->address(), sink_->Position(), code_name));
  }

  SerializerReference back_reference;
  if (space == LO_SPACE) {
    sink_->Put(kNewObject + reference_representation_ + space,
               "NewLargeObject");
    sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
    if (object_->IsCode()) {
      sink_->Put(EXECUTABLE, "executable large object");
    } else {
      sink_->Put(NOT_EXECUTABLE, "not executable large object");
    }
    back_reference = serializer_->allocator()->AllocateLargeObject(size);
  } else if (space == MAP_SPACE) {
    DCHECK_EQ(Map::kSize, size);
    back_reference = serializer_->allocator()->AllocateMap();
    sink_->Put(kNewObject + reference_representation_ + space, "NewMap");
    // This is redundant, but we include it anyway.
    sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
  } else {
    int fill = serializer_->PutAlignmentPrefix(object_);
    back_reference = serializer_->allocator()->Allocate(space, size + fill);
    sink_->Put(kNewObject + reference_representation_ + space, "NewObject");
    sink_->PutInt(size >> kObjectAlignmentBits, "ObjectSizeInWords");
  }

#ifdef OBJECT_PRINT
  if (FLAG_serialization_statistics) {
    serializer_->CountInstanceType(map, size, space);
  }
#endif  // OBJECT_PRINT

  // Mark this object as already serialized.
  serializer_->reference_map()->Add(object_, back_reference);

  // Serialize the map (first word of the object).
  serializer_->SerializeObject(map, kPlain, kStartOfObject, 0);
}

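// Off-heap backing stores are deduplicated via the reference map: the raw
// bytes are written only the first time a given store is seen, and every
// JSArrayBuffer or typed array sharing it reuses the same reference index.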
template <class AllocatorT>
int32_t Serializer<AllocatorT>::ObjectSerializer::SerializeBackingStore(
    void* backing_store, int32_t byte_length) {
  SerializerReference reference =
      serializer_->reference_map()->LookupReference(backing_store);

  // Serialize the off-heap backing store.
  if (!reference.is_valid()) {
    sink_->Put(kOffHeapBackingStore, "Off-heap backing store");
    sink_->PutInt(byte_length, "length");
    sink_->PutRaw(static_cast<byte*>(backing_store), byte_length,
                  "BackingStore");
    reference = serializer_->allocator()->AllocateOffHeapBackingStore();
    // Mark this backing store as already serialized.
    serializer_->reference_map()->Add(backing_store, reference);
  }

  return static_cast<int32_t>(reference.off_heap_backing_store_index());
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializeJSTypedArray() {
  JSTypedArray* typed_array = JSTypedArray::cast(object_);
  FixedTypedArrayBase* elements =
      FixedTypedArrayBase::cast(typed_array->elements());

  if (!typed_array->WasNeutered()) {
    if (!typed_array->is_on_heap()) {
      // Explicitly serialize the backing store now.
      JSArrayBuffer* buffer = JSArrayBuffer::cast(typed_array->buffer());
      CHECK(buffer->byte_length()->IsSmi());
      CHECK(typed_array->byte_offset()->IsSmi());
      int32_t byte_length = NumberToInt32(buffer->byte_length());
      int32_t byte_offset = NumberToInt32(typed_array->byte_offset());

      // We need to calculate the backing store from the external pointer
      // because the ArrayBuffer may already have been serialized.
      void* backing_store = reinterpret_cast<void*>(
          reinterpret_cast<intptr_t>(elements->external_pointer()) -
          byte_offset);
      int32_t ref = SerializeBackingStore(backing_store, byte_length);

      // The external_pointer is the backing_store + typed_array->byte_offset.
      // To properly share the buffer, we set the backing store ref here. On
      // deserialization we re-add the byte_offset to external_pointer.
      elements->set_external_pointer(Smi::FromInt(ref));
    }
  } else {
    // When a JSArrayBuffer is neutered, the FixedTypedArray that points to
    // the same backing store does not know anything about it. This fixup
    // step finds neutered TypedArrays and clears the values in the
    // FixedTypedArray so that we don't try to serialize the now invalid
    // backing store.
    elements->set_external_pointer(Smi::kZero);
    elements->set_length(0);
  }
  SerializeObject();
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializeJSArrayBuffer() {
  JSArrayBuffer* buffer = JSArrayBuffer::cast(object_);
  void* backing_store = buffer->backing_store();
  // We cannot store byte_length larger than Smi range in the snapshot.
  // Attempt to make sure that NumberToInt32 produces something sensible.
  CHECK(buffer->byte_length()->IsSmi());
  int32_t byte_length = NumberToInt32(buffer->byte_length());

  // The embedder-allocated backing store only exists for the off-heap case.
  if (backing_store != nullptr) {
    int32_t ref = SerializeBackingStore(backing_store, byte_length);
    buffer->set_backing_store(Smi::FromInt(ref));
  }
  SerializeObject();
  buffer->set_backing_store(backing_store);
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializeExternalString() {
  Heap* heap = serializer_->isolate()->heap();
  // For external strings with known resources, we replace the resource field
  // with the encoded external reference, which we restore upon
  // deserialization. For native source code strings, we replace the resource
  // field with the native source id. For the rest, we serialize them to look
  // like ordinary sequential strings.
  if (object_->map() != ReadOnlyRoots(heap).native_source_string_map()) {
    ExternalString* string = ExternalString::cast(object_);
    Address resource = string->resource_as_address();
    ExternalReferenceEncoder::Value reference;
    if (serializer_->external_reference_encoder_.TryEncode(resource).To(
            &reference)) {
      DCHECK(reference.is_from_api());
      string->set_uint32_as_resource(reference.index());
      SerializeObject();
      string->set_address_as_resource(resource);
    } else {
      SerializeExternalStringAsSequentialString();
    }
  } else {
    ExternalOneByteString* string = ExternalOneByteString::cast(object_);
    DCHECK(string->is_short());
    const NativesExternalStringResource* resource =
        reinterpret_cast<const NativesExternalStringResource*>(
            string->resource());
    // Replace the resource field with the type and index of the native
    // source.
    string->set_resource(resource->EncodeForSerialization());
    SerializeObject();
    // Restore the resource field.
    string->set_resource(resource);
  }
}

template <class AllocatorT>
void Serializer<
    AllocatorT>::ObjectSerializer::SerializeExternalStringAsSequentialString() {
  // Instead of serializing this as an external string, we serialize
  // an imaginary sequential string with the same content.
  ReadOnlyRoots roots(serializer_->isolate());
  DCHECK(object_->IsExternalString());
  DCHECK(object_->map() != roots.native_source_string_map());
  ExternalString* string = ExternalString::cast(object_);
  int length = string->length();
  Map* map;
  int content_size;
  int allocation_size;
  const byte* resource;
  // Find the map and size for the imaginary sequential string.
  bool internalized = object_->IsInternalizedString();
  if (object_->IsExternalOneByteString()) {
    map = internalized ? roots.one_byte_internalized_string_map()
                       : roots.one_byte_string_map();
    allocation_size = SeqOneByteString::SizeFor(length);
    content_size = length * kCharSize;
    resource = reinterpret_cast<const byte*>(
        ExternalOneByteString::cast(string)->resource()->data());
  } else {
    map = internalized ? roots.internalized_string_map() : roots.string_map();
    allocation_size = SeqTwoByteString::SizeFor(length);
    content_size = length * kShortSize;
    resource = reinterpret_cast<const byte*>(
        ExternalTwoByteString::cast(string)->resource()->data());
  }

  AllocationSpace space =
      (allocation_size > kMaxRegularHeapObjectSize) ? LO_SPACE : OLD_SPACE;
  SerializePrologue(space, allocation_size, map);

  // Output the rest of the imaginary string.
  int bytes_to_output = allocation_size - HeapObject::kHeaderSize;

  // Output raw data header. Do not bother with common raw length cases here.
  sink_->Put(kVariableRawData, "RawDataForString");
  sink_->PutInt(bytes_to_output, "length");

  // Serialize string header (except for map).
  uint8_t* string_start = reinterpret_cast<uint8_t*>(string->address());
  for (int i = HeapObject::kHeaderSize; i < SeqString::kHeaderSize; i++) {
    sink_->PutSection(string_start[i], "StringHeader");
  }

  // Serialize string content.
  sink_->PutRaw(resource, content_size, "StringContent");

  // Since the allocation size is rounded up to object alignment, there
  // may be left-over bytes that need to be padded.
  int padding_size = allocation_size - SeqString::kHeaderSize - content_size;
  DCHECK(0 <= padding_size && padding_size < kObjectAlignment);
  for (int i = 0; i < padding_size; i++) sink_->PutSection(0, "StringPadding");
}

// Clear and later restore the next link in the weak cell or allocation site.
// TODO(all): replace this with proper iteration of weak slots in serializer.
class UnlinkWeakNextScope {
 public:
  explicit UnlinkWeakNextScope(Heap* heap, HeapObject* object)
      : object_(nullptr) {
    if (object->IsAllocationSite()) {
      object_ = object;
      next_ = AllocationSite::cast(object)->weak_next();
      AllocationSite::cast(object)->set_weak_next(
          ReadOnlyRoots(heap).undefined_value());
    }
  }

  ~UnlinkWeakNextScope() {
    if (object_ != nullptr) {
      AllocationSite::cast(object_)->set_weak_next(next_,
                                                   UPDATE_WEAK_WRITE_BARRIER);
    }
  }

 private:
  HeapObject* object_;
  Object* next_;
  DisallowHeapAllocation no_gc_;
};

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::Serialize() {
  if (FLAG_trace_serializer) {
    PrintF(" Encoding heap object: ");
    object_->ShortPrint();
    PrintF("\n");
  }

  if (object_->IsExternalString()) {
    SerializeExternalString();
    return;
  } else if (!serializer_->isolate()->heap()->InReadOnlySpace(object_)) {
    // Only clear padding for strings outside RO_SPACE. RO_SPACE should have
    // been cleared elsewhere.
    if (object_->IsSeqOneByteString()) {
      // Clear padding bytes at the end. Done here to avoid having to do this
      // at allocation sites in generated code.
      SeqOneByteString::cast(object_)->clear_padding();
    } else if (object_->IsSeqTwoByteString()) {
      SeqTwoByteString::cast(object_)->clear_padding();
    }
  }
  if (object_->IsJSTypedArray()) {
    SerializeJSTypedArray();
    return;
  }
  if (object_->IsJSArrayBuffer()) {
    SerializeJSArrayBuffer();
    return;
  }

  // We don't expect fillers.
  DCHECK(!object_->IsFiller());

  if (object_->IsScript()) {
    // Clear cached line ends.
    Object* undefined = ReadOnlyRoots(serializer_->isolate()).undefined_value();
    Script::cast(object_)->set_line_ends(undefined);
  }

  SerializeObject();
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializeObject() {
  int size = object_->Size();
  Map* map = object_->map();
  AllocationSpace space =
      MemoryChunk::FromAddress(object_->address())->owner()->identity();
  DCHECK(space != NEW_LO_SPACE);
  SerializePrologue(space, size, map);

  // Serialize the rest of the object.
  CHECK_EQ(0, bytes_processed_so_far_);
  bytes_processed_so_far_ = kPointerSize;

  RecursionScope recursion(serializer_);
  // Objects that are immediately post processed during deserialization
  // cannot be deferred, since post processing requires the object content.
  if ((recursion.ExceedsMaximum() && CanBeDeferred(object_)) ||
      serializer_->MustBeDeferred(object_)) {
    serializer_->QueueDeferredObject(object_);
    sink_->Put(kDeferred, "Deferring object content");
    return;
  }

  SerializeContent(map, size);
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializeDeferred() {
  if (FLAG_trace_serializer) {
    PrintF(" Encoding deferred heap object: ");
    object_->ShortPrint();
    PrintF("\n");
  }

  int size = object_->Size();
  Map* map = object_->map();
  SerializerReference back_reference =
      serializer_->reference_map()->LookupReference(object_);
  DCHECK(back_reference.is_back_reference());

  // Serialize the rest of the object.
  CHECK_EQ(0, bytes_processed_so_far_);
  bytes_processed_so_far_ = kPointerSize;

  serializer_->PutAlignmentPrefix(object_);
  sink_->Put(kNewObject + back_reference.space(), "deferred object");
  serializer_->PutBackReference(object_, back_reference);
  sink_->PutInt(size >> kPointerSizeLog2, "deferred object size");

  SerializeContent(map, size);
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::SerializeContent(Map* map,
                                                                int size) {
  UnlinkWeakNextScope unlink_weak_next(serializer_->isolate()->heap(), object_);
  if (object_->IsCode()) {
    // For code objects, output raw bytes first.
    OutputCode(size);
    // Then iterate references via reloc info.
    object_->IterateBody(map, size, this);
    // Finally skip to the end.
    serializer_->FlushSkip(SkipTo(object_->address() + size));
  } else {
    // For other objects, iterate references first.
    object_->IterateBody(map, size, this);
    // Then output data payload, if any.
    OutputRawData(object_->address() + size);
  }
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(HeapObject* host,
                                                             Object** start,
                                                             Object** end) {
  VisitPointers(host, reinterpret_cast<MaybeObject**>(start),
                reinterpret_cast<MaybeObject**>(end));
}

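// The pointer visitor alternates between two phases: runs of Smis and cleared
// weak references are skipped here and later emitted as raw data, while runs
// of strong or weak heap pointers are serialized individually. Consecutive
// repeats of an immortal immovable root are compressed with repeat opcodes.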
template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(
    HeapObject* host, MaybeObject** start, MaybeObject** end) {
  MaybeObject** current = start;
  while (current < end) {
    while (current < end &&
           ((*current)->IsSmi() || (*current)->IsClearedWeakHeapObject())) {
      current++;
    }
    if (current < end) {
      OutputRawData(reinterpret_cast<Address>(current));
    }
    HeapObject* current_contents;
    HeapObjectReferenceType reference_type;
    while (current < end && (*current)->ToStrongOrWeakHeapObject(
                                &current_contents, &reference_type)) {
      int root_index = serializer_->root_index_map()->Lookup(current_contents);
      // Repeats are not subject to the write barrier so we can only use
      // immortal immovable root members. They are never in new space.
      if (current != start && root_index != RootIndexMap::kInvalidRootIndex &&
          Heap::RootIsImmortalImmovable(root_index) &&
          *current == current[-1]) {
        DCHECK_EQ(reference_type, HeapObjectReferenceType::STRONG);
        DCHECK(!Heap::InNewSpace(current_contents));
        int repeat_count = 1;
        while (&current[repeat_count] < end - 1 &&
               current[repeat_count] == *current) {
          repeat_count++;
        }
        current += repeat_count;
        bytes_processed_so_far_ += repeat_count * kPointerSize;
        if (repeat_count > kNumberOfFixedRepeat) {
          sink_->Put(kVariableRepeat, "VariableRepeat");
          sink_->PutInt(repeat_count, "repeat count");
        } else {
          sink_->Put(kFixedRepeatStart + repeat_count, "FixedRepeat");
        }
      } else {
        if (reference_type == HeapObjectReferenceType::WEAK) {
          sink_->Put(kWeakPrefix, "WeakReference");
        }
        serializer_->SerializeObject(current_contents, kPlain, kStartOfObject,
                                     0);
        bytes_processed_so_far_ += kPointerSize;
        current++;
      }
    }
  }
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitEmbeddedPointer(
    Code* host, RelocInfo* rinfo) {
  int skip = SkipTo(rinfo->target_address_address());
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  Object* object = rinfo->target_object();
  serializer_->SerializeObject(HeapObject::cast(object), how_to_code,
                               kStartOfObject, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitExternalReference(
    Foreign* host, Address* p) {
  int skip = SkipTo(reinterpret_cast<Address>(p));
  Address target = *p;
  auto encoded_reference = serializer_->EncodeExternalReference(target);
  if (encoded_reference.is_from_api()) {
    sink_->Put(kApiReference, "ApiRef");
  } else {
    sink_->Put(kExternalReference + kPlain + kStartOfObject, "ExternalRef");
  }
  sink_->PutInt(skip, "SkipB4ExternalRef");
  sink_->PutInt(encoded_reference.index(), "reference index");
  bytes_processed_so_far_ += kPointerSize;
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitExternalReference(
    Code* host, RelocInfo* rinfo) {
  int skip = SkipTo(rinfo->target_address_address());
  Address target = rinfo->target_external_reference();
  auto encoded_reference = serializer_->EncodeExternalReference(target);
  if (encoded_reference.is_from_api()) {
    DCHECK(!rinfo->IsCodedSpecially());
    sink_->Put(kApiReference, "ApiRef");
  } else {
    HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
    sink_->Put(kExternalReference + how_to_code + kStartOfObject,
               "ExternalRef");
  }
  sink_->PutInt(skip, "SkipB4ExternalRef");
  DCHECK_NE(target, kNullAddress);  // Code does not reference null.
  sink_->PutInt(encoded_reference.index(), "reference index");
  bytes_processed_so_far_ += rinfo->target_address_size();
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitInternalReference(
    Code* host, RelocInfo* rinfo) {
  // We do not use skip from last patched pc to find the pc to patch, since
  // target_address_address may not return addresses in ascending order when
  // used for internal references. External references may be stored at the
  // end of the code in the constant pool, whereas internal references are
  // inline. That would cause the skip to be negative. Instead, we store the
  // offset from code entry.
  Address entry = Code::cast(object_)->entry();
  DCHECK_GE(rinfo->target_internal_reference_address(), entry);
  uintptr_t pc_offset = rinfo->target_internal_reference_address() - entry;
  DCHECK_LE(pc_offset, Code::cast(object_)->raw_instruction_size());
  DCHECK_GE(rinfo->target_internal_reference(), entry);
  uintptr_t target_offset = rinfo->target_internal_reference() - entry;
  DCHECK_LE(target_offset, Code::cast(object_)->raw_instruction_size());
  sink_->Put(rinfo->rmode() == RelocInfo::INTERNAL_REFERENCE
                 ? kInternalReference
                 : kInternalReferenceEncoded,
             "InternalRef");
  sink_->PutInt(pc_offset, "internal ref address");
  sink_->PutInt(target_offset, "internal ref value");
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitRuntimeEntry(
    Code* host, RelocInfo* rinfo) {
  int skip = SkipTo(rinfo->target_address_address());
  HowToCode how_to_code = rinfo->IsCodedSpecially() ? kFromCode : kPlain;
  Address target = rinfo->target_address();
  auto encoded_reference = serializer_->EncodeExternalReference(target);
  DCHECK(!encoded_reference.is_from_api());
  sink_->Put(kExternalReference + how_to_code + kStartOfObject, "ExternalRef");
  sink_->PutInt(skip, "SkipB4ExternalRef");
  sink_->PutInt(encoded_reference.index(), "reference index");
  bytes_processed_so_far_ += rinfo->target_address_size();
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitOffHeapTarget(
    Code* host, RelocInfo* rinfo) {
  DCHECK(FLAG_embedded_builtins);
  {
    STATIC_ASSERT(EmbeddedData::kTableSize == Builtins::builtin_count);
    CHECK(Builtins::IsIsolateIndependentBuiltin(host));
    Address addr = rinfo->target_off_heap_target();
    CHECK_NE(kNullAddress, addr);
    CHECK_NOT_NULL(
        InstructionStream::TryLookupCode(serializer_->isolate(), addr));
  }

  int skip = SkipTo(rinfo->target_address_address());
  sink_->Put(kOffHeapTarget, "OffHeapTarget");
  sink_->PutInt(skip, "SkipB4OffHeapTarget");
  sink_->PutInt(host->builtin_index(), "builtin index");
  bytes_processed_so_far_ += rinfo->target_address_size();
}

namespace {
class CompareRelocInfo {
 public:
  bool operator()(RelocInfo x, RelocInfo y) {
    // Everything that does not use target_address_address will compare equal.
    Address x_num = 0;
    Address y_num = 0;
    if (HasTargetAddressAddress(x.rmode())) {
      x_num = x.target_address_address();
    }
    if (HasTargetAddressAddress(y.rmode())) {
      y_num = y.target_address_address();
    }
    return x_num > y_num;
  }

 private:
  static bool HasTargetAddressAddress(RelocInfo::Mode mode) {
    return RelocInfo::IsEmbeddedObject(mode) || RelocInfo::IsCodeTarget(mode) ||
           RelocInfo::IsExternalReference(mode) ||
           RelocInfo::IsRuntimeEntry(mode);
  }
};
}  // namespace

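// SkipTo() requires target addresses in ascending order, but reloc info is
// not guaranteed to iterate in that order. Visit the entries through a
// priority queue instead; since CompareRelocInfo orders by descending
// target_address_address, the queue pops entries in ascending address order.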
template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitRelocInfo(
    RelocIterator* it) {
  std::priority_queue<RelocInfo, std::vector<RelocInfo>, CompareRelocInfo>
      reloc_queue;
  for (; !it->done(); it->next()) {
    reloc_queue.push(*it->rinfo());
  }
  while (!reloc_queue.empty()) {
    RelocInfo rinfo = reloc_queue.top();
    reloc_queue.pop();
    rinfo.Visit(this);
  }
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitCodeTarget(
    Code* host, RelocInfo* rinfo) {
  int skip = SkipTo(rinfo->target_address_address());
  Code* object = Code::GetCodeFromTargetAddress(rinfo->target_address());
  serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
  bytes_processed_so_far_ += rinfo->target_address_size();
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::OutputRawData(Address up_to) {
  Address object_start = object_->address();
  int base = bytes_processed_so_far_;
  int up_to_offset = static_cast<int>(up_to - object_start);
  int to_skip = up_to_offset - bytes_processed_so_far_;
  int bytes_to_output = to_skip;
  bytes_processed_so_far_ += to_skip;
  DCHECK_GE(to_skip, 0);
  if (bytes_to_output != 0) {
    DCHECK(to_skip == bytes_to_output);
    if (IsAligned(bytes_to_output, kPointerAlignment) &&
        bytes_to_output <= kNumberOfFixedRawData * kPointerSize) {
      int size_in_words = bytes_to_output >> kPointerSizeLog2;
      sink_->PutSection(kFixedRawDataStart + size_in_words, "FixedRawData");
    } else {
      sink_->Put(kVariableRawData, "VariableRawData");
      sink_->PutInt(bytes_to_output, "length");
    }
#ifdef MEMORY_SANITIZER
    // Check that we do not serialize uninitialized memory.
    __msan_check_mem_is_initialized(
        reinterpret_cast<void*>(object_start + base), bytes_to_output);
#endif  // MEMORY_SANITIZER
    if (object_->IsBytecodeArray()) {
      // The bytecode age byte can be changed concurrently by GC, so output
      // a fixed kNoAgeBytecodeAge in its place to keep snapshots reproducible.
      const int bytes_to_age_byte = BytecodeArray::kBytecodeAgeOffset - base;
      if (0 <= bytes_to_age_byte && bytes_to_age_byte < bytes_to_output) {
        sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
                      bytes_to_age_byte, "Bytes");
        byte bytecode_age = BytecodeArray::kNoAgeBytecodeAge;
        sink_->PutRaw(&bytecode_age, 1, "Bytes");
        const int bytes_written = bytes_to_age_byte + 1;
        sink_->PutRaw(
            reinterpret_cast<byte*>(object_start + base + bytes_written),
            bytes_to_output - bytes_written, "Bytes");
      } else {
        sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
                      bytes_to_output, "Bytes");
      }
    } else {
      sink_->PutRaw(reinterpret_cast<byte*>(object_start + base),
                    bytes_to_output, "Bytes");
    }
  }
}

template <class AllocatorT>
int Serializer<AllocatorT>::ObjectSerializer::SkipTo(Address to) {
  Address object_start = object_->address();
  int up_to_offset = static_cast<int>(to - object_start);
  int to_skip = up_to_offset - bytes_processed_so_far_;
  bytes_processed_so_far_ += to_skip;
  // This assert will fail if the reloc info gives us the
  // target_address_address locations in a non-ascending order. We make sure
  // this doesn't happen by sorting the relocation info.
  DCHECK_GE(to_skip, 0);
  return to_skip;
}

template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::OutputCode(int size) {
  DCHECK_EQ(kPointerSize, bytes_processed_so_far_);
  Code* code = Code::cast(object_);
  // To make snapshots reproducible, we make a copy of the code object
  // and wipe all pointers in the copy, which we then serialize.
  code = serializer_->CopyCode(code);
  int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
  for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
    RelocInfo* rinfo = it.rinfo();
    rinfo->WipeOut();
  }
  // We need to wipe out the header fields *after* wiping out the
  // relocations, because some of these fields are needed for the latter.
  code->WipeOutHeader();

  Address start = code->address() + Code::kDataStart;
  int bytes_to_output = size - Code::kDataStart;

  sink_->Put(kVariableRawCode, "VariableRawCode");
  sink_->PutInt(bytes_to_output, "length");

#ifdef MEMORY_SANITIZER
  // Check that we do not serialize uninitialized memory.
  __msan_check_mem_is_initialized(reinterpret_cast<void*>(start),
                                  bytes_to_output);
#endif  // MEMORY_SANITIZER
  sink_->PutRaw(reinterpret_cast<byte*>(start), bytes_to_output, "Code");
}

// Explicit instantiation.
template class Serializer<BuiltinSerializerAllocator>;
template class Serializer<DefaultSerializerAllocator>;

}  // namespace internal
}  // namespace v8