// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// The common functionality when building with or without snapshots.

#include "src/snapshot/snapshot.h"

#include "src/assembler-inl.h"
#include "src/base/platform/platform.h"
#include "src/callable.h"
#include "src/interface-descriptors.h"
#include "src/objects-inl.h"
#include "src/snapshot/builtin-deserializer.h"
#include "src/snapshot/builtin-serializer.h"
#include "src/snapshot/partial-deserializer.h"
#include "src/snapshot/snapshot-source-sink.h"
#include "src/snapshot/startup-deserializer.h"
#include "src/utils.h"
#include "src/version.h"

namespace v8 {
namespace internal {

#ifdef DEBUG
bool Snapshot::SnapshotIsValid(const v8::StartupData* snapshot_blob) {
  return Snapshot::ExtractNumContexts(snapshot_blob) > 0;
}
#endif  // DEBUG

bool Snapshot::HasContextSnapshot(Isolate* isolate, size_t index) {
  // Do not use snapshots if the isolate is used to create snapshots.
  const v8::StartupData* blob = isolate->snapshot_blob();
  if (blob == nullptr) return false;
  if (blob->data == nullptr) return false;
  size_t num_contexts = static_cast<size_t>(ExtractNumContexts(blob));
  return index < num_contexts;
}

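// Deserializes the isolate's startup and builtin snapshots as part of
// isolate initialization. Returns false if no snapshot is available.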
bool Snapshot::Initialize(Isolate* isolate) {
  if (!isolate->snapshot_available()) return false;
  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization) timer.Start();

  const v8::StartupData* blob = isolate->snapshot_blob();
  CheckVersion(blob);
  Vector<const byte> startup_data = ExtractStartupData(blob);
  SnapshotData startup_snapshot_data(startup_data);
  Vector<const byte> builtin_data = ExtractBuiltinData(blob);
  BuiltinSnapshotData builtin_snapshot_data(builtin_data);
  StartupDeserializer deserializer(&startup_snapshot_data,
                                   &builtin_snapshot_data);
  deserializer.SetRehashability(ExtractRehashability(blob));
  bool success = isolate->Init(&deserializer);
  if (FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    int bytes = startup_data.length();
    PrintF("[Deserializing isolate (%d bytes) took %0.3f ms]\n", bytes, ms);
  }
  return success;
}

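// Deserializes the context snapshot at context_index, hooking up the given
// global proxy. Returns an empty MaybeHandle on deserialization failure.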
MaybeHandle<Context> Snapshot::NewContextFromSnapshot(
    Isolate* isolate, Handle<JSGlobalProxy> global_proxy, size_t context_index,
    v8::DeserializeEmbedderFieldsCallback embedder_fields_deserializer) {
  if (!isolate->snapshot_available()) return Handle<Context>();
  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization) timer.Start();

  const v8::StartupData* blob = isolate->snapshot_blob();
  bool can_rehash = ExtractRehashability(blob);
  Vector<const byte> context_data =
      ExtractContextData(blob, static_cast<uint32_t>(context_index));
  SnapshotData snapshot_data(context_data);

  MaybeHandle<Context> maybe_result = PartialDeserializer::DeserializeContext(
      isolate, &snapshot_data, can_rehash, global_proxy,
      embedder_fields_deserializer);

  Handle<Context> result;
  if (!maybe_result.ToHandle(&result)) return MaybeHandle<Context>();

  if (FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    int bytes = context_data.length();
    PrintF("[Deserializing context #%zu (%d bytes) took %0.3f ms]\n",
           context_index, bytes, ms);
  }
  return result;
}

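// Deserializes a single builtin on first use. This is the slow path of lazy
// builtin deserialization: lazy builtins initially point at the
// kDeserializeLazy placeholder and are materialized here on demand.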
// static
Code* Snapshot::DeserializeBuiltin(Isolate* isolate, int builtin_id) {
  if (FLAG_trace_lazy_deserialization) {
    PrintF("Lazy-deserializing builtin %s\n", Builtins::name(builtin_id));
  }

  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization) timer.Start();

  const v8::StartupData* blob = isolate->snapshot_blob();
  Vector<const byte> builtin_data = Snapshot::ExtractBuiltinData(blob);
  BuiltinSnapshotData builtin_snapshot_data(builtin_data);

  CodeSpaceMemoryModificationScope code_allocation(isolate->heap());
  BuiltinDeserializer builtin_deserializer(isolate, &builtin_snapshot_data);
  Code* code = builtin_deserializer.DeserializeBuiltin(builtin_id);
  DCHECK_EQ(code, isolate->builtins()->builtin(builtin_id));

  if (FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    int bytes = code->Size();
    PrintF("[Deserializing builtin %s (%d bytes) took %0.3f ms]\n",
           Builtins::name(builtin_id), bytes, ms);
  }

  if (isolate->logger()->is_listening_to_code_events() ||
      isolate->is_profiling()) {
    isolate->logger()->LogCodeObject(code);
  }

  return code;
}

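// Forces deserialization of all builtins still represented by the
// kDeserializeLazy placeholder. A no-op unless lazy deserialization is
// enabled.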
// static
void Snapshot::EnsureAllBuiltinsAreDeserialized(Isolate* isolate) {
  if (!FLAG_lazy_deserialization) return;

  if (FLAG_trace_lazy_deserialization) {
    PrintF("Forcing eager builtin deserialization\n");
  }

  Builtins* builtins = isolate->builtins();
  for (int i = 0; i < Builtins::builtin_count; i++) {
    if (!Builtins::IsLazy(i)) continue;

    DCHECK_NE(Builtins::kDeserializeLazy, i);
    Code* code = builtins->builtin(i);
    if (code->builtin_index() == Builtins::kDeserializeLazy) {
      code = Snapshot::DeserializeBuiltin(isolate, i);
    }

    DCHECK_EQ(i, code->builtin_index());
    DCHECK_EQ(code, builtins->builtin(i));
  }
}

// static
Code* Snapshot::EnsureBuiltinIsDeserialized(Isolate* isolate,
                                            Handle<SharedFunctionInfo> shared) {
  DCHECK(FLAG_lazy_deserialization);

  int builtin_id = shared->builtin_id();

  // We should never lazily deserialize DeserializeLazy.
  DCHECK_NE(Builtins::kDeserializeLazy, builtin_id);

  // Look up code from builtins list.
  Code* code = isolate->builtins()->builtin(builtin_id);

  // Deserialize if builtin is not on the list.
  if (code->builtin_index() != builtin_id) {
    DCHECK_EQ(code->builtin_index(), Builtins::kDeserializeLazy);
    code = Snapshot::DeserializeBuiltin(isolate, builtin_id);
    DCHECK_EQ(builtin_id, code->builtin_index());
    DCHECK_EQ(code, isolate->builtins()->builtin(builtin_id));
  }
  return code;
}

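// Deserializes a single interpreter bytecode handler on first use, analogous
// to DeserializeBuiltin above.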
// static
Code* Snapshot::DeserializeHandler(Isolate* isolate,
                                   interpreter::Bytecode bytecode,
                                   interpreter::OperandScale operand_scale) {
  if (FLAG_trace_lazy_deserialization) {
    PrintF("Lazy-deserializing handler %s\n",
           interpreter::Bytecodes::ToString(bytecode, operand_scale).c_str());
  }

  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization) timer.Start();

  const v8::StartupData* blob = isolate->snapshot_blob();
  Vector<const byte> builtin_data = Snapshot::ExtractBuiltinData(blob);
  BuiltinSnapshotData builtin_snapshot_data(builtin_data);

  CodeSpaceMemoryModificationScope code_allocation(isolate->heap());
  BuiltinDeserializer builtin_deserializer(isolate, &builtin_snapshot_data);
  Code* code = builtin_deserializer.DeserializeHandler(bytecode, operand_scale);

  if (FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    int bytes = code->Size();
    PrintF("[Deserializing handler %s (%d bytes) took %0.3f ms]\n",
           interpreter::Bytecodes::ToString(bytecode, operand_scale).c_str(),
           bytes, ms);
  }

  if (isolate->logger()->is_listening_to_code_events() ||
      isolate->is_profiling()) {
    isolate->logger()->LogBytecodeHandler(bytecode, operand_scale, code);
  }

  return code;
}

void ProfileDeserialization(
    const SnapshotData* startup_snapshot, const SnapshotData* builtin_snapshot,
    const std::vector<SnapshotData*>& context_snapshots) {
  if (FLAG_profile_deserialization) {
    int startup_total = 0;
    PrintF("Deserialization will reserve:\n");
    for (const auto& reservation : startup_snapshot->Reservations()) {
      startup_total += reservation.chunk_size();
    }
    for (const auto& reservation : builtin_snapshot->Reservations()) {
      startup_total += reservation.chunk_size();
    }
    PrintF("%10d bytes per isolate\n", startup_total);
    for (size_t i = 0; i < context_snapshots.size(); i++) {
      int context_total = 0;
      for (const auto& reservation : context_snapshots[i]->Reservations()) {
        context_total += reservation.chunk_size();
      }
      PrintF("%10d bytes per context #%zu\n", context_total, i);
    }
  }
}

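// Assembles the final monolithic blob: a header storing the number of
// contexts, the rehashability flag, the version string, the offset to the
// builtins section, and one offset per context snapshot, followed
// back-to-back by the startup, builtin, and context payloads. The Extract*
// functions below read the blob using this same layout.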
v8::StartupData Snapshot::CreateSnapshotBlob(
    const SnapshotData* startup_snapshot,
    const BuiltinSnapshotData* builtin_snapshot,
    const std::vector<SnapshotData*>& context_snapshots, bool can_be_rehashed) {
  uint32_t num_contexts = static_cast<uint32_t>(context_snapshots.size());
  uint32_t startup_snapshot_offset = StartupSnapshotOffset(num_contexts);
  uint32_t total_length = startup_snapshot_offset;
  total_length += static_cast<uint32_t>(startup_snapshot->RawData().length());
  total_length += static_cast<uint32_t>(builtin_snapshot->RawData().length());
  for (const auto context_snapshot : context_snapshots) {
    total_length += static_cast<uint32_t>(context_snapshot->RawData().length());
  }

  ProfileDeserialization(startup_snapshot, builtin_snapshot, context_snapshots);

  char* data = new char[total_length];
  SetHeaderValue(data, kNumberOfContextsOffset, num_contexts);
  SetHeaderValue(data, kRehashabilityOffset, can_be_rehashed ? 1 : 0);

  // Write version string into snapshot data.
  memset(data + kVersionStringOffset, 0, kVersionStringLength);
  Version::GetString(
      Vector<char>(data + kVersionStringOffset, kVersionStringLength));

  // Startup snapshot (isolate-specific data).
  uint32_t payload_offset = startup_snapshot_offset;
  uint32_t payload_length =
      static_cast<uint32_t>(startup_snapshot->RawData().length());
  CopyBytes(data + payload_offset,
            reinterpret_cast<const char*>(startup_snapshot->RawData().start()),
            payload_length);
  if (FLAG_profile_deserialization) {
    PrintF("Snapshot blob consists of:\n%10d bytes in %d chunks for startup\n",
           payload_length,
           static_cast<uint32_t>(startup_snapshot->Reservations().size()));
  }
  payload_offset += payload_length;

  // Builtins.
  SetHeaderValue(data, kBuiltinOffsetOffset, payload_offset);
  payload_length = builtin_snapshot->RawData().length();
  CopyBytes(data + payload_offset,
            reinterpret_cast<const char*>(builtin_snapshot->RawData().start()),
            payload_length);
  if (FLAG_profile_deserialization) {
    PrintF("%10d bytes for builtins\n", payload_length);
  }
  payload_offset += payload_length;

  // Partial snapshots (context-specific data).
  for (uint32_t i = 0; i < num_contexts; i++) {
    SetHeaderValue(data, ContextSnapshotOffsetOffset(i), payload_offset);
    SnapshotData* context_snapshot = context_snapshots[i];
    payload_length = context_snapshot->RawData().length();
    CopyBytes(
        data + payload_offset,
        reinterpret_cast<const char*>(context_snapshot->RawData().start()),
        payload_length);
    if (FLAG_profile_deserialization) {
      PrintF("%10d bytes in %d chunks for context #%d\n", payload_length,
             static_cast<uint32_t>(context_snapshot->Reservations().size()), i);
    }
    payload_offset += payload_length;
  }

  v8::StartupData result = {data, static_cast<int>(total_length)};
  DCHECK_EQ(total_length, payload_offset);
  return result;
}

namespace {
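// Returns true if the builtin's calling convention uses the register that is
// reserved for the off-heap trampoline, either as the context register or as
// a register parameter. Such builtins cannot safely be embedded off-heap.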
bool BuiltinAliasesOffHeapTrampolineRegister(Isolate* isolate, Code* code) {
  DCHECK(Builtins::IsIsolateIndependent(code->builtin_index()));
  switch (Builtins::KindOf(code->builtin_index())) {
    case Builtins::CPP:
    case Builtins::TFC:
    case Builtins::TFH:
    case Builtins::TFJ:
    case Builtins::TFS:
      break;

    // Bytecode handlers will only ever be used by the interpreter and so there
    // will never be a need to use trampolines with them.
    case Builtins::BCH:
    case Builtins::API:
    case Builtins::ASM:
      // TODO(jgruber): Extend checks to remaining kinds.
      return false;
  }

  Callable callable = Builtins::CallableFor(
      isolate, static_cast<Builtins::Name>(code->builtin_index()));
  CallInterfaceDescriptor descriptor = callable.descriptor();

  if (descriptor.ContextRegister() == kOffHeapTrampolineRegister) {
    return true;
  }

  for (int i = 0; i < descriptor.GetRegisterParameterCount(); i++) {
    Register reg = descriptor.GetRegisterParameter(i);
    if (reg == kOffHeapTrampolineRegister) return true;
  }

  return false;
}

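// Rewrites builtin-to-builtin call targets within the off-heap blob: each
// on-heap code target is replaced with the instruction start of the
// corresponding builtin inside the embedded blob itself.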
void FinalizeEmbeddedCodeTargets(Isolate* isolate, EmbeddedData* blob) {
  static const int kRelocMask =
      RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
      RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET);

  for (int i = 0; i < Builtins::builtin_count; i++) {
    if (!Builtins::IsIsolateIndependent(i)) continue;

    Code* code = isolate->builtins()->builtin(i);
    RelocIterator on_heap_it(code, kRelocMask);
    RelocIterator off_heap_it(blob, code, kRelocMask);

#if defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_ARM64) || \
    defined(V8_TARGET_ARCH_ARM)
    // On X64, ARM, ARM64 we emit relative builtin-to-builtin jumps for
    // isolate-independent builtins in the snapshot. This fixes up the relative
    // jumps to the right offsets in the snapshot.
    // See also: Code::IsIsolateIndependent.
    while (!on_heap_it.done()) {
      DCHECK(!off_heap_it.done());

      RelocInfo* rinfo = on_heap_it.rinfo();
      DCHECK_EQ(rinfo->rmode(), off_heap_it.rinfo()->rmode());
      Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
      CHECK(Builtins::IsIsolateIndependentBuiltin(target));

      // Do not emit write-barrier for off-heap writes.
      off_heap_it.rinfo()->set_target_address(
          blob->InstructionStartOfBuiltin(target->builtin_index()),
          SKIP_WRITE_BARRIER);

      on_heap_it.next();
      off_heap_it.next();
    }
    DCHECK(off_heap_it.done());
#else
    // Architectures other than x64 and arm/arm64 do not use pc-relative calls
    // and thus must not contain embedded code targets. Instead, we use an
    // indirection through the root register.
    CHECK(on_heap_it.done());
    CHECK(off_heap_it.done());
#endif  // V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64 || V8_TARGET_ARCH_ARM
  }
}
}  // namespace

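// Builds the embedded blob by copying the instruction streams of all
// isolate-independent builtins into a single contiguous, hashed allocation.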
// static
EmbeddedData EmbeddedData::FromIsolate(Isolate* isolate) {
  Builtins* builtins = isolate->builtins();

  // Store instruction stream lengths and offsets.
  std::vector<struct Metadata> metadata(kTableSize);

  bool saw_unsafe_builtin = false;
  uint32_t raw_data_size = 0;
  for (int i = 0; i < Builtins::builtin_count; i++) {
    Code* code = builtins->builtin(i);

    if (Builtins::IsIsolateIndependent(i)) {
      DCHECK(!Builtins::IsLazy(i));

      // Sanity-check that the given builtin is isolate-independent and does
      // not use the trampoline register in its calling convention.
      if (!code->IsIsolateIndependent(isolate)) {
        saw_unsafe_builtin = true;
        fprintf(stderr, "%s is not isolate-independent.\n", Builtins::name(i));
      }
      if (Builtins::IsWasmRuntimeStub(i) &&
          RelocInfo::RequiresRelocation(code)) {
        // Wasm additionally requires that its runtime stubs must be
        // individually PIC (i.e. we must be able to copy each stub outside the
        // embedded area without relocations). In particular, that means
        // pc-relative calls to other builtins are disallowed.
        saw_unsafe_builtin = true;
        fprintf(stderr, "%s is a wasm runtime stub but needs relocation.\n",
                Builtins::name(i));
      }
      if (BuiltinAliasesOffHeapTrampolineRegister(isolate, code)) {
        saw_unsafe_builtin = true;
        fprintf(stderr, "%s aliases the off-heap trampoline register.\n",
                Builtins::name(i));
      }

      uint32_t length = static_cast<uint32_t>(code->raw_instruction_size());

      DCHECK_EQ(0, raw_data_size % kCodeAlignment);
      metadata[i].instructions_offset = raw_data_size;
      metadata[i].instructions_length = length;

      // Align the start of each instruction stream.
      raw_data_size += PadAndAlign(length);
    } else {
      metadata[i].instructions_offset = raw_data_size;
    }
  }
  CHECK_WITH_MSG(
      !saw_unsafe_builtin,
      "One or more builtins marked as isolate-independent either contains "
      "isolate-dependent code or aliases the off-heap trampoline register. "
      "If in doubt, ask jgruber@");

  const uint32_t blob_size = RawDataOffset() + raw_data_size;
  uint8_t* const blob = new uint8_t[blob_size];
  uint8_t* const raw_data_start = blob + RawDataOffset();

  // Initially zap the entire blob, effectively padding the alignment area
  // between two builtins with int3's (on x64/ia32).
  ZapCode(reinterpret_cast<Address>(blob), blob_size);

  // Write the metadata tables.
  DCHECK_EQ(MetadataSize(), sizeof(metadata[0]) * metadata.size());
  std::memcpy(blob + MetadataOffset(), metadata.data(), MetadataSize());

  // Write the raw data section.
  for (int i = 0; i < Builtins::builtin_count; i++) {
    if (!Builtins::IsIsolateIndependent(i)) continue;
    Code* code = builtins->builtin(i);
    uint32_t offset = metadata[i].instructions_offset;
    uint8_t* dst = raw_data_start + offset;
    DCHECK_LE(RawDataOffset() + offset + code->raw_instruction_size(),
              blob_size);
    std::memcpy(dst, reinterpret_cast<uint8_t*>(code->raw_instruction_start()),
                code->raw_instruction_size());
  }

  EmbeddedData d(blob, blob_size);

  // Fix up call targets that point to other embedded builtins.
  FinalizeEmbeddedCodeTargets(isolate, &d);

  // Hash the blob and store the result.
  STATIC_ASSERT(HashSize() == kSizetSize);
  const size_t hash = d.CreateHash();
  std::memcpy(blob + HashOffset(), &hash, HashSize());

  DCHECK_EQ(hash, d.CreateHash());
  DCHECK_EQ(hash, d.Hash());

  if (FLAG_serialization_statistics) d.PrintStatistics();

  return d;
}

EmbeddedData EmbeddedData::FromBlob() {
  const uint8_t* data = Isolate::CurrentEmbeddedBlob();
  uint32_t size = Isolate::CurrentEmbeddedBlobSize();
  DCHECK_NOT_NULL(data);
  DCHECK_LT(0, size);
  return {data, size};
}

Address EmbeddedData::InstructionStartOfBuiltin(int i) const {
  DCHECK(Builtins::IsBuiltinId(i));
  const struct Metadata* metadata = Metadata();
  const uint8_t* result = RawData() + metadata[i].instructions_offset;
  DCHECK_LE(result, data_ + size_);
  DCHECK_IMPLIES(result == data_ + size_, InstructionSizeOfBuiltin(i) == 0);
  return reinterpret_cast<Address>(result);
}

uint32_t EmbeddedData::InstructionSizeOfBuiltin(int i) const {
  DCHECK(Builtins::IsBuiltinId(i));
  const struct Metadata* metadata = Metadata();
  return metadata[i].instructions_length;
}

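// The hash covers everything in the blob except the hash field itself,
// which is stored at offset 0.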
size_t EmbeddedData::CreateHash() const {
  STATIC_ASSERT(HashOffset() == 0);
  STATIC_ASSERT(HashSize() == kSizetSize);
  return base::hash_range(data_ + HashSize(), data_ + size_);
}

uint32_t Snapshot::ExtractNumContexts(const v8::StartupData* data) {
  CHECK_LT(kNumberOfContextsOffset, data->raw_size);
  uint32_t num_contexts = GetHeaderValue(data, kNumberOfContextsOffset);
  return num_contexts;
}

void EmbeddedData::PrintStatistics() const {
  DCHECK(FLAG_serialization_statistics);

  constexpr int kCount = Builtins::builtin_count;

  int embedded_count = 0;
  int instruction_size = 0;
  int sizes[kCount];
  for (int i = 0; i < kCount; i++) {
    if (!Builtins::IsIsolateIndependent(i)) continue;
    const int size = InstructionSizeOfBuiltin(i);
    instruction_size += size;
    sizes[embedded_count] = size;
    embedded_count++;
  }

  // Sort for percentiles.
  std::sort(&sizes[0], &sizes[embedded_count]);

  const int k50th = embedded_count * 0.5;
  const int k75th = embedded_count * 0.75;
  const int k90th = embedded_count * 0.90;
  const int k99th = embedded_count * 0.99;

  const int metadata_size = static_cast<int>(HashSize() + MetadataSize());

  PrintF("EmbeddedData:\n");
  PrintF("  Total size:                         %d\n",
         static_cast<int>(size()));
  PrintF("  Metadata size:                      %d\n", metadata_size);
  PrintF("  Instruction size:                   %d\n", instruction_size);
  PrintF("  Padding:                            %d\n",
         static_cast<int>(size() - metadata_size - instruction_size));
  PrintF("  Embedded builtin count:             %d\n", embedded_count);
  PrintF("  Instruction size (50th percentile): %d\n", sizes[k50th]);
  PrintF("  Instruction size (75th percentile): %d\n", sizes[k75th]);
  PrintF("  Instruction size (90th percentile): %d\n", sizes[k90th]);
  PrintF("  Instruction size (99th percentile): %d\n", sizes[k99th]);
  PrintF("\n");
}

uint32_t Snapshot::ExtractContextOffset(const v8::StartupData* data,
                                        uint32_t index) {
  // Extract the offset of the context at a given index from the StartupData,
  // and check that it is within bounds.
  uint32_t context_offset =
      GetHeaderValue(data, ContextSnapshotOffsetOffset(index));
  CHECK_LT(context_offset, static_cast<uint32_t>(data->raw_size));
  return context_offset;
}

bool Snapshot::ExtractRehashability(const v8::StartupData* data) {
  CHECK_LT(kRehashabilityOffset, static_cast<uint32_t>(data->raw_size));
  return GetHeaderValue(data, kRehashabilityOffset) != 0;
}

Vector<const byte> Snapshot::ExtractStartupData(const v8::StartupData* data) {
  uint32_t num_contexts = ExtractNumContexts(data);
  uint32_t startup_offset = StartupSnapshotOffset(num_contexts);
  CHECK_LT(startup_offset, data->raw_size);
  uint32_t builtin_offset = GetHeaderValue(data, kBuiltinOffsetOffset);
  CHECK_LT(builtin_offset, data->raw_size);
  CHECK_GT(builtin_offset, startup_offset);
  uint32_t startup_length = builtin_offset - startup_offset;
  const byte* startup_data =
      reinterpret_cast<const byte*>(data->data + startup_offset);
  return Vector<const byte>(startup_data, startup_length);
}

Vector<const byte> Snapshot::ExtractBuiltinData(const v8::StartupData* data) {
  DCHECK(SnapshotIsValid(data));

  uint32_t from_offset = GetHeaderValue(data, kBuiltinOffsetOffset);
  CHECK_LT(from_offset, data->raw_size);

  uint32_t to_offset = GetHeaderValue(data, ContextSnapshotOffsetOffset(0));
  CHECK_LT(to_offset, data->raw_size);

  CHECK_GT(to_offset, from_offset);
  uint32_t length = to_offset - from_offset;
  const byte* builtin_data =
      reinterpret_cast<const byte*>(data->data + from_offset);
  return Vector<const byte>(builtin_data, length);
}

Vector<const byte> Snapshot::ExtractContextData(const v8::StartupData* data,
                                                uint32_t index) {
  uint32_t num_contexts = ExtractNumContexts(data);
  CHECK_LT(index, num_contexts);

  uint32_t context_offset = ExtractContextOffset(data, index);
  uint32_t next_context_offset;
  if (index == num_contexts - 1) {
    next_context_offset = data->raw_size;
  } else {
    next_context_offset = ExtractContextOffset(data, index + 1);
    CHECK_LT(next_context_offset, data->raw_size);
  }

  const byte* context_data =
      reinterpret_cast<const byte*>(data->data + context_offset);
  uint32_t context_length = next_context_offset - context_offset;
  return Vector<const byte>(context_data, context_length);
}

void Snapshot::CheckVersion(const v8::StartupData* data) {
  char version[kVersionStringLength];
  memset(version, 0, kVersionStringLength);
  CHECK_LT(kVersionStringOffset + kVersionStringLength,
           static_cast<uint32_t>(data->raw_size));
  Version::GetString(Vector<char>(version, kVersionStringLength));
  if (strncmp(version, data->data + kVersionStringOffset,
              kVersionStringLength) != 0) {
    FATAL(
        "Version mismatch between V8 binary and snapshot.\n"
        "#   V8 binary version: %.*s\n"
        "#    Snapshot version: %.*s\n"
        "# The snapshot consists of %d bytes and contains %d context(s).",
        kVersionStringLength, version, kVersionStringLength,
        data->data + kVersionStringOffset, data->raw_size,
        ExtractNumContexts(data));
  }
}

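// The serialized format of a SnapshotData is: a header (magic number, number
// of reservations, payload length), followed by the reservation chunk sizes,
// followed by the raw serialized payload.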
template <class AllocatorT>
SnapshotData::SnapshotData(const Serializer<AllocatorT>* serializer) {
  DisallowHeapAllocation no_gc;
  std::vector<Reservation> reservations = serializer->EncodeReservations();
  const std::vector<byte>* payload = serializer->Payload();

  // Calculate sizes.
  uint32_t reservation_size =
      static_cast<uint32_t>(reservations.size()) * kUInt32Size;
  uint32_t size =
      kHeaderSize + reservation_size + static_cast<uint32_t>(payload->size());

  // Allocate backing store and create result data.
  AllocateData(size);

  // Set header values.
  SetMagicNumber(serializer->isolate());
  SetHeaderValue(kNumReservationsOffset, static_cast<int>(reservations.size()));
  SetHeaderValue(kPayloadLengthOffset, static_cast<int>(payload->size()));

  // Copy reservation chunk sizes.
  CopyBytes(data_ + kHeaderSize, reinterpret_cast<byte*>(reservations.data()),
            reservation_size);

  // Copy serialized data.
  CopyBytes(data_ + kHeaderSize + reservation_size, payload->data(),
            static_cast<size_t>(payload->size()));
}

// Explicit instantiation.
template SnapshotData::SnapshotData(
    const Serializer<DefaultSerializerAllocator>* serializer);

std::vector<SerializedData::Reservation> SnapshotData::Reservations() const {
  uint32_t size = GetHeaderValue(kNumReservationsOffset);
  std::vector<SerializedData::Reservation> reservations(size);
  memcpy(reservations.data(), data_ + kHeaderSize,
         size * sizeof(SerializedData::Reservation));
  return reservations;
}

Vector<const byte> SnapshotData::Payload() const {
  uint32_t reservations_size =
      GetHeaderValue(kNumReservationsOffset) * kUInt32Size;
  const byte* payload = data_ + kHeaderSize + reservations_size;
  uint32_t length = GetHeaderValue(kPayloadLengthOffset);
  DCHECK_EQ(data_ + size_, payload + length);
  return Vector<const byte>(payload, length);
}

BuiltinSnapshotData::BuiltinSnapshotData(const BuiltinSerializer* serializer)
    : SnapshotData(serializer) {}

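// A BuiltinSnapshotData payload ends with a table of
// BuiltinSnapshotUtils::kNumberOfCodeObjects uint32 offsets. Payload()
// returns the serialized data without this table; BuiltinOffsets() returns
// the table itself.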
Vector<const byte> BuiltinSnapshotData::Payload() const {
  uint32_t reservations_size =
      GetHeaderValue(kNumReservationsOffset) * kUInt32Size;
  const byte* payload = data_ + kHeaderSize + reservations_size;
  const int builtin_offsets_size =
      BuiltinSnapshotUtils::kNumberOfCodeObjects * kUInt32Size;
  uint32_t payload_length = GetHeaderValue(kPayloadLengthOffset);
  DCHECK_EQ(data_ + size_, payload + payload_length);
  DCHECK_GT(payload_length, builtin_offsets_size);
  return Vector<const byte>(payload, payload_length - builtin_offsets_size);
}

Vector<const uint32_t> BuiltinSnapshotData::BuiltinOffsets() const {
  uint32_t reservations_size =
      GetHeaderValue(kNumReservationsOffset) * kUInt32Size;
  const byte* payload = data_ + kHeaderSize + reservations_size;
  const int builtin_offsets_size =
      BuiltinSnapshotUtils::kNumberOfCodeObjects * kUInt32Size;
  uint32_t payload_length = GetHeaderValue(kPayloadLengthOffset);
  DCHECK_EQ(data_ + size_, payload + payload_length);
  DCHECK_GT(payload_length, builtin_offsets_size);
  const uint32_t* data = reinterpret_cast<const uint32_t*>(
      payload + payload_length - builtin_offsets_size);
  return Vector<const uint32_t>(data,
                                BuiltinSnapshotUtils::kNumberOfCodeObjects);
}

}  // namespace internal
}  // namespace v8