/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "runtime_image.h"

#include <lz4.h>
#include <sstream>
#include <unistd.h>

#include "android-base/file.h"
#include "android-base/stringprintf.h"
#include "android-base/strings.h"

#include "base/arena_allocator.h"
#include "base/arena_containers.h"
#include "base/bit_utils.h"
#include "base/file_utils.h"
#include "base/length_prefixed_array.h"
#include "base/scoped_flock.h"
#include "base/stl_util.h"
#include "base/systrace.h"
#include "base/unix_file/fd_file.h"
#include "base/utils.h"
#include "class_loader_context.h"
#include "class_loader_utils.h"
#include "class_root-inl.h"
#include "dex/class_accessor-inl.h"
#include "gc/space/image_space.h"
#include "image.h"
#include "mirror/object-inl.h"
#include "mirror/object-refvisitor-inl.h"
#include "mirror/object_array-alloc-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/object_array.h"
#include "mirror/string-inl.h"
#include "nterp_helpers.h"
#include "oat.h"
#include "profile/profile_compilation_info.h"
#include "scoped_thread_state_change-inl.h"
#include "vdex_file.h"

namespace art {

using android::base::StringPrintf;

/**
 * The native data structures that we store in the image.
 */
enum class NativeRelocationKind {
  kArtFieldArray,
  kArtMethodArray,
  kArtMethod,
  kImTable,
  // For dex cache arrays which can stay in memory even after startup. Those are
  // dex cache arrays whose size is below a given threshold, defined by
  // DexCache::ShouldAllocateFullArray.
  kFullNativeDexCacheArray,
  // For dex cache arrays which we will want to release after app startup.
  kStartupNativeDexCacheArray,
};
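
// A native relocation is recorded as a (kind, offset) pair: the kind selects
// the image section the data was copied into, and the offset is the position
// within that section. `NativeLocationInImage` below combines the two into
// the final address the data will have once the image is mapped.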

/**
 * Helper class to generate an app image at runtime.
 */
class RuntimeImageHelper {
 public:
  explicit RuntimeImageHelper(gc::Heap* heap) :
    allocator_(Runtime::Current()->GetArenaPool()),
    objects_(allocator_.Adapter()),
    art_fields_(allocator_.Adapter()),
    art_methods_(allocator_.Adapter()),
    im_tables_(allocator_.Adapter()),
    metadata_(allocator_.Adapter()),
    dex_cache_arrays_(allocator_.Adapter()),
    string_reference_offsets_(allocator_.Adapter()),
    sections_(ImageHeader::kSectionCount, allocator_.Adapter()),
    object_offsets_(allocator_.Adapter()),
    classes_(allocator_.Adapter()),
    array_classes_(allocator_.Adapter()),
    dex_caches_(allocator_.Adapter()),
    class_hashes_(allocator_.Adapter()),
    native_relocations_(allocator_.Adapter()),
    boot_image_begin_(heap->GetBootImagesStartAddress()),
    boot_image_size_(heap->GetBootImagesSize()),
    image_begin_(boot_image_begin_ + boot_image_size_),
    // Note: image relocation considers the image header in the bitmap.
    object_section_size_(sizeof(ImageHeader)),
    intern_table_(InternStringHash(this), InternStringEquals(this)),
    class_table_(ClassDescriptorHash(this), ClassDescriptorEquals()) {}
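  // Note that `image_begin_` lays the image out as if it were mapped directly
  // after the boot images; all object and native pointers we encode are
  // computed relative to that address (the loader is expected to relocate
  // them if the image ends up mapped elsewhere).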

  bool Generate(std::string* error_msg) {
    if (!WriteObjects(error_msg)) {
      return false;
    }

    // Generate the sections information stored in the header.
    CreateImageSections();

    // Now that all sections have been created and we know their offset and
    // size, relocate native pointers inside classes and ImTables.
    RelocateNativePointers();

    // Generate the bitmap section, stored page aligned after the sections data
    // and of size `object_section_size_` page aligned.
    size_t sections_end = sections_[ImageHeader::kSectionMetadata].End();
    image_bitmap_ = gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap",
        reinterpret_cast<uint8_t*>(image_begin_),
        RoundUp(object_section_size_, kPageSize));
    for (uint32_t offset : object_offsets_) {
      DCHECK(IsAligned<kObjectAlignment>(image_begin_ + sizeof(ImageHeader) + offset));
      image_bitmap_.Set(
          reinterpret_cast<mirror::Object*>(image_begin_ + sizeof(ImageHeader) + offset));
    }
    const size_t bitmap_bytes = image_bitmap_.Size();
    auto* bitmap_section = &sections_[ImageHeader::kSectionImageBitmap];
    *bitmap_section = ImageSection(RoundUp(sections_end, kPageSize),
                                   RoundUp(bitmap_bytes, kPageSize));

    // Compute boot image checksum and boot image components, to be stored in
    // the header.
    gc::Heap* const heap = Runtime::Current()->GetHeap();
    uint32_t boot_image_components = 0u;
    uint32_t boot_image_checksums = 0u;
    const std::vector<gc::space::ImageSpace*>& image_spaces = heap->GetBootImageSpaces();
    for (size_t i = 0u, size = image_spaces.size(); i != size; ) {
      const ImageHeader& header = image_spaces[i]->GetImageHeader();
      boot_image_components += header.GetComponentCount();
      boot_image_checksums ^= header.GetImageChecksum();
      DCHECK_LE(header.GetImageSpaceCount(), size - i);
      i += header.GetImageSpaceCount();
    }

    header_ = ImageHeader(
        /* image_reservation_size= */ RoundUp(sections_end, kPageSize),
        /* component_count= */ 1,
        image_begin_,
        sections_end,
        sections_.data(),
        /* image_roots= */ image_begin_ + sizeof(ImageHeader),
        /* oat_checksum= */ 0,
        /* oat_file_begin= */ 0,
        /* oat_data_begin= */ 0,
        /* oat_data_end= */ 0,
        /* oat_file_end= */ 0,
        heap->GetBootImagesStartAddress(),
        heap->GetBootImagesSize(),
        boot_image_components,
        boot_image_checksums,
        static_cast<uint32_t>(kRuntimePointerSize));

    // Data size includes everything except the bitmap and the header.
    header_.data_size_ = sections_end - sizeof(ImageHeader);

    // Write image methods - needs to happen after creation of the header.
    WriteImageMethods();

    return true;
  }

  void FillData(std::vector<uint8_t>& data) {
    // Note we don't put the header, we only have it reserved in `data` as
    // Image::WriteData expects the object section to contain the image header.
    auto compute_dest = [&](const ImageSection& section) {
      return data.data() + section.Offset();
    };

    auto objects_section = header_.GetImageSection(ImageHeader::kSectionObjects);
    memcpy(compute_dest(objects_section) + sizeof(ImageHeader), objects_.data(), objects_.size());

    auto fields_section = header_.GetImageSection(ImageHeader::kSectionArtFields);
    memcpy(compute_dest(fields_section), art_fields_.data(), fields_section.Size());

    auto methods_section = header_.GetImageSection(ImageHeader::kSectionArtMethods);
    memcpy(compute_dest(methods_section), art_methods_.data(), methods_section.Size());

    auto im_tables_section = header_.GetImageSection(ImageHeader::kSectionImTables);
    memcpy(compute_dest(im_tables_section), im_tables_.data(), im_tables_section.Size());

    auto intern_section = header_.GetImageSection(ImageHeader::kSectionInternedStrings);
    intern_table_.WriteToMemory(compute_dest(intern_section));

    auto class_table_section = header_.GetImageSection(ImageHeader::kSectionClassTable);
    class_table_.WriteToMemory(compute_dest(class_table_section));

    auto string_offsets_section =
        header_.GetImageSection(ImageHeader::kSectionStringReferenceOffsets);
    memcpy(compute_dest(string_offsets_section),
           string_reference_offsets_.data(),
           string_offsets_section.Size());

    auto dex_cache_section = header_.GetImageSection(ImageHeader::kSectionDexCacheArrays);
    memcpy(compute_dest(dex_cache_section), dex_cache_arrays_.data(), dex_cache_section.Size());

    auto metadata_section = header_.GetImageSection(ImageHeader::kSectionMetadata);
    memcpy(compute_dest(metadata_section), metadata_.data(), metadata_section.Size());

    DCHECK_EQ(metadata_section.Offset() + metadata_section.Size(), data.size());
  }


  ImageHeader* GetHeader() {
    return &header_;
  }

  const gc::accounting::ContinuousSpaceBitmap& GetImageBitmap() const {
    return image_bitmap_;
  }

  const std::string& GetDexLocation() const {
    return dex_location_;
  }

 private:
  bool IsInBootImage(const void* obj) const {
    return reinterpret_cast<uintptr_t>(obj) - boot_image_begin_ < boot_image_size_;
  }
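  // The subtraction above is done on unsigned values, so addresses below
  // `boot_image_begin_` wrap around and compare as out of range; a single
  // comparison thus checks both bounds.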

  // Returns the image contents for `cls`. If `cls` is in the boot image, the
  // method just returns it.
  mirror::Class* GetClassContent(ObjPtr<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (cls == nullptr || IsInBootImage(cls.Ptr())) {
      return cls.Ptr();
    }
    const dex::ClassDef* class_def = cls->GetClassDef();
    DCHECK(class_def != nullptr) << cls->PrettyClass();
    auto it = classes_.find(class_def);
    DCHECK(it != classes_.end()) << cls->PrettyClass();
    mirror::Class* result = reinterpret_cast<mirror::Class*>(objects_.data() + it->second);
    DCHECK(result->GetClass()->IsClass());
    return result;
  }

  // Returns a pointer that can be stored in `objects_`:
  // - The pointer itself for boot image objects,
  // - The offset in the image for all other objects.
  template <typename T> T* GetOrComputeImageAddress(ObjPtr<T> object)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (object == nullptr || IsInBootImage(object.Ptr())) {
      DCHECK(object == nullptr || Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(object));
      return object.Ptr();
    }

    if (object->IsClassLoader()) {
      // DexCache and Class point to class loaders. For runtime-generated app
      // images, we don't encode the class loader. It will be set when the
      // runtime is loading the image.
      return nullptr;
    }

    if (object->GetClass() == GetClassRoot<mirror::ClassExt>()) {
      // No need to encode `ClassExt`. If needed, it will be reconstructed at
      // runtime.
      return nullptr;
    }

    uint32_t offset = 0u;
    if (object->IsClass()) {
      offset = CopyClass(object->AsClass());
    } else if (object->IsDexCache()) {
      offset = CopyDexCache(object->AsDexCache());
    } else {
      offset = CopyObject(object);
    }
    return reinterpret_cast<T*>(image_begin_ + sizeof(ImageHeader) + offset);
  }
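  // The returned value is the address the object will have once the image is
  // mapped at `image_begin_`: boot image objects keep their current address,
  // while objects copied into `objects_` get `image_begin_` plus the header
  // plus their offset in the buffer. `FromImageOffsetToVectorOffset` below
  // inverts this computation.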

  void CreateImageSections() {
    sections_[ImageHeader::kSectionObjects] = ImageSection(0u, object_section_size_);
    sections_[ImageHeader::kSectionArtFields] =
        ImageSection(sections_[ImageHeader::kSectionObjects].End(), art_fields_.size());

    // Round up to the alignment for ArtMethod.
    static_assert(IsAligned<sizeof(void*)>(ArtMethod::Size(kRuntimePointerSize)));
    size_t cur_pos = RoundUp(sections_[ImageHeader::kSectionArtFields].End(), sizeof(void*));
    sections_[ImageHeader::kSectionArtMethods] = ImageSection(cur_pos, art_methods_.size());

    // Round up to the alignment for ImTables.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionArtMethods].End(), sizeof(void*));
    sections_[ImageHeader::kSectionImTables] = ImageSection(cur_pos, im_tables_.size());

    // Round up to the alignment for conflict tables.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionImTables].End(), sizeof(void*));
    sections_[ImageHeader::kSectionIMTConflictTables] = ImageSection(cur_pos, 0u);

    sections_[ImageHeader::kSectionRuntimeMethods] =
        ImageSection(sections_[ImageHeader::kSectionIMTConflictTables].End(), 0u);

    // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionRuntimeMethods].End(), sizeof(uint64_t));

    size_t intern_table_bytes = intern_table_.WriteToMemory(nullptr);
    sections_[ImageHeader::kSectionInternedStrings] = ImageSection(cur_pos, intern_table_bytes);

    // Obtain the new position and round it up to the appropriate alignment.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionInternedStrings].End(), sizeof(uint64_t));

    size_t class_table_bytes = class_table_.WriteToMemory(nullptr);
    sections_[ImageHeader::kSectionClassTable] = ImageSection(cur_pos, class_table_bytes);

    // Round up to the alignment of the offsets we are going to store.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionClassTable].End(), sizeof(uint32_t));
    sections_[ImageHeader::kSectionStringReferenceOffsets] = ImageSection(
        cur_pos, string_reference_offsets_.size() * sizeof(string_reference_offsets_[0]));

    // Round up to the alignment dex cache arrays expect.
    cur_pos =
        RoundUp(sections_[ImageHeader::kSectionStringReferenceOffsets].End(), sizeof(void*));
    sections_[ImageHeader::kSectionDexCacheArrays] =
        ImageSection(cur_pos, dex_cache_arrays_.size());

    // Round up to the alignment expected for the metadata, which holds dex
    // cache arrays.
    cur_pos = RoundUp(sections_[ImageHeader::kSectionDexCacheArrays].End(), sizeof(void*));
    sections_[ImageHeader::kSectionMetadata] = ImageSection(cur_pos, metadata_.size());
  }
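
  // The resulting image layout is therefore:
  //   [ImageHeader][objects][ArtFields][ArtMethods][ImTables]
  //   [IMT conflict tables (empty)][runtime methods (empty)]
  //   [interned strings][class table][string reference offsets]
  //   [dex cache arrays][metadata][page-aligned image bitmap]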

  // Returns the copied mirror Object if in the image, or the object directly if
  // in the boot image. For the copy, this is really its content, it should not
  // be returned as an `ObjPtr` (as it's not a GC object), nor stored anywhere.
  template<typename T> T* FromImageOffsetToRuntimeContent(uint32_t offset) {
    if (offset == 0u || IsInBootImage(reinterpret_cast<const void*>(offset))) {
      return reinterpret_cast<T*>(offset);
    }
    uint32_t vector_data_offset = FromImageOffsetToVectorOffset(offset);
    return reinterpret_cast<T*>(objects_.data() + vector_data_offset);
  }

  uint32_t FromImageOffsetToVectorOffset(uint32_t offset) const {
    DCHECK(!IsInBootImage(reinterpret_cast<const void*>(offset)));
    return offset - sizeof(ImageHeader) - image_begin_;
  }

  class InternStringHash {
   public:
    explicit InternStringHash(RuntimeImageHelper* helper) : helper_(helper) {}

    // NO_THREAD_SAFETY_ANALYSIS as these helpers get passed to `HashSet`.
    size_t operator()(mirror::String* str) const NO_THREAD_SAFETY_ANALYSIS {
      int32_t hash = str->GetStoredHashCode();
      DCHECK_EQ(hash, str->ComputeHashCode());
      // An additional cast to prevent undesired sign extension.
      return static_cast<uint32_t>(hash);
    }

    size_t operator()(uint32_t entry) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(helper_->FromImageOffsetToRuntimeContent<mirror::String>(entry));
    }

   private:
    RuntimeImageHelper* helper_;
  };

  class InternStringEquals {
   public:
    explicit InternStringEquals(RuntimeImageHelper* helper) : helper_(helper) {}

    // NO_THREAD_SAFETY_ANALYSIS as these helpers get passed to `HashSet`.
    bool operator()(uint32_t entry, mirror::String* other) const NO_THREAD_SAFETY_ANALYSIS {
      if (kIsDebugBuild) {
        Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
      }
      return other->Equals(helper_->FromImageOffsetToRuntimeContent<mirror::String>(entry));
    }

    bool operator()(uint32_t entry, uint32_t other) const NO_THREAD_SAFETY_ANALYSIS {
      return (*this)(entry, helper_->FromImageOffsetToRuntimeContent<mirror::String>(other));
    }

   private:
    RuntimeImageHelper* helper_;
  };

  using InternTableSet =
        HashSet<uint32_t, DefaultEmptyFn<uint32_t>, InternStringHash, InternStringEquals>;
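
  // Entries in the intern table are the future image addresses of the strings
  // (stored as `uint32_t`), so the hash and equality helpers above translate
  // them back to readable content with `FromImageOffsetToRuntimeContent`
  // before comparing against runtime strings.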

  class ClassDescriptorHash {
   public:
    explicit ClassDescriptorHash(RuntimeImageHelper* helper) : helper_(helper) {}

    uint32_t operator()(const ClassTable::TableSlot& slot) const NO_THREAD_SAFETY_ANALYSIS {
      uint32_t ptr = slot.NonHashData();
      if (helper_->IsInBootImage(reinterpret_cast32<const void*>(ptr))) {
        return reinterpret_cast32<mirror::Class*>(ptr)->DescriptorHash();
      }
      return helper_->class_hashes_.Get(helper_->FromImageOffsetToVectorOffset(ptr));
    }

   private:
    RuntimeImageHelper* helper_;
  };

  class ClassDescriptorEquals {
   public:
    ClassDescriptorEquals() {}

    bool operator()(const ClassTable::TableSlot& a, const ClassTable::TableSlot& b)
        const NO_THREAD_SAFETY_ANALYSIS {
      // No need to fetch the descriptor: we know the classes we are inserting
      // in the ClassTable are unique.
      return a.Data() == b.Data();
    }
  };

  using ClassTableSet = HashSet<ClassTable::TableSlot,
                                ClassTable::TableSlotEmptyFn,
                                ClassDescriptorHash,
                                ClassDescriptorEquals>;
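
  // Class table slots likewise hold image addresses: boot image classes can
  // compute their descriptor hash directly, while copied classes have their
  // hash cached in `class_hashes_`, keyed by their offset in `objects_`.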

  // Helper class to collect classes that we will generate in the image.
  class ClassTableVisitor {
   public:
    ClassTableVisitor(Handle<mirror::ClassLoader> loader, VariableSizedHandleScope& handles)
        : loader_(loader), handles_(handles) {}

    bool operator()(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_) {
      // Record app classes and boot classpath classes: app classes will be
      // generated in the image and put in the class table, boot classpath
      // classes will be put in the class table.
      ObjPtr<mirror::ClassLoader> class_loader = klass->GetClassLoader();
      if (class_loader == loader_.Get() || class_loader == nullptr) {
        handles_.NewHandle(klass);
      }
      return true;
    }

   private:
    Handle<mirror::ClassLoader> loader_;
    VariableSizedHandleScope& handles_;
  };

  // Helper class visitor to filter out classes we cannot emit.
  class PruneVisitor {
   public:
    PruneVisitor(Thread* self,
                 RuntimeImageHelper* helper,
                 const ArenaSet<const DexFile*>& dex_files,
                 ArenaVector<Handle<mirror::Class>>& classes,
                 ArenaAllocator& allocator)
        : self_(self),
          helper_(helper),
          dex_files_(dex_files),
          visited_(allocator.Adapter()),
          classes_to_write_(classes) {}

    bool CanEmitHelper(Handle<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
      // If the class comes from a dex file which is not part of the primary
      // APK, don't encode it.
      if (!ContainsElement(dex_files_, &cls->GetDexFile())) {
        return false;
      }

      // Ensure pointers to classes in `cls` can also be emitted.
      StackHandleScope<1> hs(self_);
      MutableHandle<mirror::Class> other_class = hs.NewHandle(cls->GetSuperClass());
      if (!CanEmit(other_class)) {
        return false;
      }

      other_class.Assign(cls->GetComponentType());
      if (!CanEmit(other_class)) {
        return false;
      }

      for (size_t i = 0, num_interfaces = cls->NumDirectInterfaces(); i < num_interfaces; ++i) {
        other_class.Assign(cls->GetDirectInterface(i));
        if (!CanEmit(other_class)) {
          return false;
        }
      }
      return true;
    }

    bool CanEmit(Handle<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
      if (cls == nullptr) {
        return true;
      }
      // Only emit classes that are resolved and not erroneous.
      if (!cls->IsResolved() || cls->IsErroneous()) {
        return false;
      }

      // Proxy classes are generated at runtime, so don't emit them.
      if (cls->IsProxyClass()) {
        return false;
      }

      // Classes in the boot image can be trivially encoded directly.
      if (helper_->IsInBootImage(cls.Get())) {
        return true;
      }

      if (cls->IsBootStrapClassLoaded()) {
        // We cannot encode classes that are part of the boot classpath.
        return false;
      }

      DCHECK(!cls->IsPrimitive());

      if (cls->IsArrayClass()) {
        if (cls->IsBootStrapClassLoaded()) {
          // For boot classpath arrays, we can only emit them if they are
          // in the boot image already.
          return helper_->IsInBootImage(cls.Get());
        }
        ObjPtr<mirror::Class> temp = cls.Get();
        while ((temp = temp->GetComponentType())->IsArrayClass()) {}
        StackHandleScope<1> hs(self_);
        Handle<mirror::Class> other_class = hs.NewHandle(temp);
        return CanEmit(other_class);
      }
      const dex::ClassDef* class_def = cls->GetClassDef();
      DCHECK_NE(class_def, nullptr);
      auto existing = visited_.find(class_def);
      if (existing != visited_.end()) {
        // Already processed.
        return existing->second == VisitState::kCanEmit;
      }

      visited_.Put(class_def, VisitState::kVisiting);
      if (CanEmitHelper(cls)) {
        visited_.Overwrite(class_def, VisitState::kCanEmit);
        return true;
      } else {
        visited_.Overwrite(class_def, VisitState::kCannotEmit);
        return false;
      }
    }
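    // Note that `visited_` memoizes the answer per `ClassDef`, and a class
    // still in the `kVisiting` state when re-encountered compares unequal to
    // `kCanEmit`, so dependency cycles are conservatively treated as not
    // emittable.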

    void Visit(Handle<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
      MutableHandle<mirror::Class> cls(obj.GetReference());
      if (CanEmit(cls)) {
        if (cls->IsBootStrapClassLoaded()) {
          DCHECK(helper_->IsInBootImage(cls.Get()));
          // Insert the bootclasspath class in the class table.
          uint32_t hash = cls->DescriptorHash();
          helper_->class_table_.InsertWithHash(ClassTable::TableSlot(cls.Get(), hash), hash);
        } else {
          classes_to_write_.push_back(cls);
        }
      }
    }

   private:
    enum class VisitState {
      kVisiting,
      kCanEmit,
      kCannotEmit,
    };

    Thread* const self_;
    RuntimeImageHelper* const helper_;
    const ArenaSet<const DexFile*>& dex_files_;
    ArenaSafeMap<const dex::ClassDef*, VisitState> visited_;
    ArenaVector<Handle<mirror::Class>>& classes_to_write_;
  };

  void EmitClasses(Thread* self, Handle<mirror::ObjectArray<mirror::Object>> dex_cache_array)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedTrace trace("Emit strings and classes");
    ArenaSet<const DexFile*> dex_files(allocator_.Adapter());
    for (int32_t i = 0; i < dex_cache_array->GetLength(); ++i) {
      dex_files.insert(dex_cache_array->Get(i)->AsDexCache()->GetDexFile());
    }

    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> loader = hs.NewHandle(
        dex_cache_array->Get(0)->AsDexCache()->GetClassLoader());
    ClassTable* const class_table = loader->GetClassTable();
    if (class_table == nullptr) {
      return;
    }

    VariableSizedHandleScope handles(self);
    {
      ClassTableVisitor class_table_visitor(loader, handles);
      class_table->Visit(class_table_visitor);
    }

    ArenaVector<Handle<mirror::Class>> classes_to_write(allocator_.Adapter());
    classes_to_write.reserve(class_table->Size());
    {
      PruneVisitor prune_visitor(self, this, dex_files, classes_to_write, allocator_);
      handles.VisitHandles(prune_visitor);
    }

    for (Handle<mirror::Class> cls : classes_to_write) {
      ScopedAssertNoThreadSuspension sants("Writing class");
      CopyClass(cls.Get());
    }

    // Relocate the type array entries. We do this now before creating image
    // sections because we may add new boot image classes into our
    // `class_table_`.
    for (auto entry : dex_caches_) {
      const DexFile& dex_file = *entry.first;
      mirror::DexCache* cache = reinterpret_cast<mirror::DexCache*>(&objects_[entry.second]);
      mirror::GcRootArray<mirror::Class>* old_types_array = cache->GetResolvedTypesArray();
      if (HasNativeRelocation(old_types_array)) {
        auto reloc_it = native_relocations_.find(old_types_array);
        DCHECK(reloc_it != native_relocations_.end());
        ArenaVector<uint8_t>& data =
            (reloc_it->second.first == NativeRelocationKind::kFullNativeDexCacheArray)
                ? dex_cache_arrays_ : metadata_;
        mirror::GcRootArray<mirror::Class>* content_array =
            reinterpret_cast<mirror::GcRootArray<mirror::Class>*>(
                data.data() + reloc_it->second.second);
        for (uint32_t i = 0; i < dex_file.NumTypeIds(); ++i) {
          ObjPtr<mirror::Class> cls = old_types_array->Get(i);
          if (cls == nullptr) {
            content_array->Set(i, nullptr);
          } else if (IsInBootImage(cls.Ptr())) {
            if (!cls->IsPrimitive()) {
              // The dex cache is concurrently updated by the app. If the class
              // collection logic in `PruneVisitor` did not see this class, insert it now.
              // Note that application class tables do not contain primitive
              // classes.
              uint32_t hash = cls->DescriptorHash();
              class_table_.InsertWithHash(ClassTable::TableSlot(cls.Ptr(), hash), hash);
            }
            content_array->Set(i, cls.Ptr());
          } else if (cls->IsArrayClass()) {
            std::string class_name;
            cls->GetDescriptor(&class_name);
            auto class_it = array_classes_.find(class_name);
            if (class_it == array_classes_.end()) {
              content_array->Set(i, nullptr);
            } else {
              mirror::Class* ptr = reinterpret_cast<mirror::Class*>(
                  image_begin_ + sizeof(ImageHeader) + class_it->second);
              content_array->Set(i, ptr);
            }
          } else {
            DCHECK(!cls->IsPrimitive());
            DCHECK(!cls->IsProxyClass());
            const dex::ClassDef* class_def = cls->GetClassDef();
            DCHECK_NE(class_def, nullptr);
            auto class_it = classes_.find(class_def);
            if (class_it == classes_.end()) {
              content_array->Set(i, nullptr);
            } else {
              mirror::Class* ptr = reinterpret_cast<mirror::Class*>(
                  image_begin_ + sizeof(ImageHeader) + class_it->second);
              content_array->Set(i, ptr);
            }
          }
        }
      }
    }
  }

  // Helper visitor returning the location of a native pointer in the image.
  class NativePointerVisitor {
   public:
    explicit NativePointerVisitor(RuntimeImageHelper* helper) : helper_(helper) {}

    template <typename T>
    T* operator()(T* ptr, void** dest_addr ATTRIBUTE_UNUSED) const {
      return helper_->NativeLocationInImage(ptr, /* must_have_relocation= */ true);
    }

    template <typename T> T* operator()(T* ptr, bool must_have_relocation = true) const {
      return helper_->NativeLocationInImage(ptr, must_have_relocation);
    }

   private:
    RuntimeImageHelper* helper_;
  };

  template <typename T> T* NativeLocationInImage(T* ptr, bool must_have_relocation) const {
    if (ptr == nullptr || IsInBootImage(ptr)) {
      return ptr;
    }

    auto it = native_relocations_.find(ptr);
    if (it == native_relocations_.end()) {
      DCHECK(!must_have_relocation);
      return nullptr;
    }
    switch (it->second.first) {
      case NativeRelocationKind::kArtMethod:
      case NativeRelocationKind::kArtMethodArray: {
        uint32_t offset = sections_[ImageHeader::kSectionArtMethods].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kArtFieldArray: {
        uint32_t offset = sections_[ImageHeader::kSectionArtFields].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kImTable: {
        uint32_t offset = sections_[ImageHeader::kSectionImTables].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kStartupNativeDexCacheArray: {
        uint32_t offset = sections_[ImageHeader::kSectionMetadata].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
      case NativeRelocationKind::kFullNativeDexCacheArray: {
        uint32_t offset = sections_[ImageHeader::kSectionDexCacheArrays].Offset();
        return reinterpret_cast<T*>(image_begin_ + offset + it->second.second);
      }
    }
  }

  template <typename Visitor>
  void RelocateMethodPointerArrays(mirror::Class* klass, const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // A bit of magic here: we cast contents from our buffer to mirror::Class,
    // and do pointer comparison between 1) these classes, and 2) boot image objects.
    // Both kinds do not move.

    // See if we need to fixup the vtable field.
    mirror::Class* super = FromImageOffsetToRuntimeContent<mirror::Class>(
        reinterpret_cast32<uint32_t>(
            klass->GetSuperClass<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    DCHECK(super != nullptr) << "j.l.Object should never be in an app runtime image";
    mirror::PointerArray* vtable = FromImageOffsetToRuntimeContent<mirror::PointerArray>(
        reinterpret_cast32<uint32_t>(klass->GetVTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    mirror::PointerArray* super_vtable = FromImageOffsetToRuntimeContent<mirror::PointerArray>(
        reinterpret_cast32<uint32_t>(super->GetVTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    if (vtable != nullptr && vtable != super_vtable) {
      DCHECK(!IsInBootImage(vtable));
      vtable->Fixup(vtable, kRuntimePointerSize, visitor);
    }

    // See if we need to fixup entries in the IfTable.
    mirror::IfTable* iftable = FromImageOffsetToRuntimeContent<mirror::IfTable>(
        reinterpret_cast32<uint32_t>(
            klass->GetIfTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    mirror::IfTable* super_iftable = FromImageOffsetToRuntimeContent<mirror::IfTable>(
        reinterpret_cast32<uint32_t>(
            super->GetIfTable<kVerifyNone, kWithoutReadBarrier>().Ptr()));
    int32_t iftable_count = iftable->Count();
    int32_t super_iftable_count = super_iftable->Count();
    for (int32_t i = 0; i < iftable_count; ++i) {
      mirror::PointerArray* methods = FromImageOffsetToRuntimeContent<mirror::PointerArray>(
          reinterpret_cast32<uint32_t>(
              iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i).Ptr()));
      mirror::PointerArray* super_methods = (i < super_iftable_count)
          ? FromImageOffsetToRuntimeContent<mirror::PointerArray>(
                reinterpret_cast32<uint32_t>(
                    super_iftable->GetMethodArrayOrNull<kVerifyNone, kWithoutReadBarrier>(i).Ptr()))
          : nullptr;
      if (methods != super_methods) {
        DCHECK(!IsInBootImage(methods));
        methods->Fixup(methods, kRuntimePointerSize, visitor);
      }
    }
  }

  template <typename Visitor, typename T>
  void RelocateNativeDexCacheArray(mirror::NativeArray<T>* old_method_array,
                                   uint32_t num_ids,
                                   const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (old_method_array == nullptr) {
      return;
    }

    auto it = native_relocations_.find(old_method_array);
    DCHECK(it != native_relocations_.end());
    ArenaVector<uint8_t>& data =
        (it->second.first == NativeRelocationKind::kFullNativeDexCacheArray)
            ? dex_cache_arrays_ : metadata_;

    mirror::NativeArray<T>* content_array =
        reinterpret_cast<mirror::NativeArray<T>*>(data.data() + it->second.second);
    for (uint32_t i = 0; i < num_ids; ++i) {
      // We may not have relocations for some entries, in which case we'll
      // just store null.
      content_array->Set(i, visitor(content_array->Get(i), /* must_have_relocation= */ false));
    }
  }

  template <typename Visitor>
  void RelocateDexCacheArrays(mirror::DexCache* cache,
                              const DexFile& dex_file,
                              const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    mirror::NativeArray<ArtMethod>* old_method_array = cache->GetResolvedMethodsArray();
    cache->SetResolvedMethodsArray(visitor(old_method_array));
    RelocateNativeDexCacheArray(old_method_array, dex_file.NumMethodIds(), visitor);

    mirror::NativeArray<ArtField>* old_field_array = cache->GetResolvedFieldsArray();
    cache->SetResolvedFieldsArray(visitor(old_field_array));
    RelocateNativeDexCacheArray(old_field_array, dex_file.NumFieldIds(), visitor);

    mirror::GcRootArray<mirror::String>* old_strings_array = cache->GetStringsArray();
    cache->SetStringsArray(visitor(old_strings_array));

    mirror::GcRootArray<mirror::Class>* old_types_array = cache->GetResolvedTypesArray();
    cache->SetResolvedTypesArray(visitor(old_types_array));
  }

  void RelocateNativePointers() {
    ScopedTrace relocate_native_pointers("Relocate native pointers");
    ScopedObjectAccess soa(Thread::Current());
    NativePointerVisitor visitor(this);
    for (auto entry : classes_) {
      mirror::Class* cls = reinterpret_cast<mirror::Class*>(&objects_[entry.second]);
      cls->FixupNativePointers(cls, kRuntimePointerSize, visitor);
      RelocateMethodPointerArrays(cls, visitor);
    }
    for (auto it : array_classes_) {
      mirror::Class* cls = reinterpret_cast<mirror::Class*>(&objects_[it.second]);
      cls->FixupNativePointers(cls, kRuntimePointerSize, visitor);
      RelocateMethodPointerArrays(cls, visitor);
    }
    for (auto it : native_relocations_) {
      if (it.second.first == NativeRelocationKind::kImTable) {
        ImTable* im_table = reinterpret_cast<ImTable*>(im_tables_.data() + it.second.second);
        RelocateImTable(im_table, visitor);
      }
    }
    for (auto it : dex_caches_) {
      mirror::DexCache* cache = reinterpret_cast<mirror::DexCache*>(&objects_[it.second]);
      RelocateDexCacheArrays(cache, *it.first, visitor);
    }
  }

  void RelocateImTable(ImTable* im_table, const NativePointerVisitor& visitor) {
    for (size_t i = 0; i < ImTable::kSize; ++i) {
      ArtMethod* method = im_table->Get(i, kRuntimePointerSize);
      ArtMethod* new_method = nullptr;
      if (method->IsRuntimeMethod() && !IsInBootImage(method)) {
        // New IMT conflict method: just use the boot image version.
        // TODO: Consider copying the new IMT conflict method.
        new_method = Runtime::Current()->GetImtConflictMethod();
        DCHECK(IsInBootImage(new_method));
      } else {
        new_method = visitor(method);
      }
      if (method != new_method) {
        im_table->Set(i, new_method, kRuntimePointerSize);
      }
    }
  }

  void CopyFieldArrays(ObjPtr<mirror::Class> cls, uint32_t class_image_address)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    LengthPrefixedArray<ArtField>* fields[] = {
        cls->GetSFieldsPtr(), cls->GetIFieldsPtr(),
    };
    for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
      if (cur_fields != nullptr) {
        // Copy the array.
        size_t number_of_fields = cur_fields->size();
        size_t size = LengthPrefixedArray<ArtField>::ComputeSize(number_of_fields);
        size_t offset = art_fields_.size();
        art_fields_.resize(offset + size);
        auto* dest_array =
            reinterpret_cast<LengthPrefixedArray<ArtField>*>(art_fields_.data() + offset);
        memcpy(dest_array, cur_fields, size);
        native_relocations_.Put(cur_fields,
                                std::make_pair(NativeRelocationKind::kArtFieldArray, offset));

        // Update the class pointer of individual fields.
        for (size_t i = 0; i != number_of_fields; ++i) {
          dest_array->At(i).GetDeclaringClassAddressWithoutBarrier()->Assign(
              reinterpret_cast<mirror::Class*>(class_image_address));
        }
      }
    }
  }

  void CopyMethodArrays(ObjPtr<mirror::Class> cls,
                        uint32_t class_image_address,
                        bool is_class_initialized)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    size_t number_of_methods = cls->NumMethods();
    if (number_of_methods == 0) {
      return;
    }

    size_t size = LengthPrefixedArray<ArtMethod>::ComputeSize(number_of_methods);
    size_t offset = art_methods_.size();
    art_methods_.resize(offset + size);
    auto* dest_array =
        reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(art_methods_.data() + offset);
    memcpy(dest_array, cls->GetMethodsPtr(), size);
    native_relocations_.Put(cls->GetMethodsPtr(),
                            std::make_pair(NativeRelocationKind::kArtMethodArray, offset));

    for (size_t i = 0; i != number_of_methods; ++i) {
      ArtMethod* method = &cls->GetMethodsPtr()->At(i);
      ArtMethod* copy = &dest_array->At(i);

      // Update the class pointer.
      ObjPtr<mirror::Class> declaring_class = method->GetDeclaringClass();
      if (declaring_class == cls) {
        copy->GetDeclaringClassAddressWithoutBarrier()->Assign(
            reinterpret_cast<mirror::Class*>(class_image_address));
      } else {
        DCHECK(method->IsCopied());
        if (!IsInBootImage(declaring_class.Ptr())) {
          DCHECK(classes_.find(declaring_class->GetClassDef()) != classes_.end());
          copy->GetDeclaringClassAddressWithoutBarrier()->Assign(
              reinterpret_cast<mirror::Class*>(
                  image_begin_ +
                  sizeof(ImageHeader) +
                  classes_.Get(declaring_class->GetClassDef())));
        }
      }

      // Record the native relocation of the method.
      uintptr_t copy_offset =
          reinterpret_cast<uintptr_t>(copy) - reinterpret_cast<uintptr_t>(art_methods_.data());
      native_relocations_.Put(method,
                              std::make_pair(NativeRelocationKind::kArtMethod, copy_offset));

      // Ignore the single-implementation info for abstract methods.
      if (method->IsAbstract()) {
        copy->SetHasSingleImplementation(false);
        copy->SetSingleImplementation(nullptr, kRuntimePointerSize);
      }

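      // The chain below picks the most optimized entrypoint we can safely use
      // without compiled code: native methods get the generic JNI trampoline,
      // methods of unverified classes or without nterp support fall back to
      // the interpreter bridge, and methods that still need a class
      // initialization check go through the resolution trampoline.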
      // Set the entrypoint and data pointer of the method.
      StubType stub;
      if (method->IsNative()) {
        stub = StubType::kQuickGenericJNITrampoline;
      } else if (!cls->IsVerified()) {
        stub = StubType::kQuickToInterpreterBridge;
      } else if (!is_class_initialized && method->NeedsClinitCheckBeforeCall()) {
        stub = StubType::kQuickResolutionTrampoline;
      } else if (interpreter::IsNterpSupported() && CanMethodUseNterp(method)) {
        stub = StubType::kNterpTrampoline;
      } else {
        stub = StubType::kQuickToInterpreterBridge;
      }
      const std::vector<gc::space::ImageSpace*>& image_spaces =
          Runtime::Current()->GetHeap()->GetBootImageSpaces();
      DCHECK(!image_spaces.empty());
      const OatFile* oat_file = image_spaces[0]->GetOatFile();
      DCHECK(oat_file != nullptr);
      const OatHeader& header = oat_file->GetOatHeader();
      copy->SetEntryPointFromQuickCompiledCode(header.GetOatAddress(stub));

      if (method->IsNative()) {
        StubType stub_type = method->IsCriticalNative()
            ? StubType::kJNIDlsymLookupCriticalTrampoline
            : StubType::kJNIDlsymLookupTrampoline;
        copy->SetEntryPointFromJni(header.GetOatAddress(stub_type));
      } else if (method->HasCodeItem()) {
        const uint8_t* code_item = reinterpret_cast<const uint8_t*>(method->GetCodeItem());
        DCHECK_GE(code_item, method->GetDexFile()->DataBegin());
        uint32_t code_item_offset = dchecked_integral_cast<uint32_t>(
            code_item - method->GetDexFile()->DataBegin());
        copy->SetDataPtrSize(
            reinterpret_cast<const void*>(code_item_offset), kRuntimePointerSize);
      }
    }
  }

  void CopyImTable(ObjPtr<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
    ImTable* table = cls->GetImt(kRuntimePointerSize);

    // If the table is null or shared and/or already emitted, we can skip.
    if (table == nullptr || IsInBootImage(table) || HasNativeRelocation(table)) {
      return;
    }
    const size_t size = ImTable::SizeInBytes(kRuntimePointerSize);
    size_t offset = im_tables_.size();
    im_tables_.resize(offset + size);
    uint8_t* dest = im_tables_.data() + offset;
    memcpy(dest, table, size);
    native_relocations_.Put(table, std::make_pair(NativeRelocationKind::kImTable, offset));
  }

  bool HasNativeRelocation(void* ptr) const {
    return native_relocations_.find(ptr) != native_relocations_.end();
  }


  static void LoadClassesFromReferenceProfile(
      Thread* self,
      const dchecked_vector<Handle<mirror::DexCache>>& dex_caches)
          REQUIRES_SHARED(Locks::mutator_lock_) {
    AppInfo* app_info = Runtime::Current()->GetAppInfo();
    std::string profile_file = app_info->GetPrimaryApkReferenceProfile();

    if (profile_file.empty()) {
      return;
    }

    // Lock the file, as it could be concurrently updated by the system. Don't
    // block, as this is app-startup sensitive.
    std::string error;
    ScopedFlock profile =
        LockedFile::Open(profile_file.c_str(), O_RDONLY, /*block=*/false, &error);

    if (profile == nullptr) {
      LOG(DEBUG) << "Couldn't lock the profile file " << profile_file << ": " << error;
      return;
    }

    ProfileCompilationInfo profile_info(/* for_boot_image= */ false);

    if (!profile_info.Load(profile->Fd())) {
      LOG(DEBUG) << "Could not load profile file";
      return;
    }

    StackHandleScope<1> hs(self);
    Handle<mirror::ClassLoader> class_loader =
        hs.NewHandle<mirror::ClassLoader>(dex_caches[0]->GetClassLoader());
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    ScopedTrace loading_classes("Loading classes from profile");
    for (auto dex_cache : dex_caches) {
      const DexFile* dex_file = dex_cache->GetDexFile();
      const ArenaSet<dex::TypeIndex>* class_types = profile_info.GetClasses(*dex_file);
      if (class_types == nullptr) {
        // This means the profile file did not reference the dex file, which is
        // the case when the profile contains no classes or methods from it.
        continue;
      }

      for (dex::TypeIndex idx : *class_types) {
        // The index is greater or equal to NumTypeIds if the type is an extra
        // descriptor, not referenced by the dex file.
        if (idx.index_ < dex_file->NumTypeIds()) {
          ObjPtr<mirror::Class> klass = class_linker->ResolveType(idx, dex_cache, class_loader);
          if (klass == nullptr) {
            self->ClearException();
            LOG(DEBUG) << "Failed to preload " << dex_file->PrettyType(idx);
            continue;
          }
        }
      }
    }
  }

  bool WriteObjects(std::string* error_msg) {
    ScopedTrace write_objects("Writing objects");
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    ScopedObjectAccess soa(Thread::Current());
    VariableSizedHandleScope handles(soa.Self());

    Handle<mirror::Class> object_array_class = handles.NewHandle(
        GetClassRoot<mirror::ObjectArray<mirror::Object>>(class_linker));

    Handle<mirror::ObjectArray<mirror::Object>> image_roots = handles.NewHandle(
        mirror::ObjectArray<mirror::Object>::Alloc(
            soa.Self(), object_array_class.Get(), ImageHeader::kImageRootsMax));

    if (image_roots == nullptr) {
      DCHECK(soa.Self()->IsExceptionPending());
      soa.Self()->ClearException();
      *error_msg = "Out of memory when trying to generate a runtime app image";
      return false;
    }

    // Find the dex files that will be used for generating the app image.
    dchecked_vector<Handle<mirror::DexCache>> dex_caches;
    FindDexCaches(soa.Self(), dex_caches, handles);

    if (dex_caches.size() == 0) {
      *error_msg = "Did not find dex caches to generate an app image";
      return false;
    }
    const OatDexFile* oat_dex_file = dex_caches[0]->GetDexFile()->GetOatDexFile();
    VdexFile* vdex_file = oat_dex_file->GetOatFile()->GetVdexFile();
    // The first entry in `dex_caches` contains the location of the primary APK.
    dex_location_ = oat_dex_file->GetDexFileLocation();

    size_t number_of_dex_files = vdex_file->GetNumberOfDexFiles();
    if (number_of_dex_files != dex_caches.size()) {
      // This means some dex files haven't been executed. For simplicity, just
      // register them and recollect dex caches.
      Handle<mirror::ClassLoader> loader = handles.NewHandle(dex_caches[0]->GetClassLoader());
      VisitClassLoaderDexFiles(soa.Self(), loader, [&](const art::DexFile* dex_file)
          REQUIRES_SHARED(Locks::mutator_lock_) {
        class_linker->RegisterDexFile(*dex_file, dex_caches[0]->GetClassLoader());
        return true;  // Continue with other dex files.
      });
      dex_caches.clear();
      FindDexCaches(soa.Self(), dex_caches, handles);
      if (number_of_dex_files != dex_caches.size()) {
        *error_msg = "Number of dex caches does not match number of dex files in the primary APK";
        return false;
      }
    }

    // If classes referenced in the reference profile are not loaded, preload
    // them. This makes sure we generate a good runtime app image, even if this
    // current app run did not load all startup classes.
    LoadClassesFromReferenceProfile(soa.Self(), dex_caches);

    // We store the checksums of the dex files used at runtime. These can be
    // different compared to the vdex checksums due to compact dex.
    std::vector<uint32_t> checksums(number_of_dex_files);
    uint32_t checksum_index = 0;
    for (const OatDexFile* current_oat_dex_file : oat_dex_file->GetOatFile()->GetOatDexFiles()) {
      const DexFile::Header* header =
          reinterpret_cast<const DexFile::Header*>(current_oat_dex_file->GetDexFilePointer());
      checksums[checksum_index++] = header->checksum_;
    }
    DCHECK_EQ(checksum_index, number_of_dex_files);

    // Create the fake OatHeader to store the dependencies of the image.
    SafeMap<std::string, std::string> key_value_store;
    Runtime* runtime = Runtime::Current();
    key_value_store.Put(OatHeader::kApexVersionsKey, runtime->GetApexVersions());
    key_value_store.Put(OatHeader::kBootClassPathKey,
                        android::base::Join(runtime->GetBootClassPathLocations(), ':'));
    key_value_store.Put(OatHeader::kBootClassPathChecksumsKey,
                        runtime->GetBootClassPathChecksums());
    key_value_store.Put(OatHeader::kClassPathKey,
                        oat_dex_file->GetOatFile()->GetClassLoaderContext());
    key_value_store.Put(OatHeader::kConcurrentCopying,
                        gUseReadBarrier ? OatHeader::kTrueValue : OatHeader::kFalseValue);

    std::unique_ptr<const InstructionSetFeatures> isa_features =
        InstructionSetFeatures::FromCppDefines();
    std::unique_ptr<OatHeader> oat_header(
        OatHeader::Create(kRuntimeISA,
                          isa_features.get(),
                          number_of_dex_files,
                          &key_value_store));

    // Create the byte array containing the oat header and dex checksums.
    uint32_t checksums_size = checksums.size() * sizeof(uint32_t);
    Handle<mirror::ByteArray> header_data = handles.NewHandle(
        mirror::ByteArray::Alloc(soa.Self(), oat_header->GetHeaderSize() + checksums_size));

    if (header_data == nullptr) {
      DCHECK(soa.Self()->IsExceptionPending());
      soa.Self()->ClearException();
      *error_msg = "Out of memory when trying to generate a runtime app image";
      return false;
    }

    memcpy(header_data->GetData(), oat_header.get(), oat_header->GetHeaderSize());
    memcpy(header_data->GetData() + oat_header->GetHeaderSize(), checksums.data(), checksums_size);

    // Create and populate the dex caches array.
    Handle<mirror::ObjectArray<mirror::Object>> dex_cache_array = handles.NewHandle(
        mirror::ObjectArray<mirror::Object>::Alloc(
            soa.Self(), object_array_class.Get(), dex_caches.size()));

    if (dex_cache_array == nullptr) {
      DCHECK(soa.Self()->IsExceptionPending());
      soa.Self()->ClearException();
      *error_msg = "Out of memory when trying to generate a runtime app image";
      return false;
    }

    for (uint32_t i = 0; i < dex_caches.size(); ++i) {
      dex_cache_array->Set(i, dex_caches[i].Get());
    }

    image_roots->Set(ImageHeader::kDexCaches, dex_cache_array.Get());
    image_roots->Set(ImageHeader::kClassRoots, class_linker->GetClassRoots());
    image_roots->Set(ImageHeader::kAppImageOatHeader, header_data.Get());

    {
      // Now that we have created all objects needed for the `image_roots`, copy
      // it into the buffer. Note that this will recursively copy all objects
      // contained in `image_roots`. That's acceptable as we don't have cycles,
      // nor a deep graph.
      ScopedAssertNoThreadSuspension sants("Writing runtime app image");
      CopyObject(image_roots.Get());
    }

    // Emit classes defined in the app class loader (which will also indirectly
    // emit dex caches and their arrays).
    EmitClasses(soa.Self(), dex_cache_array);

    return true;
  }
1183 
1184   class FixupVisitor {
1185    public:
FixupVisitor(RuntimeImageHelper * image,size_t copy_offset)1186     FixupVisitor(RuntimeImageHelper* image, size_t copy_offset)
1187         : image_(image), copy_offset_(copy_offset) {}
1188 
1189     // We do not visit native roots. These are handled with other logic.
VisitRootIfNonNull(mirror::CompressedReference<mirror::Object> * root ATTRIBUTE_UNUSED) const1190     void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1191         const {
1192       LOG(FATAL) << "UNREACHABLE";
1193     }
VisitRoot(mirror::CompressedReference<mirror::Object> * root ATTRIBUTE_UNUSED) const1194     void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {
1195       LOG(FATAL) << "UNREACHABLE";
1196     }
1197 
operator ()(ObjPtr<mirror::Object> obj,MemberOffset offset,bool is_static) const1198     void operator()(ObjPtr<mirror::Object> obj,
1199                     MemberOffset offset,
1200                     bool is_static) const
1201         REQUIRES_SHARED(Locks::mutator_lock_) {
      // We don't copy static fields; they are handled when we try to
      // initialize the class.
      ObjPtr<mirror::Object> ref =
          is_static ? nullptr : obj->GetFieldObject<mirror::Object>(offset);
      mirror::Object* address = image_->GetOrComputeImageAddress(ref);
      mirror::Object* copy =
          reinterpret_cast<mirror::Object*>(image_->objects_.data() + copy_offset_);
      copy->GetFieldObjectReferenceAddr<kVerifyNone>(offset)->Assign(address);
    }

    // java.lang.ref.Reference visitor.
    void operator()(ObjPtr<mirror::Class> klass ATTRIBUTE_UNUSED,
                    ObjPtr<mirror::Reference> ref) const
        REQUIRES_SHARED(Locks::mutator_lock_) {
      operator()(ref, mirror::Reference::ReferentOffset(), /* is_static= */ false);
    }

   private:
    RuntimeImageHelper* image_;
    size_t copy_offset_;
  };

  template <typename T>
  void CopyNativeDexCacheArray(uint32_t num_entries,
                               uint32_t max_entries,
                               mirror::NativeArray<T>* array) {
    if (array == nullptr) {
      return;
    }

    bool only_startup = !mirror::DexCache::ShouldAllocateFullArray(num_entries, max_entries);
    ArenaVector<uint8_t>& data = only_startup ? metadata_ : dex_cache_arrays_;
    NativeRelocationKind relocation_kind = only_startup
        ? NativeRelocationKind::kStartupNativeDexCacheArray
        : NativeRelocationKind::kFullNativeDexCacheArray;

    size_t size = num_entries * sizeof(void*);
    // We need to reserve space to store `num_entries` because ImageSpace doesn't have
    // access to the dex files when relocating dex caches.
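    // The resulting layout in `data` is [pointer-aligned padding][num_entries][entries...],
    // with `offset` pointing at the first entry and the count one word before it.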
    size_t offset = RoundUp(data.size(), sizeof(void*)) + sizeof(uintptr_t);
    data.resize(RoundUp(data.size(), sizeof(void*)) + sizeof(uintptr_t) + size);
    reinterpret_cast<uintptr_t*>(data.data() + offset)[-1] = num_entries;

    // Copy each entry individually. We cannot use memcpy, as the entries may be
    // updated concurrently by other mutator threads.
    mirror::NativeArray<T>* copy = reinterpret_cast<mirror::NativeArray<T>*>(data.data() + offset);
    for (uint32_t i = 0; i < num_entries; ++i) {
      copy->Set(i, array->Get(i));
    }
    native_relocations_.Put(array, std::make_pair(relocation_kind, offset));
  }

  template <typename T>
  mirror::GcRootArray<T>* CreateGcRootDexCacheArray(uint32_t num_entries,
                                                    uint32_t max_entries,
                                                    mirror::GcRootArray<T>* array) {
    if (array == nullptr) {
      return nullptr;
    }
    bool only_startup = !mirror::DexCache::ShouldAllocateFullArray(num_entries, max_entries);
    ArenaVector<uint8_t>& data = only_startup ? metadata_ : dex_cache_arrays_;
    NativeRelocationKind relocation_kind = only_startup
        ? NativeRelocationKind::kStartupNativeDexCacheArray
        : NativeRelocationKind::kFullNativeDexCacheArray;
    size_t size = num_entries * sizeof(GcRoot<T>);
    // We need to reserve space to store `num_entries` because ImageSpace doesn't have
    // access to the dex files when relocating dex caches.
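    // Same layout as in CopyNativeDexCacheArray, but the count is a 32-bit
    // word, matching the size of a GcRoot entry.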
    static_assert(sizeof(GcRoot<T>) == sizeof(uint32_t));
    size_t offset = data.size() + sizeof(uint32_t);
    data.resize(data.size() + sizeof(uint32_t) + size);
    reinterpret_cast<uint32_t*>(data.data() + offset)[-1] = num_entries;
    native_relocations_.Put(array, std::make_pair(relocation_kind, offset));

    return reinterpret_cast<mirror::GcRootArray<T>*>(data.data() + offset);
  }

  static bool EmitDexCacheArrays() {
    // We need to treat dex cache arrays specially in an image for userfaultfd.
    // Disable for now. See b/270936884.
    return !gUseUserfaultfd;
  }

  uint32_t CopyDexCache(ObjPtr<mirror::DexCache> cache) REQUIRES_SHARED(Locks::mutator_lock_) {
    auto it = dex_caches_.find(cache->GetDexFile());
    if (it != dex_caches_.end()) {
      return it->second;
    }
    uint32_t offset = CopyObject(cache);
    dex_caches_.Put(cache->GetDexFile(), offset);
    // For dex caches, clear pointers to data that will be set at runtime.
    mirror::Object* copy = reinterpret_cast<mirror::Object*>(objects_.data() + offset);
    reinterpret_cast<mirror::DexCache*>(copy)->ResetNativeArrays();
    reinterpret_cast<mirror::DexCache*>(copy)->SetDexFile(nullptr);

    if (!EmitDexCacheArrays()) {
      return offset;
    }

    // Copy the ArtMethod array.
    mirror::NativeArray<ArtMethod>* resolved_methods = cache->GetResolvedMethodsArray();
    CopyNativeDexCacheArray(cache->GetDexFile()->NumMethodIds(),
                            mirror::DexCache::kDexCacheMethodCacheSize,
                            resolved_methods);
    // Store the array pointer in the dex cache, which will be relocated at the end.
    reinterpret_cast<mirror::DexCache*>(copy)->SetResolvedMethodsArray(resolved_methods);

    // Copy the ArtField array.
    mirror::NativeArray<ArtField>* resolved_fields = cache->GetResolvedFieldsArray();
    CopyNativeDexCacheArray(cache->GetDexFile()->NumFieldIds(),
                            mirror::DexCache::kDexCacheFieldCacheSize,
                            resolved_fields);
    // Store the array pointer in the dex cache, which will be relocated at the end.
    reinterpret_cast<mirror::DexCache*>(copy)->SetResolvedFieldsArray(resolved_fields);

    // Copy the type array.
    mirror::GcRootArray<mirror::Class>* resolved_types = cache->GetResolvedTypesArray();
    CreateGcRootDexCacheArray(cache->GetDexFile()->NumTypeIds(),
                              mirror::DexCache::kDexCacheTypeCacheSize,
                              resolved_types);
    // Store the array pointer in the dex cache, which will be relocated at the end.
    reinterpret_cast<mirror::DexCache*>(copy)->SetResolvedTypesArray(resolved_types);

    // Copy the string array.
    mirror::GcRootArray<mirror::String>* strings = cache->GetStringsArray();
    // Note: `new_strings` points to temporary data, and is only valid here.
    mirror::GcRootArray<mirror::String>* new_strings =
        CreateGcRootDexCacheArray(cache->GetDexFile()->NumStringIds(),
                                  mirror::DexCache::kDexCacheStringCacheSize,
                                  strings);
    // Store the array pointer in the dex cache, which will be relocated at the end.
    reinterpret_cast<mirror::DexCache*>(copy)->SetStringsArray(strings);

    // The code below copies new objects, so invalidate the address we have for
    // `copy`.
    copy = nullptr;
    if (strings != nullptr) {
      for (uint32_t i = 0; i < cache->GetDexFile()->NumStringIds(); ++i) {
        ObjPtr<mirror::String> str = strings->Get(i);
        if (str == nullptr || IsInBootImage(str.Ptr())) {
          new_strings->Set(i, str.Ptr());
        } else {
          uint32_t hash = static_cast<uint32_t>(str->GetStoredHashCode());
          DCHECK_EQ(hash, static_cast<uint32_t>(str->ComputeHashCode()))
              << "Dex cache strings should be interned";
          auto it2 = intern_table_.FindWithHash(str.Ptr(), hash);
          if (it2 == intern_table_.end()) {
            uint32_t string_offset = CopyObject(str);
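            // Copied objects live right after the image header, so this is the
            // address the string will have once the image is mapped at
            // `image_begin_`.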
            uint32_t address = image_begin_ + string_offset + sizeof(ImageHeader);
            intern_table_.InsertWithHash(address, hash);
            new_strings->Set(i, reinterpret_cast<mirror::String*>(address));
          } else {
            new_strings->Set(i, reinterpret_cast<mirror::String*>(*it2));
          }
          // To avoid confusing string references from the dex cache object with
          // string references from the array, we use an offset bigger than the
          // size of a DexCache object. ClassLinker::VisitInternedStringReferences
          // knows how to decode this offset.
          string_reference_offsets_.emplace_back(
              sizeof(ImageHeader) + offset, sizeof(mirror::DexCache) + i);
        }
      }
    }

    return offset;
  }

  bool IsInitialized(mirror::Class* cls) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (IsInBootImage(cls)) {
      const OatDexFile* oat_dex_file = cls->GetDexFile().GetOatDexFile();
      DCHECK(oat_dex_file != nullptr) << "We should always have an .oat file for a boot image";
      uint16_t class_def_index = cls->GetDexClassDefIndex();
      ClassStatus oat_file_class_status = oat_dex_file->GetOatClass(class_def_index).GetStatus();
      return oat_file_class_status == ClassStatus::kVisiblyInitialized;
    } else {
      return cls->IsVisiblyInitialized<kVerifyNone>();
    }
  }

  // Try to initialize `copy`. Note that `cls` may not be initialized.
  // This is called after the image generation logic has visited super classes
  // and super interfaces, so we can just check those directly.
  bool TryInitializeClass(mirror::Class* copy, ObjPtr<mirror::Class> cls, uint32_t class_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!cls->IsVerified()) {
      return false;
    }
    if (cls->IsArrayClass()) {
      return true;
    }

    // Check if we have been able to initialize the super class.
    mirror::Class* super = GetClassContent(cls->GetSuperClass());
    DCHECK(super != nullptr)
        << "App image classes should always have a super class: " << cls->PrettyClass();
    if (!IsInitialized(super)) {
      return false;
    }
    // We won't initialize classes that have class initializers.
    if (cls->FindClassInitializer(kRuntimePointerSize) != nullptr) {
      return false;
    }

    // For non-interface classes, we require all implemented interfaces to be
    // initialized.
    if (!cls->IsInterface()) {
      for (size_t i = 0; i < cls->NumDirectInterfaces(); i++) {
        mirror::Class* itf = GetClassContent(cls->GetDirectInterface(i));
        if (!IsInitialized(itf)) {
          return false;
        }
      }
    }

    // Trivial case: no static fields.
    if (cls->NumStaticFields() == 0u) {
      return true;
    }

    // Go over all static fields and try to initialize them.
    EncodedStaticFieldValueIterator it(cls->GetDexFile(), *cls->GetClassDef());
    if (!it.HasNext()) {
      return true;
    }

    // Temporary string offsets in case we fail to initialize the class. We
    // will add the offsets at the end of this method if we are successful.
    ArenaVector<AppImageReferenceOffsetInfo> string_offsets(allocator_.Adapter());
    ClassLinker* linker = Runtime::Current()->GetClassLinker();
    ClassAccessor accessor(cls->GetDexFile(), *cls->GetClassDef());
    for (const ClassAccessor::Field& field : accessor.GetStaticFields()) {
      if (!it.HasNext()) {
        break;
      }
      ArtField* art_field = linker->LookupResolvedField(field.GetIndex(),
                                                        cls->GetDexCache(),
                                                        cls->GetClassLoader(),
                                                        /* is_static= */ true);
      DCHECK_NE(art_field, nullptr);
      MemberOffset offset(art_field->GetOffset());
      switch (it.GetValueType()) {
        case EncodedArrayValueIterator::ValueType::kBoolean:
          copy->SetFieldBoolean<false>(offset, it.GetJavaValue().z);
          break;
        case EncodedArrayValueIterator::ValueType::kByte:
          copy->SetFieldByte<false>(offset, it.GetJavaValue().b);
          break;
        case EncodedArrayValueIterator::ValueType::kShort:
          copy->SetFieldShort<false>(offset, it.GetJavaValue().s);
          break;
        case EncodedArrayValueIterator::ValueType::kChar:
          copy->SetFieldChar<false>(offset, it.GetJavaValue().c);
          break;
        case EncodedArrayValueIterator::ValueType::kInt:
          copy->SetField32<false>(offset, it.GetJavaValue().i);
          break;
        case EncodedArrayValueIterator::ValueType::kLong:
          copy->SetField64<false>(offset, it.GetJavaValue().j);
          break;
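        // Note: floats and doubles are written through the integer members of
        // the jvalue union, i.e. as their raw 32-/64-bit representations.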
        case EncodedArrayValueIterator::ValueType::kFloat:
          copy->SetField32<false>(offset, it.GetJavaValue().i);
          break;
        case EncodedArrayValueIterator::ValueType::kDouble:
          copy->SetField64<false>(offset, it.GetJavaValue().j);
          break;
        case EncodedArrayValueIterator::ValueType::kNull:
          copy->SetFieldObject<false>(offset, nullptr);
          break;
        case EncodedArrayValueIterator::ValueType::kString: {
          ObjPtr<mirror::String> str =
              linker->LookupString(dex::StringIndex(it.GetJavaValue().i), cls->GetDexCache());
          mirror::String* str_copy = nullptr;
          if (str == nullptr) {
            // String wasn't created yet.
            return false;
          } else if (IsInBootImage(str.Ptr())) {
            str_copy = str.Ptr();
          } else {
            uint32_t hash = static_cast<uint32_t>(str->GetStoredHashCode());
            DCHECK_EQ(hash, static_cast<uint32_t>(str->ComputeHashCode()))
                << "Dex cache strings should be interned";
            auto string_it = intern_table_.FindWithHash(str.Ptr(), hash);
            if (string_it == intern_table_.end()) {
              // The string must be interned.
              uint32_t string_offset = CopyObject(str);
              // Reload the class copy after having copied the string.
              copy = reinterpret_cast<mirror::Class*>(objects_.data() + class_offset);
              uint32_t address = image_begin_ + string_offset + sizeof(ImageHeader);
              intern_table_.InsertWithHash(address, hash);
              str_copy = reinterpret_cast<mirror::String*>(address);
            } else {
              str_copy = reinterpret_cast<mirror::String*>(*string_it);
            }
            string_offsets.emplace_back(sizeof(ImageHeader) + class_offset, offset.Int32Value());
          }
          uint8_t* raw_addr = reinterpret_cast<uint8_t*>(copy) + offset.Int32Value();
          mirror::HeapReference<mirror::Object>* objref_addr =
              reinterpret_cast<mirror::HeapReference<mirror::Object>*>(raw_addr);
          objref_addr->Assign</* kIsVolatile= */ false>(str_copy);
          break;
        }
        case EncodedArrayValueIterator::ValueType::kType: {
          // Note that it may be that the referenced type hasn't been processed
          // yet by the image generation logic. In this case we bail out for
          // simplicity.
          ObjPtr<mirror::Class> type =
              linker->LookupResolvedType(dex::TypeIndex(it.GetJavaValue().i), cls);
          mirror::Class* type_copy = nullptr;
          if (type == nullptr) {
            // Class wasn't resolved yet.
            return false;
          } else if (IsInBootImage(type.Ptr())) {
            // Make sure the type is in our class table.
            uint32_t hash = type->DescriptorHash();
            class_table_.InsertWithHash(ClassTable::TableSlot(type.Ptr(), hash), hash);
            type_copy = type.Ptr();
          } else if (type->IsArrayClass()) {
            std::string class_name;
            type->GetDescriptor(&class_name);
            auto class_it = array_classes_.find(class_name);
            if (class_it == array_classes_.end()) {
              return false;
            }
            type_copy = reinterpret_cast<mirror::Class*>(
                image_begin_ + sizeof(ImageHeader) + class_it->second);
          } else {
            const dex::ClassDef* class_def = type->GetClassDef();
            DCHECK_NE(class_def, nullptr);
            auto class_it = classes_.find(class_def);
            if (class_it == classes_.end()) {
              return false;
            }
            type_copy = reinterpret_cast<mirror::Class*>(
                image_begin_ + sizeof(ImageHeader) + class_it->second);
          }
          uint8_t* raw_addr = reinterpret_cast<uint8_t*>(copy) + offset.Int32Value();
          mirror::HeapReference<mirror::Object>* objref_addr =
              reinterpret_cast<mirror::HeapReference<mirror::Object>*>(raw_addr);
          objref_addr->Assign</* kIsVolatile= */ false>(type_copy);
          break;
        }
        default:
          LOG(FATAL) << "Unreachable";
      }
      it.Next();
    }
    // We have successfully initialized the class, so we can now record the
    // string offsets.
    string_reference_offsets_.insert(
        string_reference_offsets_.end(), string_offsets.begin(), string_offsets.end());
    return true;
  }

  uint32_t CopyClass(ObjPtr<mirror::Class> cls) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!cls->IsBootStrapClassLoaded());
    uint32_t offset = 0u;
    if (cls->IsArrayClass()) {
      std::string class_name;
      cls->GetDescriptor(&class_name);
      auto it = array_classes_.find(class_name);
      if (it != array_classes_.end()) {
        return it->second;
      }
      offset = CopyObject(cls);
      array_classes_.Put(class_name, offset);
    } else {
      const dex::ClassDef* class_def = cls->GetClassDef();
      auto it = classes_.find(class_def);
      if (it != classes_.end()) {
        return it->second;
      }
      offset = CopyObject(cls);
      classes_.Put(class_def, offset);
    }

    uint32_t hash = cls->DescriptorHash();
    // Save the hash; the `HashSet` implementation requires it to find the entry.
    class_hashes_.Put(offset, hash);
    uint32_t class_image_address = image_begin_ + sizeof(ImageHeader) + offset;
    bool inserted =
        class_table_.InsertWithHash(ClassTable::TableSlot(class_image_address, hash), hash).second;
    DCHECK(inserted) << "Class " << cls->PrettyDescriptor()
                     << " (" << cls.Ptr() << ") already inserted";

    // Clear internal state.
    mirror::Class* copy = reinterpret_cast<mirror::Class*>(objects_.data() + offset);
    copy->SetClinitThreadId(static_cast<pid_t>(0u));
    if (cls->IsArrayClass()) {
      DCHECK(copy->IsVisiblyInitialized());
    } else {
      copy->SetStatusInternal(cls->IsVerified() ? ClassStatus::kVerified : ClassStatus::kResolved);
    }

    // Clear static field values.
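    // Reference static fields are laid out before primitive ones, so wiping
    // from the first reference static field to the end of the class clears
    // every static field.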
    auto clear_class = [&] () REQUIRES_SHARED(Locks::mutator_lock_) {
      MemberOffset static_offset = cls->GetFirstReferenceStaticFieldOffset(kRuntimePointerSize);
      memset(objects_.data() + offset + static_offset.Uint32Value(),
             0,
             cls->GetClassSize() - static_offset.Uint32Value());
    };
    clear_class();

    bool is_class_initialized = TryInitializeClass(copy, cls, offset);
    // Reload the copy; it may have moved during `TryInitializeClass`.
    copy = reinterpret_cast<mirror::Class*>(objects_.data() + offset);
    if (is_class_initialized) {
      copy->SetStatusInternal(ClassStatus::kVisiblyInitialized);
      if (!cls->IsArrayClass() && !cls->IsFinalizable()) {
        copy->SetObjectSizeAllocFastPath(RoundUp(cls->GetObjectSize(), kObjectAlignment));
      }
      if (cls->IsInterface()) {
        copy->SetAccessFlags(copy->GetAccessFlags() | kAccRecursivelyInitialized);
      }
    } else {
      // If we fail to initialize, remove initialization related flags and
      // clear again.
      copy->SetObjectSizeAllocFastPath(std::numeric_limits<uint32_t>::max());
      copy->SetAccessFlags(copy->GetAccessFlags() & ~kAccRecursivelyInitialized);
      clear_class();
    }

    CopyFieldArrays(cls, class_image_address);
    CopyMethodArrays(cls, class_image_address, is_class_initialized);
    if (cls->ShouldHaveImt()) {
      CopyImTable(cls);
    }

    return offset;
  }

  // Copy `obj` into `objects_` and relocate its references. Returns the offset
  // within our buffer.
  uint32_t CopyObject(ObjPtr<mirror::Object> obj) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Copy the object into `objects_`.
    size_t object_size = obj->SizeOf();
    size_t offset = objects_.size();
    DCHECK(IsAligned<kObjectAlignment>(offset));
    object_offsets_.push_back(offset);
    objects_.resize(RoundUp(offset + object_size, kObjectAlignment));

    mirror::Object* copy = reinterpret_cast<mirror::Object*>(objects_.data() + offset);
    mirror::Object::CopyRawObjectData(
        reinterpret_cast<uint8_t*>(copy), obj, object_size - sizeof(mirror::Object));
    // Clear any lockword data.
    copy->SetLockWord(LockWord::Default(), /* as_volatile= */ false);
    copy->SetClass(obj->GetClass());

    // Fixup reference pointers.
    FixupVisitor visitor(this, offset);
    obj->VisitReferences</*kVisitNativeRoots=*/ false>(visitor, visitor);

    if (obj->IsString()) {
      // Ensure a string always has its hashcode stored. This is checked at
      // runtime, because boot images don't want strings dirtied by hashcode writes.
      reinterpret_cast<mirror::String*>(copy)->GetHashCode();
    }

    object_section_size_ += RoundUp(object_size, kObjectAlignment);
    return offset;
  }

  class CollectDexCacheVisitor : public DexCacheVisitor {
   public:
    explicit CollectDexCacheVisitor(VariableSizedHandleScope& handles) : handles_(handles) {}

    void Visit(ObjPtr<mirror::DexCache> dex_cache)
        REQUIRES_SHARED(Locks::dex_lock_, Locks::mutator_lock_) override {
      dex_caches_.push_back(handles_.NewHandle(dex_cache));
    }
    const std::vector<Handle<mirror::DexCache>>& GetDexCaches() const {
      return dex_caches_;
    }
   private:
    VariableSizedHandleScope& handles_;
    std::vector<Handle<mirror::DexCache>> dex_caches_;
  };

  // Find dex caches corresponding to the primary APK.
  void FindDexCaches(Thread* self,
                     dchecked_vector<Handle<mirror::DexCache>>& dex_caches,
                     VariableSizedHandleScope& handles)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ScopedTrace trace("Find dex caches");
    DCHECK(dex_caches.empty());
    // Collect all dex caches.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    CollectDexCacheVisitor visitor(handles);
    {
      ReaderMutexLock mu(self, *Locks::dex_lock_);
      class_linker->VisitDexCaches(&visitor);
    }

    // Find the primary APK.
    AppInfo* app_info = Runtime::Current()->GetAppInfo();
    for (Handle<mirror::DexCache> cache : visitor.GetDexCaches()) {
      if (app_info->GetRegisteredCodeType(cache->GetDexFile()->GetLocation()) ==
              AppInfo::CodeType::kPrimaryApk) {
        dex_caches.push_back(handles.NewHandle(cache.Get()));
        break;
      }
    }

    if (dex_caches.empty()) {
      return;
    }

    const OatDexFile* oat_dex_file = dex_caches[0]->GetDexFile()->GetOatDexFile();
    if (oat_dex_file == nullptr) {
      // We need a .oat file for loading an app image.
      dex_caches.clear();
      return;
    }

    // Store the dex caches in the order in which their corresponding dex files
    // are stored in the oat file. When we check for checksums at the point of
    // loading the image, we rely on this order.
    for (const OatDexFile* current : oat_dex_file->GetOatFile()->GetOatDexFiles()) {
      if (current != oat_dex_file) {
        for (Handle<mirror::DexCache> cache : visitor.GetDexCaches()) {
          if (cache->GetDexFile()->GetOatDexFile() == current) {
            dex_caches.push_back(handles.NewHandle(cache.Get()));
          }
        }
      }
    }
  }

  static uint64_t PointerToUint64(void* ptr) {
    return reinterpret_cast64<uint64_t>(ptr);
  }

  void WriteImageMethods() {
    ScopedObjectAccess soa(Thread::Current());
    // We can just use plain runtime pointers.
    Runtime* runtime = Runtime::Current();
    header_.image_methods_[ImageHeader::kResolutionMethod] =
        PointerToUint64(runtime->GetResolutionMethod());
    header_.image_methods_[ImageHeader::kImtConflictMethod] =
        PointerToUint64(runtime->GetImtConflictMethod());
    header_.image_methods_[ImageHeader::kImtUnimplementedMethod] =
        PointerToUint64(runtime->GetImtUnimplementedMethod());
    header_.image_methods_[ImageHeader::kSaveAllCalleeSavesMethod] =
        PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves));
    header_.image_methods_[ImageHeader::kSaveRefsOnlyMethod] =
        PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsOnly));
    header_.image_methods_[ImageHeader::kSaveRefsAndArgsMethod] =
        PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
    header_.image_methods_[ImageHeader::kSaveEverythingMethod] =
        PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverything));
    header_.image_methods_[ImageHeader::kSaveEverythingMethodForClinit] =
        PointerToUint64(runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForClinit));
    header_.image_methods_[ImageHeader::kSaveEverythingMethodForSuspendCheck] =
        PointerToUint64(
            runtime->GetCalleeSaveMethod(CalleeSaveType::kSaveEverythingForSuspendCheck));
  }

  // Header for the image, created at the end once we know the size of all
  // sections.
  ImageHeader header_;

  // Allocator for the various data structures to allocate while generating the
  // image.
  ArenaAllocator allocator_;

  // Contents of the various sections.
  ArenaVector<uint8_t> objects_;
  ArenaVector<uint8_t> art_fields_;
  ArenaVector<uint8_t> art_methods_;
  ArenaVector<uint8_t> im_tables_;
  ArenaVector<uint8_t> metadata_;
  ArenaVector<uint8_t> dex_cache_arrays_;

  ArenaVector<AppImageReferenceOffsetInfo> string_reference_offsets_;

  // Bitmap of live objects in `objects_`. Populated from `object_offsets_`
  // once we know `object_section_size_`.
  gc::accounting::ContinuousSpaceBitmap image_bitmap_;

  // Sections stored in the header.
  ArenaVector<ImageSection> sections_;

  // A list of offsets in `objects_` where objects begin.
  ArenaVector<uint32_t> object_offsets_;

  ArenaSafeMap<const dex::ClassDef*, uint32_t> classes_;
  ArenaSafeMap<std::string, uint32_t> array_classes_;
  ArenaSafeMap<const DexFile*, uint32_t> dex_caches_;
  ArenaSafeMap<uint32_t, uint32_t> class_hashes_;

  ArenaSafeMap<void*, std::pair<NativeRelocationKind, uint32_t>> native_relocations_;

  // Cached values of boot image information.
  const uint32_t boot_image_begin_;
  const uint32_t boot_image_size_;

  // Where the image begins: just after the boot image.
  const uint32_t image_begin_;

  // Size of the `kSectionObjects` section.
  size_t object_section_size_;

  // The location of the primary APK / dex file.
  std::string dex_location_;

  // The intern table for strings that we will write to disk.
  InternTableSet intern_table_;

  // The class table holding classes that we will write to disk.
  ClassTableSet class_table_;

  friend class ClassDescriptorHash;
  friend class PruneVisitor;
  friend class NativePointerVisitor;
};

static std::string GetOatPath() {
  const std::string& data_dir = Runtime::Current()->GetProcessDataDirectory();
  if (data_dir.empty()) {
    // The data directory is empty for tests.
    return "";
  }
  return data_dir + "/cache/oat_primary/";
}

// Note: this may return a relative path for tests.
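// For example, a dex location of "/data/app/<pkg>/base.apk" on arm64 maps to
// "<data-dir>/cache/oat_primary/arm64/base.art" (paths here are illustrative).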
std::string RuntimeImage::GetRuntimeImagePath(const std::string& dex_location) {
  std::string basename = android::base::Basename(dex_location);
  std::string filename = ReplaceFileExtension(basename, "art");

  return GetOatPath() + GetInstructionSetString(kRuntimeISA) + "/" + filename;
}

static bool EnsureDirectoryExists(const std::string& directory, std::string* error_msg) {
  if (!OS::DirectoryExists(directory.c_str())) {
    static constexpr mode_t kDirectoryMode = S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH;
    if (mkdir(directory.c_str(), kDirectoryMode) != 0) {
      *error_msg =
          StringPrintf("Could not create directory %s: %s", directory.c_str(), strerror(errno));
      return false;
    }
  }
  return true;
}

bool RuntimeImage::WriteImageToDisk(std::string* error_msg) {
  gc::Heap* heap = Runtime::Current()->GetHeap();
  if (!heap->HasBootImageSpace()) {
    *error_msg = "Cannot generate an app image without a boot image";
    return false;
  }
  std::string oat_path = GetOatPath();
  if (!oat_path.empty() && !EnsureDirectoryExists(oat_path, error_msg)) {
    return false;
  }

  ScopedTrace generate_image_trace("Generating runtime image");
  std::unique_ptr<RuntimeImageHelper> image(new RuntimeImageHelper(heap));
  if (!image->Generate(error_msg)) {
    return false;
  }

  ScopedTrace write_image_trace("Writing runtime image to disk");

  const std::string path = GetRuntimeImagePath(image->GetDexLocation());
  if (!EnsureDirectoryExists(android::base::Dirname(path), error_msg)) {
    return false;
  }

  // We first generate the app image in a temporary file, which we will then
  // move to `path`.
  const std::string temp_path = ReplaceFileExtension(path, std::to_string(getpid()) + ".tmp");
  ImageFileGuard image_file;
  image_file.reset(OS::CreateEmptyFileWriteOnly(temp_path.c_str()));

  if (image_file == nullptr) {
    *error_msg = "Could not open " + temp_path + " for writing";
    return false;
  }

  std::vector<uint8_t> full_data(image->GetHeader()->GetImageSize());
  image->FillData(full_data);

  // Specify default block size of 512K to enable parallel image decompression.
  static constexpr size_t kMaxImageBlockSize = 524288;
  // Use LZ4 as a good compromise between CPU time and compression ratio. LZ4HC
  // empirically takes 10x more time when compressing.
  static constexpr ImageHeader::StorageMode kImageStorageMode = ImageHeader::kStorageModeLZ4;
  // Note: no need to update the checksum of the runtime app image: we have no
  // use for it, and computing it takes CPU time.
  if (!image->GetHeader()->WriteData(
          image_file,
          full_data.data(),
          reinterpret_cast<const uint8_t*>(image->GetImageBitmap().Begin()),
          kImageStorageMode,
          kMaxImageBlockSize,
          /* update_checksum= */ false,
          error_msg)) {
    return false;
  }

  if (!image_file.WriteHeaderAndClose(temp_path, image->GetHeader(), error_msg)) {
    return false;
  }

  if (rename(temp_path.c_str(), path.c_str()) != 0) {
    *error_msg =
        "Failed to move runtime app image to " + path + ": " + std::string(strerror(errno));
    // Unlink directly: we cannot use `image_file` as we have closed it.
    unlink(temp_path.c_str());
    return false;
  }

  return true;
}

}  // namespace art